fractafrag/services/api/tests/test_integration.py
John Lightner 5936ab167e feat(M001): Desire Economy
Completed slices:
- S01: Desire Embedding & Clustering
- S02: Fulfillment Flow & Frontend

Branch: milestone/M001
2026-03-25 02:22:50 -05:00

412 lines
16 KiB
Python

"""Integration tests — end-to-end acceptance scenarios through FastAPI.
Uses async SQLite test database, real FastAPI endpoint handlers,
and dependency overrides for auth and Celery worker.
Test classes:
TestInfrastructureSmoke — proves test infra works (T01)
TestClusteringScenario — clustering + heat elevation via API (T02)
TestFulfillmentScenario — desire fulfillment lifecycle (T02)
TestMCPFieldPassthrough — MCP tool field passthrough (T02, source-level)
"""
import inspect
import json
import uuid
from pathlib import Path
import pytest
from httpx import AsyncClient
from sqlalchemy import select, update
# ── Smoke Test: proves integration infrastructure works ───
class TestInfrastructureSmoke:
    """Verify that the integration test infrastructure (DB, client, auth, Celery mock) works."""

    @pytest.mark.asyncio
    async def test_create_and_read_desire(self, client: AsyncClient):
        """POST a desire, then GET it back — proves DB, serialization, auth override, and Celery mock."""
        prompt = "glowing neon wireframe city"

        # Step 1: create the desire through the public API.
        response = await client.post(
            "/api/v1/desires",
            json={"prompt_text": prompt},
        )
        assert response.status_code == 201, f"Expected 201, got {response.status_code}: {response.text}"
        data = response.json()
        assert data["prompt_text"] == prompt
        assert data["status"] == "open"
        desire_id = data["id"]

        # Step 2: fetch it back and confirm the round-trip plus defaults
        # (a fresh, unclustered desire starts at heat 1.0 / cluster_count 0).
        response = await client.get(f"/api/v1/desires/{desire_id}")
        assert response.status_code == 200, f"Expected 200, got {response.status_code}: {response.text}"
        data = response.json()
        assert data["id"] == desire_id
        assert data["prompt_text"] == prompt
        assert data["heat_score"] == 1.0
        assert data["cluster_count"] == 0
# ── Clustering Scenario ──────────────────────────────────────
class TestClusteringScenario:
    """Prove that clustered desires have elevated heat and cluster_count via the API.

    Strategy: POST desires through the API, then directly insert DesireCluster
    rows and update heat_score in the test DB (simulating what the Celery worker
    pipeline does). Verify via GET /api/v1/desires/{id} that the API returns
    correct heat_score and cluster_count.

    Note: list_desires uses PostgreSQL ANY(:desire_ids) which doesn't work in
    SQLite, so we verify via individual GET requests instead.
    """

    @staticmethod
    async def _simulate_clustering(db_session, desire_ids, *, heat_score, similarity):
        """Link *desire_ids* into one new cluster and set their heat_score.

        Mimics the Celery clustering pipeline's DB writes (DesireCluster inserts
        followed by a heat_score update and a flush) so the API read path can be
        verified without running the real worker.
        """
        from app.models.models import Desire, DesireCluster

        cluster_id = uuid.uuid4()
        for did in desire_ids:
            db_session.add(
                DesireCluster(
                    cluster_id=cluster_id,
                    desire_id=uuid.UUID(did),
                    similarity=similarity,
                )
            )
        for did in desire_ids:
            await db_session.execute(
                update(Desire)
                .where(Desire.id == uuid.UUID(did))
                .values(heat_score=heat_score)
            )
        await db_session.flush()

    @pytest.mark.asyncio
    async def test_similar_desires_cluster_and_elevate_heat(
        self, client: AsyncClient, db_session
    ):
        """Create 3 desires, cluster them, elevate heat, verify API returns correct data."""
        # Create 3 desires via API
        desire_ids = []
        prompts = [
            "neon fractal explosion in deep space",
            "colorful fractal burst cosmic background",
            "glowing fractal nova against dark stars",
        ]
        for prompt in prompts:
            resp = await client.post(
                "/api/v1/desires", json={"prompt_text": prompt}
            )
            assert resp.status_code == 201, f"Create failed: {resp.text}"
            desire_ids.append(resp.json()["id"])
        # Simulate the worker: one cluster containing all 3, heat raised to 3.0
        await self._simulate_clustering(
            db_session, desire_ids, heat_score=3.0, similarity=0.88
        )
        # Verify each desire via GET shows correct heat_score and cluster_count
        for did in desire_ids:
            resp = await client.get(f"/api/v1/desires/{did}")
            assert resp.status_code == 200, f"GET {did} failed: {resp.text}"
            data = resp.json()
            assert data["heat_score"] >= 3.0, (
                f"Desire {did} heat_score={data['heat_score']}, expected >= 3.0"
            )
            assert data["cluster_count"] >= 3, (
                f"Desire {did} cluster_count={data['cluster_count']}, expected >= 3"
            )

    @pytest.mark.asyncio
    async def test_lone_desire_has_default_heat(self, client: AsyncClient):
        """A single desire without clustering has heat_score=1.0 and cluster_count=0."""
        resp = await client.post(
            "/api/v1/desires",
            json={"prompt_text": "unique standalone art concept"},
        )
        assert resp.status_code == 201
        desire_id = resp.json()["id"]
        resp = await client.get(f"/api/v1/desires/{desire_id}")
        assert resp.status_code == 200
        data = resp.json()
        assert data["heat_score"] == 1.0, f"Expected heat_score=1.0, got {data['heat_score']}"
        assert data["cluster_count"] == 0, f"Expected cluster_count=0, got {data['cluster_count']}"

    @pytest.mark.asyncio
    async def test_desires_sorted_by_heat_descending(
        self, client: AsyncClient, db_session
    ):
        """When fetching desires, high-heat desires appear before low-heat ones.

        Uses individual GET since list_desires relies on PostgreSQL ANY().
        Verifies the ordering guarantee via direct heat_score comparison.
        """
        # Create a "hot" desire and cluster it
        hot_resp = await client.post(
            "/api/v1/desires",
            json={"prompt_text": "blazing hot fractal vortex"},
        )
        assert hot_resp.status_code == 201
        hot_id = hot_resp.json()["id"]
        # Simulate clustering / heat elevation for the hot desire only
        await self._simulate_clustering(
            db_session, [hot_id], heat_score=5.0, similarity=0.90
        )
        # Create a "cold" desire (no clustering)
        cold_resp = await client.post(
            "/api/v1/desires",
            json={"prompt_text": "calm minimal zen garden"},
        )
        assert cold_resp.status_code == 201
        cold_id = cold_resp.json()["id"]
        # Verify hot desire has higher heat than cold
        hot_data = (await client.get(f"/api/v1/desires/{hot_id}")).json()
        cold_data = (await client.get(f"/api/v1/desires/{cold_id}")).json()
        assert hot_data["heat_score"] > cold_data["heat_score"], (
            f"Hot ({hot_data['heat_score']}) should be > Cold ({cold_data['heat_score']})"
        )
# ── Fulfillment Scenario ─────────────────────────────────────
class TestFulfillmentScenario:
    """Prove desire fulfillment transitions status and links to a shader."""

    @staticmethod
    async def _insert_shader(db_session, *, title, glsl_code, status):
        """Insert a Shader row directly into the test DB and return its UUID.

        Bypasses the API so tests can control the shader's status ("published"
        vs "draft") without going through the shader publishing flow.
        """
        from app.models.models import Shader

        shader_id = uuid.uuid4()
        db_session.add(
            Shader(
                id=shader_id,
                title=title,
                glsl_code=glsl_code,
                status=status,
                author_id=None,
            )
        )
        await db_session.flush()
        return shader_id

    @pytest.mark.asyncio
    async def test_fulfill_desire_transitions_status(
        self, client: AsyncClient, db_session
    ):
        """Create desire, insert published shader, fulfill, verify status transition."""
        # Create desire
        resp = await client.post(
            "/api/v1/desires",
            json={"prompt_text": "ethereal particle waterfall"},
        )
        assert resp.status_code == 201
        desire_id = resp.json()["id"]
        # Insert a published shader directly in test DB
        shader_id = await self._insert_shader(
            db_session,
            title="Particle Waterfall",
            glsl_code="void mainImage(out vec4 c, in vec2 f) { c = vec4(0); }",
            status="published",
        )
        # Fulfill the desire
        resp = await client.post(
            f"/api/v1/desires/{desire_id}/fulfill",
            params={"shader_id": str(shader_id)},
        )
        assert resp.status_code == 200, f"Fulfill failed: {resp.text}"
        data = resp.json()
        assert data["status"] == "fulfilled"
        assert data["desire_id"] == desire_id
        assert data["shader_id"] == str(shader_id)
        # Verify read-back shows fulfilled status and linked shader
        resp = await client.get(f"/api/v1/desires/{desire_id}")
        assert resp.status_code == 200
        data = resp.json()
        assert data["status"] == "fulfilled"
        assert data["fulfilled_by_shader"] == str(shader_id)

    @pytest.mark.asyncio
    async def test_fulfill_requires_published_shader(
        self, client: AsyncClient, db_session
    ):
        """Fulfilling with a draft shader returns 400."""
        # Create desire
        resp = await client.post(
            "/api/v1/desires",
            json={"prompt_text": "glitch art mosaic pattern"},
        )
        assert resp.status_code == 201
        desire_id = resp.json()["id"]
        # Insert a draft (unpublished) shader
        shader_id = await self._insert_shader(
            db_session,
            title="Draft Mosaic",
            glsl_code="void mainImage(out vec4 c, in vec2 f) { c = vec4(1); }",
            status="draft",
        )
        # Attempt fulfill — should fail
        resp = await client.post(
            f"/api/v1/desires/{desire_id}/fulfill",
            params={"shader_id": str(shader_id)},
        )
        assert resp.status_code == 400, f"Expected 400, got {resp.status_code}: {resp.text}"
        assert "published" in resp.json()["detail"].lower()

    @pytest.mark.asyncio
    async def test_fulfill_already_fulfilled_returns_400(
        self, client: AsyncClient, db_session
    ):
        """Fulfilling an already-fulfilled desire returns 400."""
        # Create desire
        resp = await client.post(
            "/api/v1/desires",
            json={"prompt_text": "recursive mirror tunnel"},
        )
        assert resp.status_code == 201
        desire_id = resp.json()["id"]
        # Insert published shader
        shader_id = await self._insert_shader(
            db_session,
            title="Mirror Tunnel",
            glsl_code="void mainImage(out vec4 c, in vec2 f) { c = vec4(0.5); }",
            status="published",
        )
        # First fulfill — should succeed
        resp = await client.post(
            f"/api/v1/desires/{desire_id}/fulfill",
            params={"shader_id": str(shader_id)},
        )
        assert resp.status_code == 200
        # Second fulfill — should fail (desire is no longer open)
        resp = await client.post(
            f"/api/v1/desires/{desire_id}/fulfill",
            params={"shader_id": str(shader_id)},
        )
        assert resp.status_code == 400, f"Expected 400, got {resp.status_code}: {resp.text}"
        assert "not open" in resp.json()["detail"].lower()
# ── MCP Field Passthrough (source-level) ─────────────────────
class TestMCPFieldPassthrough:
"""Verify MCP server tools pass through all required fields via source inspection.
The MCP server runs as a separate process and can't be tested through
FastAPI TestClient. These tests verify the source code structure to ensure
field passthrough is correct.
"""
@classmethod
def _read_mcp_server_source(cls) -> str:
"""Read the MCP server source file."""
# From services/api/tests/ → up 3 to services/ → mcp/server.py
mcp_path = Path(__file__).resolve().parent.parent.parent / "mcp" / "server.py"
assert mcp_path.exists(), f"MCP server.py not found at {mcp_path}"
return mcp_path.read_text()
def test_get_desire_queue_includes_cluster_fields(self):
"""get_desire_queue maps cluster_count, heat_score, style_hints, fulfilled_by_shader."""
source = self._read_mcp_server_source()
# Verify get_desire_queue function exists
assert "async def get_desire_queue" in source, "get_desire_queue function not found"
# Extract the function body (from def to next @mcp or end)
fn_start = source.index("async def get_desire_queue")
# Find next top-level decorator or end of file
next_decorator = source.find("\n@mcp.", fn_start + 1)
if next_decorator == -1:
fn_body = source[fn_start:]
else:
fn_body = source[fn_start:next_decorator]
required_fields = ["cluster_count", "heat_score", "style_hints", "fulfilled_by_shader"]
for field in required_fields:
assert field in fn_body, (
f"get_desire_queue missing field '{field}' in response mapping"
)
def test_fulfill_desire_tool_exists(self):
"""fulfill_desire function exists and uses api_post_with_params."""
source = self._read_mcp_server_source()
assert "async def fulfill_desire" in source, "fulfill_desire function not found"
# Extract function body
fn_start = source.index("async def fulfill_desire")
next_decorator = source.find("\n@mcp.", fn_start + 1)
if next_decorator == -1:
fn_body = source[fn_start:]
else:
fn_body = source[fn_start:next_decorator]
assert "api_post_with_params" in fn_body, (
"fulfill_desire should call api_post_with_params"
)
def test_fulfill_desire_returns_structured_response(self):
"""fulfill_desire returns JSON with status, desire_id, shader_id."""
source = self._read_mcp_server_source()
fn_start = source.index("async def fulfill_desire")
next_decorator = source.find("\n@mcp.", fn_start + 1)
if next_decorator == -1:
fn_body = source[fn_start:]
else:
fn_body = source[fn_start:next_decorator]
# Check the success-path return contains the required fields
required_keys = ['"status"', '"desire_id"', '"shader_id"']
for key in required_keys:
assert key in fn_body, (
f"fulfill_desire response missing key {key}"
)
def test_submit_shader_accepts_fulfills_desire_id(self):
"""submit_shader accepts fulfills_desire_id parameter and passes it to the API."""
source = self._read_mcp_server_source()
assert "async def submit_shader" in source, "submit_shader function not found"
fn_start = source.index("async def submit_shader")
next_decorator = source.find("\n@mcp.", fn_start + 1)
if next_decorator == -1:
fn_body = source[fn_start:]
else:
fn_body = source[fn_start:next_decorator]
# Verify parameter exists in function signature
assert "fulfills_desire_id" in fn_body, (
"submit_shader should accept fulfills_desire_id parameter"
)
# Verify it's passed to the payload
assert 'payload["fulfills_desire_id"]' in fn_body or \
'"fulfills_desire_id"' in fn_body, (
"submit_shader should include fulfills_desire_id in the API payload"
)