fractafrag/services/api/app/schemas/schemas.py
John Lightner c4b8c0fe38 Tracks B+C+D: Auth system, renderer, full frontend shell
Track B — Auth & User System (complete):
- User registration with bcrypt + Turnstile verification
- JWT access/refresh token flow with httpOnly cookie rotation
- Redis refresh token blocklist for logout
- User profile + settings update endpoints (username, email)
- API key generation with bcrypt hashing (ff_key_ prefix)
- BYOK key management with AES-256-GCM encryption at rest
- Free tier rate limiting (5 shaders/month)
- Tier-gated endpoints (Pro/Studio for BYOK, API keys, bounty posting)

Track C — Shader Submission & Renderer (complete):
- GLSL validator: entry point check, banned extensions, infinite loop detection,
  brace balancing, loop bound warnings, code length limits
- Puppeteer/headless Chromium renderer with Shadertoy-compatible uniform injection
  (iTime, iResolution, iMouse), WebGL2 with SwiftShader fallback
- Shader compilation error detection via page title signaling
- Thumbnail capture at t=1s, preview frame at t=duration
- Renderer client service for API→renderer HTTP communication
- Shader submission pipeline: validate GLSL → create record → enqueue render job
- Desire fulfillment linking on shader submit
- Re-validation and re-render on shader code update
- Fork endpoint copies code, tags, metadata, enqueues new render

Track D — Frontend Shell (complete):
- React 18 + Vite + TypeScript + Tailwind CSS + TanStack Query + Zustand
- Dark theme with custom fracta color palette and surface tones
- Responsive layout with sticky navbar, gradient branding
- Auth: Login + Register pages with JWT token management
- API client with automatic 401 refresh interceptor
- ShaderCanvas: Full WebGL2 renderer component with Shadertoy uniforms,
  mouse tracking, ResizeObserver, debounced recompilation, error callbacks
- GLSL Editor: Split pane (code textarea + live preview), 400ms debounced
  preview, metadata panel (description, tags, type), GLSL validation errors,
  shader publish flow, fork-from-existing support
- Feed: Infinite scroll with IntersectionObserver sentinel, dwell time tracking,
  skeleton loading states, empty state with CTA
- Explore: Search + tag filter + sort tabs (trending/new/top), grid layout
- ShaderDetail: Full-screen preview, vote controls, view source toggle, fork button
- Bounties: Desire queue list sorted by heat score, status badges, tip display
- BountyDetail: Single desire view with style hints, fulfill CTA
- Profile: User header with avatar initial, shader grid
- Settings: Account info, API key management (create/revoke/copy), subscription tiers
- Generate: AI generation UI stub with prompt input, style controls, example prompts

76 files, ~5,700 lines of application code.
2026-03-24 20:56:42 -05:00

216 lines
7.5 KiB
Python

"""Fractafrag — Pydantic Request/Response Schemas."""
from __future__ import annotations

from datetime import datetime
from typing import Any, Optional
from uuid import UUID

from pydantic import BaseModel, ConfigDict, EmailStr, Field
# ════════════════════════════════════════════════════════════
# AUTH
# ════════════════════════════════════════════════════════════
class UserRegister(BaseModel):
    """Request body for account registration."""
    # URL-safe handle: letters, digits, underscore, hyphen only.
    username: str = Field(..., min_length=3, max_length=30, pattern=r"^[a-zA-Z0-9_-]+$")
    email: EmailStr
    # Plaintext here; presumably hashed server-side (commit notes mention bcrypt) — confirm in auth route.
    password: str = Field(..., min_length=8, max_length=128)
    # Cloudflare Turnstile CAPTCHA token, verified server-side.
    turnstile_token: str
class UserLogin(BaseModel):
    """Request body for email/password login."""
    email: EmailStr
    # No length constraints on login: validation happens against the stored hash.
    password: str
    # Cloudflare Turnstile CAPTCHA token, verified server-side.
    turnstile_token: str
class TokenResponse(BaseModel):
    """OAuth2-style bearer token response (access token only; refresh token
    is presumably delivered via httpOnly cookie per the commit notes — confirm)."""
    access_token: str
    token_type: str = "bearer"
class UserPublic(BaseModel):
    """Publicly visible user profile fields, safe to expose to any caller."""
    # from_attributes allows construction directly from ORM row objects.
    model_config = ConfigDict(from_attributes=True)
    id: UUID
    username: str
    role: str
    subscription_tier: str
    is_verified_creator: bool
    created_at: datetime
class UserMe(UserPublic):
    """Extends UserPublic with private fields — only returned to the
    authenticated user about themselves (email, credits, trust tier)."""
    email: str
    ai_credits_remaining: int
    # NOTE(review): trust_tier semantics not visible here; likely relates to rate limits — confirm.
    trust_tier: str
    last_active_at: Optional[datetime] = None
class UserUpdate(BaseModel):
    """Partial profile update; omitted (None) fields are left unchanged.
    Username constraints mirror UserRegister so updates can't bypass them."""
    username: Optional[str] = Field(None, min_length=3, max_length=30, pattern=r"^[a-zA-Z0-9_-]+$")
    email: Optional[EmailStr] = None
class ByokKeysUpdate(BaseModel):
    """Bring Your Own Key — encrypted API keys for AI providers.

    Keys arrive in plaintext and are presumably encrypted at rest elsewhere
    (commit notes mention AES-256-GCM) — confirm in the settings service.
    None means "do not change this provider's key".
    """
    anthropic_key: Optional[str] = Field(None, description="Anthropic API key")
    openai_key: Optional[str] = Field(None, description="OpenAI API key")
    ollama_endpoint: Optional[str] = Field(None, description="Ollama endpoint URL")
# ════════════════════════════════════════════════════════════
# SHADERS
# ════════════════════════════════════════════════════════════
class ShaderCreate(BaseModel):
    """Request body for submitting a new shader."""
    title: str = Field(..., min_length=1, max_length=120)
    description: Optional[str] = Field(None, max_length=1000)
    # GLSL source; deeper validation (entry point, banned extensions, ...)
    # happens in a separate validator per the commit notes — confirm.
    glsl_code: str = Field(..., min_length=10)
    # max_length on a list in Pydantic v2 caps the item COUNT (<= 10 tags);
    # individual tag strings are unconstrained here.
    tags: list[str] = Field(default_factory=list, max_length=10)
    # Closed set enforced by regex rather than an Enum.
    shader_type: str = Field(default="2d", pattern=r"^(2d|3d|audio-reactive)$")
    is_public: bool = True
    # Free-form style descriptor; schema not defined here — verify against renderer/AI pipeline.
    style_metadata: Optional[dict] = None
    # When set, links this submission as fulfilling an open desire/bounty.
    fulfills_desire_id: Optional[UUID] = None
class ShaderUpdate(BaseModel):
    """Partial shader update; omitted (None) fields are left unchanged.
    Constraints mirror ShaderCreate for the fields that carry them."""
    title: Optional[str] = Field(None, min_length=1, max_length=120)
    description: Optional[str] = Field(None, max_length=1000)
    glsl_code: Optional[str] = Field(None, min_length=10)
    # NOTE(review): unlike ShaderCreate, no max item count here — confirm intentional.
    tags: Optional[list[str]] = None
    is_public: Optional[bool] = None
class ShaderPublic(BaseModel):
    """Full shader representation for detail responses."""
    # from_attributes allows construction directly from ORM row objects.
    model_config = ConfigDict(from_attributes=True)
    id: UUID
    # Nullable — presumably supports deleted/anonymous authors; confirm against DB schema.
    author_id: Optional[UUID]
    title: str
    description: Optional[str]
    glsl_code: str
    is_public: bool
    is_ai_generated: bool
    ai_provider: Optional[str]
    # Asset URLs are null until the render pipeline has produced them.
    thumbnail_url: Optional[str]
    preview_url: Optional[str]
    render_status: str
    style_metadata: Optional[dict]
    tags: list[str]
    shader_type: str
    # Parent shader id when this shader was forked.
    forked_from: Optional[UUID]
    view_count: int
    score: float
    created_at: datetime
    updated_at: datetime
class ShaderFeedItem(BaseModel):
    """Lighter shader representation for feed responses.

    Drops description, render/fork/status bookkeeping, and timestamps other
    than created_at relative to ShaderPublic. Note glsl_code IS still
    included — presumably so the client can live-render feed items.
    """
    # from_attributes allows construction directly from ORM row objects.
    model_config = ConfigDict(from_attributes=True)
    id: UUID
    author_id: Optional[UUID]
    title: str
    thumbnail_url: Optional[str]
    preview_url: Optional[str]
    glsl_code: str
    tags: list[str]
    shader_type: str
    score: float
    view_count: int
    is_ai_generated: bool
    style_metadata: Optional[dict]
    created_at: datetime
# ════════════════════════════════════════════════════════════
# VOTES & ENGAGEMENT
# ════════════════════════════════════════════════════════════
class VoteCreate(BaseModel):
    """Vote on a shader: -1 (down), 0, or +1 (up).
    NOTE(review): range allows 0 — presumably "clear my vote"; confirm in the vote route."""
    value: int = Field(..., ge=-1, le=1)
class DwellReport(BaseModel):
    """Client-reported engagement signal: how long a shader was watched."""
    shader_id: UUID
    # Strictly positive seconds; zero-dwell reports are rejected.
    dwell_secs: float = Field(..., gt=0)
    # True if the viewer looped/replayed the shader.
    replayed: bool = False
    # Optional anonymous-session correlation id.
    session_id: Optional[str] = None
# ════════════════════════════════════════════════════════════
# DESIRES / BOUNTIES
# ════════════════════════════════════════════════════════════
class DesireCreate(BaseModel):
    """Request body for posting a desire (shader bounty/request)."""
    prompt_text: str = Field(..., min_length=5, max_length=500)
    # Free-form style descriptor; schema not defined here.
    style_hints: Optional[dict] = None
class DesirePublic(BaseModel):
    """Full desire/bounty representation for API responses."""
    # from_attributes allows construction directly from ORM row objects.
    model_config = ConfigDict(from_attributes=True)
    id: UUID
    author_id: Optional[UUID]
    prompt_text: str
    style_hints: Optional[dict]
    # Integer cents to avoid float currency issues.
    tip_amount_cents: int
    status: str
    # NOTE(review): heat_score formula not visible here — commit notes say queue is sorted by it.
    heat_score: float
    # Set once a submitted shader is linked as fulfilling this desire.
    fulfilled_by_shader: Optional[UUID]
    fulfilled_at: Optional[datetime]
    created_at: datetime
# ════════════════════════════════════════════════════════════
# AI GENERATION
# ════════════════════════════════════════════════════════════
class GenerateRequest(BaseModel):
    """Request body for AI shader generation."""
    prompt: str = Field(..., min_length=5, max_length=500)
    # One of: anthropic, openai, ollama — auto-selected server-side if None.
    provider: Optional[str] = None
    style_metadata: Optional[dict] = None
class GenerateStatusResponse(BaseModel):
    """Polling response for an async generation job."""
    job_id: str
    # One of: queued, generating, rendering, complete, failed.
    status: str
    # Populated once the job produces a shader record.
    shader_id: Optional[UUID] = None
    # Populated when status == "failed".
    error: Optional[str] = None
# ════════════════════════════════════════════════════════════
# API KEYS
# ════════════════════════════════════════════════════════════
class ApiKeyCreate(BaseModel):
    """Request body for creating a named API key."""
    name: str = Field(..., min_length=1, max_length=100)
class ApiKeyPublic(BaseModel):
    """API key metadata — never contains the secret portion of the key."""
    # from_attributes allows construction directly from ORM row objects.
    model_config = ConfigDict(from_attributes=True)
    id: UUID
    # Non-secret prefix shown for identification (commit notes mention "ff_key_").
    key_prefix: str
    name: Optional[str]
    trust_tier: str
    rate_limit_per_hour: int
    last_used_at: Optional[datetime]
    created_at: datetime
class ApiKeyCreated(ApiKeyPublic):
    """Returned only on creation — includes the full key (shown once).

    Only the hash is stored server-side per the commit notes, so the plaintext
    key cannot be retrieved again after this response.
    """
    full_key: str
# ════════════════════════════════════════════════════════════
# PAGINATION
# ════════════════════════════════════════════════════════════
class PaginatedResponse(BaseModel):
    """Generic cursor-paginated response envelope.

    Items are heterogeneous across endpoints (shaders, desires, keys, ...),
    so no per-item validation is applied here; ``list[Any]`` makes that
    contract explicit instead of relying on a bare, implicit ``list``.
    Runtime behavior is identical — Pydantic treats an unparameterized
    ``list`` as ``list[Any]``.
    """
    items: list[Any]
    # Opaque cursor to pass back for the next page; None on the last page.
    cursor: Optional[str] = None
    has_more: bool = False