mirror of
https://github.com/xpltdco/media-rip.git
synced 2026-04-03 02:53:58 -06:00
Full-featured self-hosted yt-dlp web frontend:
- Python 3.12+ / FastAPI backend with async SQLite, SSE transport, session isolation
- Vue 3 / TypeScript / Pinia frontend with real-time progress, theme picker
- 3 built-in themes (cyberpunk/dark/light) + drop-in custom theme system
- Admin auth (bcrypt), purge system, cookie upload, file serving
- Docker multi-stage build, GitHub Actions CI/CD
- 179 backend tests, 29 frontend tests (208 total)
Slices: S01 (Foundation), S02 (SSE+Sessions), S03 (Frontend),
S04 (Admin+Auth), S05 (Themes), S06 (Docker+CI)
124 lines
3.3 KiB
Python
"""Admin API endpoints — protected by require_admin dependency."""
|
|
|
|
from __future__ import annotations
|
|
|
|
import logging
|
|
|
|
from fastapi import APIRouter, Depends, Request
|
|
|
|
from app.dependencies import require_admin
|
|
|
|
# Module-scoped logger for admin endpoint events.
logger = logging.getLogger("mediarip.admin")

# All endpoints below are mounted under /admin and grouped under the
# "admin" tag in the generated OpenAPI docs.
router = APIRouter(prefix="/admin", tags=["admin"])
|
|
|
|
|
|
@router.get("/sessions")
async def list_sessions(
    request: Request,
    _admin: str = Depends(require_admin),
) -> dict:
    """Return every session (newest activity first) with an aggregate job count.

    Response shape: {"sessions": [...], "total": <len of list>}.
    """
    db = request.app.state.db
    # LEFT JOIN keeps sessions with zero jobs (COUNT of NULL ids is 0).
    cursor = await db.execute(
        """
        SELECT s.id, s.created_at, s.last_seen,
               COUNT(j.id) as job_count
        FROM sessions s
        LEFT JOIN jobs j ON j.session_id = s.id
        GROUP BY s.id
        ORDER BY s.last_seen DESC
        """
    )
    rows = await cursor.fetchall()

    sessions = []
    for row in rows:
        sessions.append(
            {
                "id": row["id"],
                "created_at": row["created_at"],
                "last_seen": row["last_seen"],
                "job_count": row["job_count"],
            }
        )
    return {"sessions": sessions, "total": len(sessions)}
|
|
|
|
|
|
@router.get("/storage")
async def storage_info(
    request: Request,
    _admin: str = Depends(require_admin),
) -> dict:
    """Report disk usage for the download directory plus job counts by status.

    Response shape: {"disk": {"total", "used", "free"}, "jobs_by_status": {...}}.
    """
    import shutil
    from pathlib import Path

    config = request.app.state.config
    db = request.app.state.db
    output_dir = Path(config.downloads.output_dir)

    # disk_usage returns a (total, used, free) named tuple; fall back to
    # zeros when the directory is missing or unreadable.
    try:
        total, used, free = shutil.disk_usage(output_dir)
    except OSError:
        total = used = free = 0
    disk = {"total": total, "used": used, "free": free}

    # Aggregate job counts per status in a single query.
    cursor = await db.execute(
        "SELECT status, COUNT(*) as count FROM jobs GROUP BY status"
    )
    by_status = {row["status"]: row["count"] for row in await cursor.fetchall()}

    return {"disk": disk, "jobs_by_status": by_status}
|
|
|
|
|
|
@router.get("/unsupported-urls")
async def list_unsupported_urls(
    request: Request,
    _admin: str = Depends(require_admin),
    limit: int = 100,
    offset: int = 0,
) -> dict:
    """Return a page of logged unsupported-URL extraction failures, newest first.

    Response shape: {"items": [...], "total": <table count>, "limit", "offset"}.
    """
    db = request.app.state.db
    cursor = await db.execute(
        "SELECT * FROM unsupported_urls ORDER BY created_at DESC LIMIT ? OFFSET ?",
        (limit, offset),
    )
    rows = await cursor.fetchall()

    # Project each row onto the fixed set of exposed columns.
    fields = ("id", "url", "session_id", "error", "created_at")
    items = [{name: row[name] for name in fields} for row in rows]

    # Total row count across the whole table (not just this page).
    count_cursor = await db.execute("SELECT COUNT(*) FROM unsupported_urls")
    count_row = await count_cursor.fetchone()
    total = count_row[0] if count_row else 0

    return {"items": items, "total": total, "limit": limit, "offset": offset}
|
|
|
|
|
|
@router.post("/purge")
async def manual_purge(
    request: Request,
    _admin: str = Depends(require_admin),
) -> dict:
    """Trigger an immediate purge of expired downloads and return its summary."""
    # Function-scope import — presumably to avoid an import cycle with the
    # purge service; kept in the same place as the original.
    from app.services.purge import run_purge

    state = request.app.state
    return await run_purge(state.db, state.config)
|