From 4606dce5531d2b56356fff9c90acfe9d546a2c8f Mon Sep 17 00:00:00 2001 From: John Lightner Date: Tue, 24 Mar 2026 20:20:10 -0500 Subject: [PATCH] =?UTF-8?q?feat:=20Tubearr=20=E2=80=94=20full=20project=20?= =?UTF-8?q?state=20through=20M006/S01?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Migrated git root from W:/programming/Projects/ to W:/programming/Projects/Tubearr/. Previous history preserved in Tubearr-full-backup.bundle at parent directory. Completed milestones: M001 through M005 Active: M006/S02 (Add Channel UX) --- .agents/skills/drizzle-migrations/SKILL.md | 518 ++ .agents/skills/drizzle-orm/SKILL.md | 396 ++ .../references/advanced-schemas.md | 380 ++ .../drizzle-orm/references/performance.md | 594 ++ .../drizzle-orm/references/query-patterns.md | 577 ++ .../drizzle-orm/references/vs-prisma.md | 503 ++ .../skills/fastify-best-practices/SKILL.md | 75 + .../rules/authentication.md | 521 ++ .../rules/configuration.md | 217 + .../rules/content-type.md | 387 ++ .../rules/cors-security.md | 445 ++ .../fastify-best-practices/rules/database.md | 320 + .../rules/decorators.md | 416 ++ .../rules/deployment.md | 425 ++ .../rules/error-handling.md | 412 ++ .../fastify-best-practices/rules/hooks.md | 464 ++ .../rules/http-proxy.md | 247 + .../fastify-best-practices/rules/logging.md | 402 ++ .../rules/performance.md | 425 ++ .../fastify-best-practices/rules/plugins.md | 320 + .../fastify-best-practices/rules/routes.md | 467 ++ .../fastify-best-practices/rules/schemas.md | 585 ++ .../rules/serialization.md | 475 ++ .../fastify-best-practices/rules/testing.md | 536 ++ .../rules/typescript.md | 458 ++ .../rules/websockets.md | 421 ++ .../skills/fastify-best-practices/tile.json | 11 + .agents/skills/fastify-typescript/SKILL.md | 244 + .bg-shell/manifest.json | 1 + .claude/settings.local.json | 20 + .claude/skills/drizzle-migrations/SKILL.md | 518 ++ .claude/skills/drizzle-orm/SKILL.md | 396 ++ 
.../references/advanced-schemas.md | 380 ++ .../drizzle-orm/references/performance.md | 594 ++ .../drizzle-orm/references/query-patterns.md | 577 ++ .../drizzle-orm/references/vs-prisma.md | 503 ++ .../skills/fastify-best-practices/SKILL.md | 75 + .../rules/authentication.md | 521 ++ .../rules/configuration.md | 217 + .../rules/content-type.md | 387 ++ .../rules/cors-security.md | 445 ++ .../fastify-best-practices/rules/database.md | 320 + .../rules/decorators.md | 416 ++ .../rules/deployment.md | 425 ++ .../rules/error-handling.md | 412 ++ .../fastify-best-practices/rules/hooks.md | 464 ++ .../rules/http-proxy.md | 247 + .../fastify-best-practices/rules/logging.md | 402 ++ .../rules/performance.md | 425 ++ .../fastify-best-practices/rules/plugins.md | 320 + .../fastify-best-practices/rules/routes.md | 467 ++ .../fastify-best-practices/rules/schemas.md | 585 ++ .../rules/serialization.md | 475 ++ .../fastify-best-practices/rules/testing.md | 536 ++ .../rules/typescript.md | 458 ++ .../rules/websockets.md | 421 ++ .../skills/fastify-best-practices/tile.json | 11 + .claude/skills/fastify-typescript/SKILL.md | 244 + .dockerignore | 46 + .env.example | 17 + .gitignore | 53 + Dockerfile | 66 + docker-compose.yml | 20 + drizzle.config.ts | 7 + drizzle/0000_colossal_jubilee.sql | 90 + drizzle/0001_natural_toad_men.sql | 2 + drizzle/0002_lonely_nico_minoru.sql | 1 + drizzle/0003_moaning_vertigo.sql | 3 + drizzle/0004_platform_settings.sql | 11 + drizzle/0005_monitoring_playlists.sql | 23 + drizzle/0006_rename_creators_to_channels.sql | 4 + drizzle/0007_steep_the_watchers.sql | 2 + drizzle/meta/0000_snapshot.json | 630 ++ drizzle/meta/0001_snapshot.json | 644 ++ drizzle/meta/0002_snapshot.json | 665 +++ drizzle/meta/0003_snapshot.json | 687 +++ drizzle/meta/0004_snapshot.json | 780 +++ drizzle/meta/0005_snapshot.json | 945 +++ drizzle/meta/0006_snapshot.json | 951 +++ drizzle/meta/0007_snapshot.json | 961 +++ drizzle/meta/_journal.json | 62 + package-lock.json | 5266 
+++++++++++++++++ package.json | 47 + scripts/docker-smoke-test.sh | 239 + skills-lock.json | 25 + src/__tests__/auth-model.test.ts | 252 + src/__tests__/back-catalog-import.test.ts | 452 ++ src/__tests__/channel-counts.test.ts | 299 + src/__tests__/channel.test.ts | 451 ++ src/__tests__/content-api.test.ts | 309 + src/__tests__/cookie-manager.test.ts | 229 + src/__tests__/database.test.ts | 157 + src/__tests__/download-api.test.ts | 227 + src/__tests__/download.test.ts | 764 +++ src/__tests__/e2e-flow.test.ts | 407 ++ src/__tests__/file-organizer.test.ts | 216 + src/__tests__/format-profile-api.test.ts | 357 ++ src/__tests__/format-profile.test.ts | 544 ++ src/__tests__/health-service.test.ts | 326 + src/__tests__/history-api.test.ts | 255 + src/__tests__/history-repository.test.ts | 311 + src/__tests__/monitoring-api.test.ts | 421 ++ src/__tests__/notification-api.test.ts | 355 ++ .../notification-queue-integration.test.ts | 228 + src/__tests__/notification-service.test.ts | 338 ++ src/__tests__/platform-settings-api.test.ts | 328 + src/__tests__/playlist-api.test.ts | 310 + src/__tests__/quality-analyzer.test.ts | 375 ++ src/__tests__/queue-api.test.ts | 423 ++ src/__tests__/queue-repository.test.ts | 383 ++ src/__tests__/queue-service.test.ts | 593 ++ src/__tests__/scan-api.test.ts | 375 ++ src/__tests__/scheduler.test.ts | 884 +++ src/__tests__/server.integration.test.ts | 208 + src/__tests__/sources.test.ts | 880 +++ src/__tests__/subtitle-download.test.ts | 249 + src/__tests__/system-settings.test.ts | 284 + src/__tests__/yt-dlp.test.ts | 269 + src/config/index.ts | 117 + src/db/index.ts | 95 + src/db/migrate.ts | 37 + src/db/repositories/channel-repository.ts | 193 + src/db/repositories/content-repository.ts | 364 ++ .../repositories/format-profile-repository.ts | 186 + src/db/repositories/history-repository.ts | 158 + .../repositories/notification-repository.ts | 143 + .../platform-settings-repository.ts | 119 + src/db/repositories/playlist-repository.ts | 
172 + src/db/repositories/queue-repository.ts | 283 + .../repositories/system-config-repository.ts | 101 + src/db/schema/channels.ts | 31 + src/db/schema/content.ts | 50 + src/db/schema/history.ts | 21 + src/db/schema/index.ts | 8 + src/db/schema/notifications.ts | 20 + src/db/schema/platform-settings.ts | 25 + src/db/schema/playlists.ts | 37 + src/db/schema/queue.ts | 24 + src/db/schema/system.ts | 14 + src/frontend/index.html | 13 + src/frontend/public/favicon.svg | 7 + src/frontend/public/logo.svg | 7 + src/frontend/src/App.tsx | 44 + src/frontend/src/api/client.ts | 75 + src/frontend/src/api/hooks/useActivity.ts | 61 + src/frontend/src/api/hooks/useChannels.ts | 145 + src/frontend/src/api/hooks/useContent.ts | 76 + .../src/api/hooks/useFormatProfiles.ts | 81 + src/frontend/src/api/hooks/useLibrary.ts | 46 + .../src/api/hooks/useNotifications.ts | 96 + .../src/api/hooks/usePlatformSettings.ts | 58 + src/frontend/src/api/hooks/usePlaylists.ts | 50 + src/frontend/src/api/hooks/useQueue.ts | 59 + src/frontend/src/api/hooks/useSystem.ts | 72 + .../src/components/AddChannelModal.tsx | 380 ++ src/frontend/src/components/FilterBar.tsx | 80 + .../src/components/FormatProfileForm.tsx | 319 + src/frontend/src/components/HealthStatus.tsx | 351 ++ src/frontend/src/components/Modal.tsx | 170 + .../src/components/NotificationForm.tsx | 280 + src/frontend/src/components/Pagination.tsx | 146 + src/frontend/src/components/PlatformBadge.tsx | 46 + .../src/components/PlatformSettingsForm.tsx | 314 + src/frontend/src/components/ProgressBar.tsx | 73 + src/frontend/src/components/QualityLabel.tsx | 110 + src/frontend/src/components/SearchBar.tsx | 96 + src/frontend/src/components/Sidebar.tsx | 138 + src/frontend/src/components/StatusBadge.tsx | 80 + src/frontend/src/components/Table.tsx | 151 + src/frontend/src/components/TubearrLogo.tsx | 64 + src/frontend/src/main.tsx | 29 + src/frontend/src/pages/Activity.tsx | 471 ++ src/frontend/src/pages/ChannelDetail.tsx | 1291 ++++ 
src/frontend/src/pages/Channels.tsx | 334 ++ src/frontend/src/pages/Library.tsx | 388 ++ src/frontend/src/pages/Login.tsx | 184 + src/frontend/src/pages/Queue.tsx | 357 ++ src/frontend/src/pages/Settings.tsx | 1397 +++++ src/frontend/src/pages/System.tsx | 261 + src/frontend/src/styles/global.css | 148 + src/frontend/src/styles/theme.css | 85 + src/frontend/src/utils/format.ts | 10 + src/frontend/tsconfig.json | 20 + src/frontend/vite.config.ts | 24 + src/index.ts | 190 + src/server/index.ts | 155 + src/server/middleware/auth.ts | 176 + src/server/middleware/error-handler.ts | 103 + src/server/routes/channel.ts | 353 ++ src/server/routes/content.ts | 237 + src/server/routes/download.ts | 80 + src/server/routes/format-profile.ts | 207 + src/server/routes/health.ts | 86 + src/server/routes/history.ts | 85 + src/server/routes/notification.ts | 285 + src/server/routes/platform-settings.ts | 126 + src/server/routes/playlist.ts | 122 + src/server/routes/queue.ts | 236 + src/server/routes/scan.ts | 108 + src/server/routes/system.ts | 182 + src/services/back-catalog-import.ts | 142 + src/services/cookie-manager.ts | 114 + src/services/download.ts | 306 + src/services/file-organizer.ts | 115 + src/services/health.ts | 212 + src/services/notification.ts | 192 + src/services/quality-analyzer.ts | 208 + src/services/queue.ts | 430 ++ src/services/rate-limiter.ts | 133 + src/services/scheduler.ts | 368 ++ src/sources/platform-source.ts | 115 + src/sources/soundcloud.ts | 103 + src/sources/youtube.ts | 261 + src/sources/yt-dlp.ts | 195 + src/types/api.ts | 82 + src/types/index.ts | 200 + tsconfig.json | 23 + vitest.config.ts | 16 + 218 files changed, 64040 insertions(+) create mode 100644 .agents/skills/drizzle-migrations/SKILL.md create mode 100644 .agents/skills/drizzle-orm/SKILL.md create mode 100644 .agents/skills/drizzle-orm/references/advanced-schemas.md create mode 100644 .agents/skills/drizzle-orm/references/performance.md create mode 100644 
.agents/skills/drizzle-orm/references/query-patterns.md create mode 100644 .agents/skills/drizzle-orm/references/vs-prisma.md create mode 100644 .agents/skills/fastify-best-practices/SKILL.md create mode 100644 .agents/skills/fastify-best-practices/rules/authentication.md create mode 100644 .agents/skills/fastify-best-practices/rules/configuration.md create mode 100644 .agents/skills/fastify-best-practices/rules/content-type.md create mode 100644 .agents/skills/fastify-best-practices/rules/cors-security.md create mode 100644 .agents/skills/fastify-best-practices/rules/database.md create mode 100644 .agents/skills/fastify-best-practices/rules/decorators.md create mode 100644 .agents/skills/fastify-best-practices/rules/deployment.md create mode 100644 .agents/skills/fastify-best-practices/rules/error-handling.md create mode 100644 .agents/skills/fastify-best-practices/rules/hooks.md create mode 100644 .agents/skills/fastify-best-practices/rules/http-proxy.md create mode 100644 .agents/skills/fastify-best-practices/rules/logging.md create mode 100644 .agents/skills/fastify-best-practices/rules/performance.md create mode 100644 .agents/skills/fastify-best-practices/rules/plugins.md create mode 100644 .agents/skills/fastify-best-practices/rules/routes.md create mode 100644 .agents/skills/fastify-best-practices/rules/schemas.md create mode 100644 .agents/skills/fastify-best-practices/rules/serialization.md create mode 100644 .agents/skills/fastify-best-practices/rules/testing.md create mode 100644 .agents/skills/fastify-best-practices/rules/typescript.md create mode 100644 .agents/skills/fastify-best-practices/rules/websockets.md create mode 100644 .agents/skills/fastify-best-practices/tile.json create mode 100644 .agents/skills/fastify-typescript/SKILL.md create mode 100644 .bg-shell/manifest.json create mode 100644 .claude/settings.local.json create mode 100644 .claude/skills/drizzle-migrations/SKILL.md create mode 100644 .claude/skills/drizzle-orm/SKILL.md create mode 
100644 .claude/skills/drizzle-orm/references/advanced-schemas.md create mode 100644 .claude/skills/drizzle-orm/references/performance.md create mode 100644 .claude/skills/drizzle-orm/references/query-patterns.md create mode 100644 .claude/skills/drizzle-orm/references/vs-prisma.md create mode 100644 .claude/skills/fastify-best-practices/SKILL.md create mode 100644 .claude/skills/fastify-best-practices/rules/authentication.md create mode 100644 .claude/skills/fastify-best-practices/rules/configuration.md create mode 100644 .claude/skills/fastify-best-practices/rules/content-type.md create mode 100644 .claude/skills/fastify-best-practices/rules/cors-security.md create mode 100644 .claude/skills/fastify-best-practices/rules/database.md create mode 100644 .claude/skills/fastify-best-practices/rules/decorators.md create mode 100644 .claude/skills/fastify-best-practices/rules/deployment.md create mode 100644 .claude/skills/fastify-best-practices/rules/error-handling.md create mode 100644 .claude/skills/fastify-best-practices/rules/hooks.md create mode 100644 .claude/skills/fastify-best-practices/rules/http-proxy.md create mode 100644 .claude/skills/fastify-best-practices/rules/logging.md create mode 100644 .claude/skills/fastify-best-practices/rules/performance.md create mode 100644 .claude/skills/fastify-best-practices/rules/plugins.md create mode 100644 .claude/skills/fastify-best-practices/rules/routes.md create mode 100644 .claude/skills/fastify-best-practices/rules/schemas.md create mode 100644 .claude/skills/fastify-best-practices/rules/serialization.md create mode 100644 .claude/skills/fastify-best-practices/rules/testing.md create mode 100644 .claude/skills/fastify-best-practices/rules/typescript.md create mode 100644 .claude/skills/fastify-best-practices/rules/websockets.md create mode 100644 .claude/skills/fastify-best-practices/tile.json create mode 100644 .claude/skills/fastify-typescript/SKILL.md create mode 100644 .dockerignore create mode 100644 
.env.example create mode 100644 .gitignore create mode 100644 Dockerfile create mode 100644 docker-compose.yml create mode 100644 drizzle.config.ts create mode 100644 drizzle/0000_colossal_jubilee.sql create mode 100644 drizzle/0001_natural_toad_men.sql create mode 100644 drizzle/0002_lonely_nico_minoru.sql create mode 100644 drizzle/0003_moaning_vertigo.sql create mode 100644 drizzle/0004_platform_settings.sql create mode 100644 drizzle/0005_monitoring_playlists.sql create mode 100644 drizzle/0006_rename_creators_to_channels.sql create mode 100644 drizzle/0007_steep_the_watchers.sql create mode 100644 drizzle/meta/0000_snapshot.json create mode 100644 drizzle/meta/0001_snapshot.json create mode 100644 drizzle/meta/0002_snapshot.json create mode 100644 drizzle/meta/0003_snapshot.json create mode 100644 drizzle/meta/0004_snapshot.json create mode 100644 drizzle/meta/0005_snapshot.json create mode 100644 drizzle/meta/0006_snapshot.json create mode 100644 drizzle/meta/0007_snapshot.json create mode 100644 drizzle/meta/_journal.json create mode 100644 package-lock.json create mode 100644 package.json create mode 100644 scripts/docker-smoke-test.sh create mode 100644 skills-lock.json create mode 100644 src/__tests__/auth-model.test.ts create mode 100644 src/__tests__/back-catalog-import.test.ts create mode 100644 src/__tests__/channel-counts.test.ts create mode 100644 src/__tests__/channel.test.ts create mode 100644 src/__tests__/content-api.test.ts create mode 100644 src/__tests__/cookie-manager.test.ts create mode 100644 src/__tests__/database.test.ts create mode 100644 src/__tests__/download-api.test.ts create mode 100644 src/__tests__/download.test.ts create mode 100644 src/__tests__/e2e-flow.test.ts create mode 100644 src/__tests__/file-organizer.test.ts create mode 100644 src/__tests__/format-profile-api.test.ts create mode 100644 src/__tests__/format-profile.test.ts create mode 100644 src/__tests__/health-service.test.ts create mode 100644 
src/__tests__/history-api.test.ts create mode 100644 src/__tests__/history-repository.test.ts create mode 100644 src/__tests__/monitoring-api.test.ts create mode 100644 src/__tests__/notification-api.test.ts create mode 100644 src/__tests__/notification-queue-integration.test.ts create mode 100644 src/__tests__/notification-service.test.ts create mode 100644 src/__tests__/platform-settings-api.test.ts create mode 100644 src/__tests__/playlist-api.test.ts create mode 100644 src/__tests__/quality-analyzer.test.ts create mode 100644 src/__tests__/queue-api.test.ts create mode 100644 src/__tests__/queue-repository.test.ts create mode 100644 src/__tests__/queue-service.test.ts create mode 100644 src/__tests__/scan-api.test.ts create mode 100644 src/__tests__/scheduler.test.ts create mode 100644 src/__tests__/server.integration.test.ts create mode 100644 src/__tests__/sources.test.ts create mode 100644 src/__tests__/subtitle-download.test.ts create mode 100644 src/__tests__/system-settings.test.ts create mode 100644 src/__tests__/yt-dlp.test.ts create mode 100644 src/config/index.ts create mode 100644 src/db/index.ts create mode 100644 src/db/migrate.ts create mode 100644 src/db/repositories/channel-repository.ts create mode 100644 src/db/repositories/content-repository.ts create mode 100644 src/db/repositories/format-profile-repository.ts create mode 100644 src/db/repositories/history-repository.ts create mode 100644 src/db/repositories/notification-repository.ts create mode 100644 src/db/repositories/platform-settings-repository.ts create mode 100644 src/db/repositories/playlist-repository.ts create mode 100644 src/db/repositories/queue-repository.ts create mode 100644 src/db/repositories/system-config-repository.ts create mode 100644 src/db/schema/channels.ts create mode 100644 src/db/schema/content.ts create mode 100644 src/db/schema/history.ts create mode 100644 src/db/schema/index.ts create mode 100644 src/db/schema/notifications.ts create mode 100644 
src/db/schema/platform-settings.ts create mode 100644 src/db/schema/playlists.ts create mode 100644 src/db/schema/queue.ts create mode 100644 src/db/schema/system.ts create mode 100644 src/frontend/index.html create mode 100644 src/frontend/public/favicon.svg create mode 100644 src/frontend/public/logo.svg create mode 100644 src/frontend/src/App.tsx create mode 100644 src/frontend/src/api/client.ts create mode 100644 src/frontend/src/api/hooks/useActivity.ts create mode 100644 src/frontend/src/api/hooks/useChannels.ts create mode 100644 src/frontend/src/api/hooks/useContent.ts create mode 100644 src/frontend/src/api/hooks/useFormatProfiles.ts create mode 100644 src/frontend/src/api/hooks/useLibrary.ts create mode 100644 src/frontend/src/api/hooks/useNotifications.ts create mode 100644 src/frontend/src/api/hooks/usePlatformSettings.ts create mode 100644 src/frontend/src/api/hooks/usePlaylists.ts create mode 100644 src/frontend/src/api/hooks/useQueue.ts create mode 100644 src/frontend/src/api/hooks/useSystem.ts create mode 100644 src/frontend/src/components/AddChannelModal.tsx create mode 100644 src/frontend/src/components/FilterBar.tsx create mode 100644 src/frontend/src/components/FormatProfileForm.tsx create mode 100644 src/frontend/src/components/HealthStatus.tsx create mode 100644 src/frontend/src/components/Modal.tsx create mode 100644 src/frontend/src/components/NotificationForm.tsx create mode 100644 src/frontend/src/components/Pagination.tsx create mode 100644 src/frontend/src/components/PlatformBadge.tsx create mode 100644 src/frontend/src/components/PlatformSettingsForm.tsx create mode 100644 src/frontend/src/components/ProgressBar.tsx create mode 100644 src/frontend/src/components/QualityLabel.tsx create mode 100644 src/frontend/src/components/SearchBar.tsx create mode 100644 src/frontend/src/components/Sidebar.tsx create mode 100644 src/frontend/src/components/StatusBadge.tsx create mode 100644 src/frontend/src/components/Table.tsx create mode 100644 
src/frontend/src/components/TubearrLogo.tsx create mode 100644 src/frontend/src/main.tsx create mode 100644 src/frontend/src/pages/Activity.tsx create mode 100644 src/frontend/src/pages/ChannelDetail.tsx create mode 100644 src/frontend/src/pages/Channels.tsx create mode 100644 src/frontend/src/pages/Library.tsx create mode 100644 src/frontend/src/pages/Login.tsx create mode 100644 src/frontend/src/pages/Queue.tsx create mode 100644 src/frontend/src/pages/Settings.tsx create mode 100644 src/frontend/src/pages/System.tsx create mode 100644 src/frontend/src/styles/global.css create mode 100644 src/frontend/src/styles/theme.css create mode 100644 src/frontend/src/utils/format.ts create mode 100644 src/frontend/tsconfig.json create mode 100644 src/frontend/vite.config.ts create mode 100644 src/index.ts create mode 100644 src/server/index.ts create mode 100644 src/server/middleware/auth.ts create mode 100644 src/server/middleware/error-handler.ts create mode 100644 src/server/routes/channel.ts create mode 100644 src/server/routes/content.ts create mode 100644 src/server/routes/download.ts create mode 100644 src/server/routes/format-profile.ts create mode 100644 src/server/routes/health.ts create mode 100644 src/server/routes/history.ts create mode 100644 src/server/routes/notification.ts create mode 100644 src/server/routes/platform-settings.ts create mode 100644 src/server/routes/playlist.ts create mode 100644 src/server/routes/queue.ts create mode 100644 src/server/routes/scan.ts create mode 100644 src/server/routes/system.ts create mode 100644 src/services/back-catalog-import.ts create mode 100644 src/services/cookie-manager.ts create mode 100644 src/services/download.ts create mode 100644 src/services/file-organizer.ts create mode 100644 src/services/health.ts create mode 100644 src/services/notification.ts create mode 100644 src/services/quality-analyzer.ts create mode 100644 src/services/queue.ts create mode 100644 src/services/rate-limiter.ts create mode 100644 
src/services/scheduler.ts create mode 100644 src/sources/platform-source.ts create mode 100644 src/sources/soundcloud.ts create mode 100644 src/sources/youtube.ts create mode 100644 src/sources/yt-dlp.ts create mode 100644 src/types/api.ts create mode 100644 src/types/index.ts create mode 100644 tsconfig.json create mode 100644 vitest.config.ts diff --git a/.agents/skills/drizzle-migrations/SKILL.md b/.agents/skills/drizzle-migrations/SKILL.md new file mode 100644 index 0000000..be806d0 --- /dev/null +++ b/.agents/skills/drizzle-migrations/SKILL.md @@ -0,0 +1,518 @@ +--- +name: drizzle-migrations +description: "Migration-first database development workflow using Drizzle ORM for TypeScript/J..." +version: 1.0.0 +tags: [] +progressive_disclosure: + entry_point: + summary: "Migration-first database development workflow using Drizzle ORM for TypeScript/J..." + when_to_use: "When working with drizzle-migrations or related functionality." + quick_start: "1. Review the core concepts below. 2. Apply patterns to your use case. 3. Follow best practices for implementation." +--- +# Drizzle ORM Database Migrations (TypeScript) + +Migration-first database development workflow using Drizzle ORM for TypeScript/JavaScript projects. + +## When to Use This Skill + +Use this skill when: +- Working with Drizzle ORM in TypeScript/JavaScript projects +- Need to create or modify database schema +- Want migration-first development workflow +- Setting up new database tables or columns +- Need to ensure schema consistency across environments + +## Core Principle: Migration-First Development + +**Critical Rule**: Schema changes ALWAYS start with migrations, never code-first. + +### Why Migration-First? 
+- ✅ SQL migrations are the single source of truth +- ✅ Prevents schema drift between environments +- ✅ Enables rollback and versioning +- ✅ Forces explicit schema design decisions +- ✅ TypeScript types generated from migrations +- ✅ CI/CD can validate schema changes + +### Anti-Pattern (Code-First) +❌ **WRONG**: Writing TypeScript schema first +```typescript +// DON'T DO THIS FIRST +export const users = pgTable('users', { + id: uuid('id').primaryKey(), + email: text('email').notNull(), +}); +``` + +### Correct Pattern (Migration-First) +✅ **CORRECT**: Write SQL migration first +```sql +-- drizzle/0001_add_users_table.sql +CREATE TABLE users ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + email TEXT NOT NULL UNIQUE, + created_at TIMESTAMP DEFAULT NOW() +); +``` + +## Complete Migration Workflow + +### Step 1: Design Schema in SQL Migration + +Create descriptive SQL migration file: + +```sql +-- drizzle/0001_create_school_calendars.sql +CREATE TABLE school_calendars ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + school_id UUID NOT NULL REFERENCES schools(id) ON DELETE CASCADE, + start_date DATE NOT NULL, + end_date DATE NOT NULL, + academic_year TEXT NOT NULL, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +-- Add indexes for query performance +CREATE INDEX idx_school_calendars_school_id ON school_calendars(school_id); +CREATE INDEX idx_school_calendars_academic_year ON school_calendars(academic_year); + +-- Add constraints +ALTER TABLE school_calendars + ADD CONSTRAINT check_date_range + CHECK (end_date > start_date); +``` + +**Naming Convention**: +- Use sequential numbers: `0001_`, `0002_`, etc. 
+- Descriptive names: `create_school_calendars`, `add_user_roles` +- Format: `XXXX_descriptive_name.sql` + +### Step 2: Generate TypeScript Definitions + +Drizzle Kit generates TypeScript types from SQL: + +```bash +# Generate TypeScript schema and snapshots +pnpm drizzle-kit generate + +# Or using npm +npm run db:generate +``` + +**What This Creates**: +1. TypeScript schema files (if using `drizzle-kit push`) +2. Snapshot files in `drizzle/meta/XXXX_snapshot.json` +3. Migration metadata + +### Step 3: Create Schema Snapshot + +Snapshots enable schema drift detection: + +```json +// drizzle/meta/0001_snapshot.json (auto-generated) +{ + "version": "5", + "dialect": "postgresql", + "tables": { + "school_calendars": { + "name": "school_calendars", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "school_id": { + "name": "school_id", + "type": "uuid", + "notNull": true + } + } + } + } +} +``` + +**Snapshots in Version Control**: +- ✅ Commit snapshots to git +- ✅ Enables drift detection in CI +- ✅ Documents schema history + +### Step 4: Implement TypeScript Schema + +Now write TypeScript schema that mirrors SQL migration: + +```typescript +// src/lib/db/schema/school/calendar.ts +import { pgTable, uuid, date, text, timestamp } from 'drizzle-orm/pg-core'; +import { schools } from './school'; + +export const schoolCalendars = pgTable('school_calendars', { + id: uuid('id').primaryKey().defaultRandom(), + schoolId: uuid('school_id') + .notNull() + .references(() => schools.id, { onDelete: 'cascade' }), + startDate: date('start_date').notNull(), + endDate: date('end_date').notNull(), + academicYear: text('academic_year').notNull(), + createdAt: timestamp('created_at').defaultNow(), + updatedAt: timestamp('updated_at').defaultNow(), +}); + +// Type inference +export type SchoolCalendar = typeof schoolCalendars.$inferSelect; +export type NewSchoolCalendar = typeof 
schoolCalendars.$inferInsert; +``` + +**Key Points**: +- Column names match SQL exactly: `school_id` → `'school_id'` +- TypeScript property names use camelCase: `schoolId` +- Constraints and indexes defined in SQL, not TypeScript +- Foreign keys reference other tables + +### Step 5: Organize Schemas by Domain + +Structure schemas for maintainability: + +``` +src/lib/db/schema/ +├── index.ts # Export all schemas +├── school/ +│ ├── index.ts +│ ├── district.ts +│ ├── holiday.ts +│ ├── school.ts +│ └── calendar.ts +├── providers.ts +├── cart.ts +└── users.ts +``` + +**index.ts** (export all): +```typescript +// src/lib/db/schema/index.ts +export * from './school'; +export * from './providers'; +export * from './cart'; +export * from './users'; +``` + +**school/index.ts**: +```typescript +// src/lib/db/schema/school/index.ts +export * from './district'; +export * from './holiday'; +export * from './school'; +export * from './calendar'; +``` + +### Step 6: Add Quality Check to CI + +Validate schema consistency in CI/CD: + +```yaml +# .github/workflows/quality.yml +name: Quality Checks + +on: + pull_request: + branches: [main, develop] + push: + branches: [main] + +jobs: + quality: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'pnpm' + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Check database schema drift + run: pnpm drizzle-kit check + + - name: Verify migrations (dry-run) + run: pnpm drizzle-kit push --dry-run + env: + DATABASE_URL: ${{ secrets.STAGING_DATABASE_URL }} + + - name: Run type checking + run: pnpm tsc --noEmit + + - name: Lint code + run: pnpm lint +``` + +**CI Checks Explained**: +- `drizzle-kit check`: Validates snapshots match schema +- `drizzle-kit push --dry-run`: Tests migration without applying +- Type checking: Ensures TypeScript compiles +- Linting: Enforces code style + +### Step 7: Test on Staging 
+ +Before production, test migration on staging: + +```bash +# 1. Run migration on staging +STAGING_DATABASE_URL="..." pnpm drizzle-kit push + +# 2. Verify schema +pnpm drizzle-kit check + +# 3. Test affected API routes +curl https://staging.example.com/api/schools/calendars + +# 4. Check for data integrity issues +# Run queries to verify data looks correct + +# 5. Monitor logs for errors +# Check application logs for migration-related errors +``` + +**Staging Checklist**: +- [ ] Migration runs without errors +- [ ] Schema drift check passes +- [ ] API routes using new schema work correctly +- [ ] No data integrity issues +- [ ] Application logs show no errors +- [ ] Query performance acceptable + +## Common Migration Patterns + +### Adding a Column + +```sql +-- drizzle/0005_add_user_phone.sql +ALTER TABLE users +ADD COLUMN phone TEXT; + +-- Add index if querying by phone +CREATE INDEX idx_users_phone ON users(phone); +``` + +TypeScript: +```typescript +export const users = pgTable('users', { + id: uuid('id').primaryKey(), + email: text('email').notNull(), + phone: text('phone'), // New column +}); +``` + +### Creating a Junction Table + +```sql +-- drizzle/0006_create_provider_specialties.sql +CREATE TABLE provider_specialties ( + provider_id UUID NOT NULL REFERENCES providers(id) ON DELETE CASCADE, + specialty_id UUID NOT NULL REFERENCES specialties(id) ON DELETE CASCADE, + PRIMARY KEY (provider_id, specialty_id) +); + +CREATE INDEX idx_provider_specialties_provider ON provider_specialties(provider_id); +CREATE INDEX idx_provider_specialties_specialty ON provider_specialties(specialty_id); +``` + +TypeScript: +```typescript +export const providerSpecialties = pgTable('provider_specialties', { + providerId: uuid('provider_id') + .notNull() + .references(() => providers.id, { onDelete: 'cascade' }), + specialtyId: uuid('specialty_id') + .notNull() + .references(() => specialties.id, { onDelete: 'cascade' }), +}, (table) => ({ + pk: primaryKey(table.providerId, 
table.specialtyId), +})); +``` + +### Modifying Column Type + +```sql +-- drizzle/0007_change_price_to_decimal.sql +ALTER TABLE services +ALTER COLUMN price TYPE DECIMAL(10, 2); +``` + +TypeScript: +```typescript +import { decimal } from 'drizzle-orm/pg-core'; + +export const services = pgTable('services', { + id: uuid('id').primaryKey(), + name: text('name').notNull(), + price: decimal('price', { precision: 10, scale: 2 }).notNull(), +}); +``` + +### Adding Constraints + +```sql +-- drizzle/0008_add_email_constraint.sql +ALTER TABLE users +ADD CONSTRAINT users_email_unique UNIQUE (email); + +ALTER TABLE users +ADD CONSTRAINT users_email_format CHECK (email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}$'); +``` + +## Configuration + +### drizzle.config.ts + +```typescript +import type { Config } from 'drizzle-kit'; + +export default { + schema: './src/lib/db/schema/index.ts', + out: './drizzle', + driver: 'pg', + dbCredentials: { + connectionString: process.env.DATABASE_URL!, + }, +} satisfies Config; +``` + +### package.json Scripts + +```json +{ + "scripts": { + "db:generate": "drizzle-kit generate:pg", + "db:push": "drizzle-kit push:pg", + "db:studio": "drizzle-kit studio", + "db:check": "drizzle-kit check:pg", + "db:up": "drizzle-kit up:pg" + } +} +``` + +## Migration Testing Workflow + +### Local Testing + +```bash +# 1. Create migration +echo "CREATE TABLE test (...)" > drizzle/0009_test.sql + +# 2. Generate TypeScript +pnpm db:generate + +# 3. Push to local database +pnpm db:push + +# 4. Verify schema +pnpm db:check + +# 5. Test in application +pnpm dev +# Manually test affected features + +# 6. 
Run tests +pnpm test +``` + +### Rollback Strategy + +```sql +-- drizzle/0010_add_feature.sql (up migration) +CREATE TABLE new_feature (...); + +-- drizzle/0010_add_feature_down.sql (down migration) +DROP TABLE new_feature; +``` + +Apply rollback: +```bash +# Manually run down migration +psql $DATABASE_URL -f drizzle/0010_add_feature_down.sql +``` + +## Best Practices + +### Do's +- ✅ Write SQL migrations first +- ✅ Use descriptive migration names +- ✅ Add indexes for foreign keys +- ✅ Include constraints in migrations +- ✅ Test migrations on staging before production +- ✅ Commit snapshots to version control +- ✅ Organize schemas by domain +- ✅ Use `drizzle-kit check` in CI + +### Don'ts +- ❌ Never write TypeScript schema before SQL migration +- ❌ Don't skip staging testing +- ❌ Don't modify old migrations (create new ones) +- ❌ Don't forget to add indexes +- ❌ Don't use `drizzle-kit push` in production (use proper migrations) +- ❌ Don't commit generated files without snapshots + +## Troubleshooting + +### Schema Drift Detected +**Error**: `Schema drift detected` + +**Solution**: +```bash +# Check what changed +pnpm drizzle-kit check + +# Regenerate snapshots +pnpm drizzle-kit generate + +# Review changes and commit +git add drizzle/meta/ +git commit -m "Update schema snapshots" +``` + +### Migration Fails on Staging +**Error**: Migration fails with data constraint violation + +**Solution**: +1. Rollback migration +2. Create data migration script +3. Run data migration first +4. 
Then run schema migration + +```sql +-- First: Migrate data +UPDATE users SET status = 'active' WHERE status IS NULL; + +-- Then: Add constraint +ALTER TABLE users +ALTER COLUMN status SET NOT NULL; +``` + +### TypeScript Types Out of Sync +**Error**: TypeScript types don't match database + +**Solution**: +```bash +# Regenerate everything +pnpm db:generate +pnpm tsc --noEmit + +# If still broken, check schema files +# Ensure column names match SQL exactly +``` + +## Related Skills + +- `universal-data-database-migration` - Universal migration patterns +- `toolchains-typescript-data-drizzle` - Drizzle ORM usage patterns +- `toolchains-typescript-core` - TypeScript best practices +- `universal-debugging-verification-before-completion` - Verification workflows diff --git a/.agents/skills/drizzle-orm/SKILL.md b/.agents/skills/drizzle-orm/SKILL.md new file mode 100644 index 0000000..d01fa29 --- /dev/null +++ b/.agents/skills/drizzle-orm/SKILL.md @@ -0,0 +1,396 @@ +--- +name: drizzle-orm +description: "Type-safe SQL ORM for TypeScript with zero runtime overhead" +progressive_disclosure: + entry_point: + summary: "Type-safe SQL ORM for TypeScript with zero runtime overhead" + when_to_use: "When working with drizzle-orm or related functionality." + quick_start: "1. Review the core concepts below. 2. Apply patterns to your use case. 3. Follow best practices for implementation." + references: + - advanced-schemas.md + - performance.md + - query-patterns.md + - vs-prisma.md +--- +# Drizzle ORM + +Modern TypeScript-first ORM with zero dependencies, compile-time type safety, and SQL-like syntax. Optimized for edge runtimes and serverless environments. 
+ +## Quick Start + +### Installation + +```bash +# Core ORM +npm install drizzle-orm + +# Database driver (choose one) +npm install pg # PostgreSQL +npm install mysql2 # MySQL +npm install better-sqlite3 # SQLite + +# Drizzle Kit (migrations) +npm install -D drizzle-kit +``` + +### Basic Setup + +```typescript +// db/schema.ts +import { pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: text('email').notNull().unique(), + name: text('name').notNull(), + createdAt: timestamp('created_at').defaultNow(), +}); + +// db/client.ts +import { drizzle } from 'drizzle-orm/node-postgres'; +import { Pool } from 'pg'; +import * as schema from './schema'; + +const pool = new Pool({ connectionString: process.env.DATABASE_URL }); +export const db = drizzle(pool, { schema }); +``` + +### First Query + +```typescript +import { db } from './db/client'; +import { users } from './db/schema'; +import { eq } from 'drizzle-orm'; + +// Insert +const newUser = await db.insert(users).values({ + email: 'user@example.com', + name: 'John Doe', +}).returning(); + +// Select +const allUsers = await db.select().from(users); + +// Where +const user = await db.select().from(users).where(eq(users.id, 1)); + +// Update +await db.update(users).set({ name: 'Jane Doe' }).where(eq(users.id, 1)); + +// Delete +await db.delete(users).where(eq(users.id, 1)); +``` + +## Schema Definition + +### Column Types Reference + +| PostgreSQL | MySQL | SQLite | TypeScript | +|------------|-------|--------|------------| +| `serial()` | `serial()` | `integer()` | `number` | +| `text()` | `text()` | `text()` | `string` | +| `integer()` | `int()` | `integer()` | `number` | +| `boolean()` | `boolean()` | `integer()` | `boolean` | +| `timestamp()` | `datetime()` | `integer()` | `Date` | +| `json()` | `json()` | `text()` | `unknown` | +| `uuid()` | `varchar(36)` | `text()` | `string` | + +### Common Schema Patterns + 
+```typescript +import { pgTable, serial, text, varchar, integer, boolean, timestamp, json, unique } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: varchar('email', { length: 255 }).notNull().unique(), + passwordHash: varchar('password_hash', { length: 255 }).notNull(), + role: text('role', { enum: ['admin', 'user', 'guest'] }).default('user'), + metadata: json('metadata').$type<{ theme: string; locale: string }>(), + isActive: boolean('is_active').default(true), + createdAt: timestamp('created_at').defaultNow().notNull(), + updatedAt: timestamp('updated_at').defaultNow().notNull(), +}, (table) => ({ + emailIdx: unique('email_unique_idx').on(table.email), +})); + +// Infer TypeScript types +type User = typeof users.$inferSelect; +type NewUser = typeof users.$inferInsert; +``` + +## Relations + +### One-to-Many + +```typescript +import { pgTable, serial, text, integer } from 'drizzle-orm/pg-core'; +import { relations } from 'drizzle-orm'; + +export const authors = pgTable('authors', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: text('title').notNull(), + authorId: integer('author_id').notNull().references(() => authors.id), +}); + +export const authorsRelations = relations(authors, ({ many }) => ({ + posts: many(posts), +})); + +export const postsRelations = relations(posts, ({ one }) => ({ + author: one(authors, { + fields: [posts.authorId], + references: [authors.id], + }), +})); + +// Query with relations +const authorsWithPosts = await db.query.authors.findMany({ + with: { posts: true }, +}); +``` + +### Many-to-Many + +```typescript +export const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +export const groups = pgTable('groups', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +export const usersToGroups = 
pgTable('users_to_groups', { + userId: integer('user_id').notNull().references(() => users.id), + groupId: integer('group_id').notNull().references(() => groups.id), +}, (table) => ({ + pk: primaryKey({ columns: [table.userId, table.groupId] }), +})); + +export const usersRelations = relations(users, ({ many }) => ({ + groups: many(usersToGroups), +})); + +export const groupsRelations = relations(groups, ({ many }) => ({ + users: many(usersToGroups), +})); + +export const usersToGroupsRelations = relations(usersToGroups, ({ one }) => ({ + user: one(users, { fields: [usersToGroups.userId], references: [users.id] }), + group: one(groups, { fields: [usersToGroups.groupId], references: [groups.id] }), +})); +``` + +## Queries + +### Filtering + +```typescript +import { eq, ne, gt, gte, lt, lte, like, ilike, inArray, isNull, isNotNull, and, or, between } from 'drizzle-orm'; + +// Equality +await db.select().from(users).where(eq(users.email, 'user@example.com')); + +// Comparison +await db.select().from(users).where(gt(users.id, 10)); + +// Pattern matching +await db.select().from(users).where(like(users.name, '%John%')); + +// Multiple conditions +await db.select().from(users).where( + and( + eq(users.role, 'admin'), + gt(users.createdAt, new Date('2024-01-01')) + ) +); + +// IN clause +await db.select().from(users).where(inArray(users.id, [1, 2, 3])); + +// NULL checks +await db.select().from(users).where(isNull(users.deletedAt)); +``` + +### Joins + +```typescript +import { eq } from 'drizzle-orm'; + +// Inner join +const result = await db + .select({ + user: users, + post: posts, + }) + .from(users) + .innerJoin(posts, eq(users.id, posts.authorId)); + +// Left join +const result = await db + .select({ + user: users, + post: posts, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.authorId)); + +// Multiple joins with aggregation +import { count, sql } from 'drizzle-orm'; + +const result = await db + .select({ + authorName: authors.name, + postCount: 
count(posts.id), + }) + .from(authors) + .leftJoin(posts, eq(authors.id, posts.authorId)) + .groupBy(authors.id); +``` + +### Pagination & Sorting + +```typescript +import { desc, asc } from 'drizzle-orm'; + +// Order by +await db.select().from(users).orderBy(desc(users.createdAt)); + +// Limit & offset +await db.select().from(users).limit(10).offset(20); + +// Pagination helper +function paginate(page: number, pageSize: number = 10) { + return db.select().from(users) + .limit(pageSize) + .offset(page * pageSize); +} +``` + +## Transactions + +```typescript +// Auto-rollback on error +await db.transaction(async (tx) => { + await tx.insert(users).values({ email: 'user@example.com', name: 'John' }); + await tx.insert(posts).values({ title: 'First Post', authorId: 1 }); + // If any query fails, entire transaction rolls back +}); + +// Manual control +const tx = db.transaction(async (tx) => { + const user = await tx.insert(users).values({ ... }).returning(); + + if (!user) { + tx.rollback(); + return; + } + + await tx.insert(posts).values({ authorId: user.id }); +}); +``` + +## Migrations + +### Drizzle Kit Configuration + +```typescript +// drizzle.config.ts +import type { Config } from 'drizzle-kit'; + +export default { + schema: './db/schema.ts', + out: './drizzle', + dialect: 'postgresql', + dbCredentials: { + url: process.env.DATABASE_URL!, + }, +} satisfies Config; +``` + +### Migration Workflow + +```bash +# Generate migration +npx drizzle-kit generate + +# View SQL +cat drizzle/0000_migration.sql + +# Apply migration +npx drizzle-kit migrate + +# Introspect existing database +npx drizzle-kit introspect + +# Drizzle Studio (database GUI) +npx drizzle-kit studio +``` + +### Example Migration + +```sql +-- drizzle/0000_initial.sql +CREATE TABLE IF NOT EXISTS "users" ( + "id" serial PRIMARY KEY NOT NULL, + "email" varchar(255) NOT NULL, + "name" text NOT NULL, + "created_at" timestamp DEFAULT now() NOT NULL, + CONSTRAINT "users_email_unique" UNIQUE("email") +); 
+``` + +## Navigation + +### Detailed References + +- **[🏗️ Advanced Schemas](./references/advanced-schemas.md)** - Custom types, composite keys, indexes, constraints, multi-tenant patterns. Load when designing complex database schemas. + +- **[🔍 Query Patterns](./references/query-patterns.md)** - Subqueries, CTEs, raw SQL, prepared statements, batch operations. Load when optimizing queries or handling complex filtering. + +- **[⚡ Performance](./references/performance.md)** - Connection pooling, query optimization, N+1 prevention, prepared statements, edge runtime integration. Load when scaling or optimizing database performance. + +- **[🔄 vs Prisma](./references/vs-prisma.md)** - Feature comparison, migration guide, when to choose Drizzle over Prisma. Load when evaluating ORMs or migrating from Prisma. + +## Red Flags + +**Stop and reconsider if:** +- Using `any` or `unknown` for JSON columns without type annotation +- Building raw SQL strings without using `sql` template (SQL injection risk) +- Not using transactions for multi-step data modifications +- Fetching all rows without pagination in production queries +- Missing indexes on foreign keys or frequently queried columns +- Using `select()` without specifying columns for large tables + +## Performance Benefits vs Prisma + +| Metric | Drizzle | Prisma | +|--------|---------|--------| +| **Bundle Size** | ~35KB | ~230KB | +| **Cold Start** | ~10ms | ~250ms | +| **Query Speed** | Baseline | ~2-3x slower | +| **Memory** | ~10MB | ~50MB | +| **Type Generation** | Runtime inference | Build-time generation | + +## Integration + +- **typescript-core**: Type-safe schema inference with `satisfies` +- **nextjs-core**: Server Actions, Route Handlers, Middleware integration +- **Database Migration**: Safe schema evolution patterns + +## Related Skills + +When using Drizzle, these skills enhance your workflow: +- **prisma**: Alternative ORM comparison: Drizzle vs Prisma trade-offs +- **typescript**: Advanced TypeScript 
patterns for type-safe queries +- **nextjs**: Drizzle with Next.js Server Actions and API routes +- **sqlalchemy**: SQLAlchemy patterns for Python developers learning Drizzle + +[Full documentation available in these skills if deployed in your bundle] diff --git a/.agents/skills/drizzle-orm/references/advanced-schemas.md b/.agents/skills/drizzle-orm/references/advanced-schemas.md new file mode 100644 index 0000000..909445c --- /dev/null +++ b/.agents/skills/drizzle-orm/references/advanced-schemas.md @@ -0,0 +1,380 @@ +# Advanced Schemas + +Deep dive into complex schema patterns, custom types, and database-specific features in Drizzle ORM. + +## Custom Column Types + +### Enums + +```typescript +import { pgEnum, pgTable, serial } from 'drizzle-orm/pg-core'; + +// PostgreSQL native enum +export const roleEnum = pgEnum('role', ['admin', 'user', 'guest']); + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + role: roleEnum('role').default('user'), +}); + +// MySQL/SQLite: Use text with constraints +import { mysqlTable, text } from 'drizzle-orm/mysql-core'; + +export const users = mysqlTable('users', { + role: text('role', { enum: ['admin', 'user', 'guest'] }).default('user'), +}); +``` + +### Custom JSON Types + +```typescript +import { pgTable, serial, json } from 'drizzle-orm/pg-core'; +import { z } from 'zod'; + +// Type-safe JSON with Zod +const MetadataSchema = z.object({ + theme: z.enum(['light', 'dark']), + locale: z.string(), + notifications: z.boolean(), +}); + +type Metadata = z.infer<typeof MetadataSchema>; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + metadata: json('metadata').$type<Metadata>(), +}); + +// Runtime validation +async function updateMetadata(userId: number, metadata: unknown) { + const validated = MetadataSchema.parse(metadata); + await db.update(users).set({ metadata: validated }).where(eq(users.id, userId)); +} +``` + +### Arrays + +```typescript +import { pgTable, serial, text } from 'drizzle-orm/pg-core'; + +export
const posts = pgTable('posts', { + id: serial('id').primaryKey(), + tags: text('tags').array(), +}); + +// Query array columns +import { arrayContains, arrayContained } from 'drizzle-orm'; + +await db.select().from(posts).where(arrayContains(posts.tags, ['typescript', 'drizzle'])); +``` + +## Indexes + +### Basic Indexes + +```typescript +import { pgTable, serial, text, varchar, index, uniqueIndex } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: varchar('email', { length: 255 }).notNull(), + name: text('name'), + city: text('city'), +}, (table) => ({ + emailIdx: uniqueIndex('email_idx').on(table.email), + nameIdx: index('name_idx').on(table.name), + cityNameIdx: index('city_name_idx').on(table.city, table.name), +})); +``` + +### Partial Indexes + +```typescript +import { sql } from 'drizzle-orm'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: varchar('email', { length: 255 }), + deletedAt: timestamp('deleted_at'), +}, (table) => ({ + activeEmailIdx: uniqueIndex('active_email_idx') + .on(table.email) + .where(sql`${table.deletedAt} IS NULL`), +})); +``` + +### Full-Text Search + +```typescript +import { pgTable, serial, text, index } from 'drizzle-orm/pg-core'; +import { sql } from 'drizzle-orm'; + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: text('title').notNull(), + content: text('content').notNull(), +}, (table) => ({ + searchIdx: index('search_idx').using( + 'gin', + sql`to_tsvector('english', ${table.title} || ' ' || ${table.content})` + ), +})); + +// Full-text search query +const results = await db.select().from(posts).where( + sql`to_tsvector('english', ${posts.title} || ' ' || ${posts.content}) @@ plainto_tsquery('english', 'typescript orm')` +); +``` + +## Composite Keys + +```typescript +import { pgTable, text, primaryKey } from 'drizzle-orm/pg-core'; + +export const userPreferences = pgTable('user_preferences', { 
+ userId: integer('user_id').notNull(), + key: text('key').notNull(), + value: text('value').notNull(), +}, (table) => ({ + pk: primaryKey({ columns: [table.userId, table.key] }), +})); +``` + +## Check Constraints + +```typescript +import { pgTable, serial, integer, check } from 'drizzle-orm/pg-core'; +import { sql } from 'drizzle-orm'; + +export const products = pgTable('products', { + id: serial('id').primaryKey(), + price: integer('price').notNull(), + discountPrice: integer('discount_price'), +}, (table) => ({ + priceCheck: check('price_check', sql`${table.price} > 0`), + discountCheck: check('discount_check', sql`${table.discountPrice} < ${table.price}`), +})); +``` + +## Generated Columns + +```typescript +import { pgTable, serial, text, integer } from 'drizzle-orm/pg-core'; +import { sql } from 'drizzle-orm'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + firstName: text('first_name').notNull(), + lastName: text('last_name').notNull(), + fullName: text('full_name').generatedAlwaysAs( + (): SQL => sql`${users.firstName} || ' ' || ${users.lastName}`, + { mode: 'stored' } + ), +}); +``` + +## Multi-Tenant Patterns + +### Row-Level Security (PostgreSQL) + +```typescript +import { pgTable, serial, text, uuid } from 'drizzle-orm/pg-core'; + +export const tenants = pgTable('tenants', { + id: uuid('id').defaultRandom().primaryKey(), + name: text('name').notNull(), +}); + +export const documents = pgTable('documents', { + id: serial('id').primaryKey(), + tenantId: uuid('tenant_id').notNull().references(() => tenants.id), + title: text('title').notNull(), + content: text('content'), +}); + +// Apply RLS policy (via migration SQL) +/* +ALTER TABLE documents ENABLE ROW LEVEL SECURITY; + +CREATE POLICY tenant_isolation ON documents + USING (tenant_id = current_setting('app.current_tenant_id')::uuid); +*/ + +// Set tenant context +await db.execute(sql`SET app.current_tenant_id = ${tenantId}`); +``` + +### Schema-Per-Tenant + +```typescript 
+import { drizzle } from 'drizzle-orm/node-postgres'; + +// Create schema-aware connection +function getTenantDb(tenantId: string) { + const schemaName = `tenant_${tenantId}`; + + return drizzle(pool, { + schema: { + ...schema, + }, + schemaPrefix: schemaName, + }); +} + +// Use tenant-specific DB +const tenantDb = getTenantDb('tenant123'); +await tenantDb.select().from(users); +``` + +## Database-Specific Features + +### PostgreSQL: JSONB Operations + +```typescript +import { pgTable, serial, jsonb } from 'drizzle-orm/pg-core'; +import { sql } from 'drizzle-orm'; + +export const settings = pgTable('settings', { + id: serial('id').primaryKey(), + config: jsonb('config').$type>(), +}); + +// JSONB operators +await db.select().from(settings).where( + sql`${settings.config}->>'theme' = 'dark'` +); + +// JSONB path query +await db.select().from(settings).where( + sql`${settings.config} @> '{"notifications": {"email": true}}'::jsonb` +); +``` + +### MySQL: Spatial Types + +```typescript +import { mysqlTable, serial, geometry } from 'drizzle-orm/mysql-core'; +import { sql } from 'drizzle-orm'; + +export const locations = mysqlTable('locations', { + id: serial('id').primaryKey(), + point: geometry('point', { type: 'point', srid: 4326 }), +}); + +// Spatial query +await db.select().from(locations).where( + sql`ST_Distance_Sphere(${locations.point}, POINT(${lng}, ${lat})) < 1000` +); +``` + +### SQLite: FTS5 + +```typescript +import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + +export const documents = sqliteTable('documents', { + title: text('title'), + content: text('content'), +}); + +// Create FTS5 virtual table (via migration) +/* +CREATE VIRTUAL TABLE documents_fts USING fts5(title, content, content='documents'); +*/ +``` + +## Schema Versioning + +### Migration Strategy + +```typescript +// db/schema.ts +export const schemaVersion = pgTable('schema_version', { + version: serial('version').primaryKey(), + appliedAt: timestamp('applied_at').defaultNow(), 
+}); + +// Track migrations +await db.insert(schemaVersion).values({ version: 1 }); + +// Check version +const [currentVersion] = await db.select().from(schemaVersion).orderBy(desc(schemaVersion.version)).limit(1); +``` + +## Type Inference Helpers + +```typescript +import { InferSelectModel, InferInsertModel } from 'drizzle-orm'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: text('email').notNull(), + name: text('name'), +}); + +// Generate types +export type User = InferSelectModel<typeof users>; +export type NewUser = InferInsertModel<typeof users>; + +// Partial updates +export type UserUpdate = Partial<NewUser>; + +// Nested relation types +export type UserWithPosts = User & { + posts: Post[]; +}; +``` + +## Best Practices + +### Schema Organization + +```typescript +// db/schema/users.ts +export const users = pgTable('users', { ... }); +export const userRelations = relations(users, { ... }); + +// db/schema/posts.ts +export const posts = pgTable('posts', { ... }); +export const postRelations = relations(posts, { ...
}); + +// db/schema/index.ts +export * from './users'; +export * from './posts'; + +// db/client.ts +import * as schema from './schema'; +export const db = drizzle(pool, { schema }); +``` + +### Naming Conventions + +```typescript +// ✅ Good: Consistent naming +export const users = pgTable('users', { + id: serial('id').primaryKey(), + firstName: text('first_name'), + createdAt: timestamp('created_at'), +}); + +// ❌ Bad: Inconsistent naming +export const Users = pgTable('user', { + ID: serial('userId').primaryKey(), + first_name: text('firstname'), +}); +``` + +### Default Values + +```typescript +import { sql } from 'drizzle-orm'; + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + slug: text('slug').notNull(), + views: integer('views').default(0), + createdAt: timestamp('created_at').defaultNow(), + updatedAt: timestamp('updated_at').default(sql`CURRENT_TIMESTAMP`), + uuid: uuid('uuid').defaultRandom(), +}); +``` diff --git a/.agents/skills/drizzle-orm/references/performance.md b/.agents/skills/drizzle-orm/references/performance.md new file mode 100644 index 0000000..e2c9f98 --- /dev/null +++ b/.agents/skills/drizzle-orm/references/performance.md @@ -0,0 +1,594 @@ +# Performance Optimization + +Connection pooling, query optimization, edge runtime integration, and performance best practices. 
+ +## Connection Pooling + +### PostgreSQL (node-postgres) + +```typescript +import { Pool } from 'pg'; +import { drizzle } from 'drizzle-orm/node-postgres'; + +const pool = new Pool({ + host: process.env.DB_HOST, + port: parseInt(process.env.DB_PORT || '5432'), + database: process.env.DB_NAME, + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + max: 20, // Maximum pool size + idleTimeoutMillis: 30000, // Close idle clients after 30s + connectionTimeoutMillis: 2000, // Timeout connection attempts +}); + +export const db = drizzle(pool); + +// Graceful shutdown +process.on('SIGTERM', async () => { + await pool.end(); +}); +``` + +### MySQL (mysql2) + +```typescript +import mysql from 'mysql2/promise'; +import { drizzle } from 'drizzle-orm/mysql2'; + +const poolConnection = mysql.createPool({ + host: process.env.DB_HOST, + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, + waitForConnections: true, + connectionLimit: 10, + maxIdle: 10, + idleTimeout: 60000, + queueLimit: 0, + enableKeepAlive: true, + keepAliveInitialDelay: 0, +}); + +export const db = drizzle(poolConnection); +``` + +### SQLite (better-sqlite3) + +```typescript +import Database from 'better-sqlite3'; +import { drizzle } from 'drizzle-orm/better-sqlite3'; + +const sqlite = new Database('sqlite.db', { + readonly: false, + fileMustExist: false, + timeout: 5000, + verbose: console.log, // Remove in production +}); + +// Performance pragmas +sqlite.pragma('journal_mode = WAL'); +sqlite.pragma('synchronous = normal'); +sqlite.pragma('cache_size = -64000'); // 64MB cache +sqlite.pragma('temp_store = memory'); + +export const db = drizzle(sqlite); + +process.on('exit', () => sqlite.close()); +``` + +## Query Optimization + +### Select Only Needed Columns + +```typescript +// ❌ Bad: Fetch all columns +const users = await db.select().from(users); + +// ✅ Good: Fetch only needed columns +const users = await db.select({ + id: users.id, + email: 
users.email, + name: users.name, +}).from(users); +``` + +### Use Indexes Effectively + +```typescript +import { pgTable, serial, text, varchar, index } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: varchar('email', { length: 255 }).notNull(), + city: text('city'), + status: text('status'), +}, (table) => ({ + // Index frequently queried columns + emailIdx: index('email_idx').on(table.email), + + // Composite index for common query patterns + cityStatusIdx: index('city_status_idx').on(table.city, table.status), +})); + +// Query uses index +const activeUsersInNYC = await db.select() + .from(users) + .where(and( + eq(users.city, 'NYC'), + eq(users.status, 'active') + )); +``` + +### Analyze Query Plans + +```typescript +import { sql } from 'drizzle-orm'; + +// PostgreSQL EXPLAIN +const plan = await db.execute( + sql`EXPLAIN ANALYZE SELECT * FROM ${users} WHERE ${users.email} = 'user@example.com'` +); + +console.log(plan.rows); + +// Check for: +// - "Seq Scan" (bad) vs "Index Scan" (good) +// - Actual time vs estimated time +// - Rows removed by filter +``` + +### Pagination Performance + +```typescript +// ❌ Bad: OFFSET on large datasets (gets slower as offset increases) +const page = await db.select() + .from(users) + .limit(20) + .offset(10000); // Scans 10,020 rows! 
+ +// ✅ Good: Cursor-based pagination (constant time) +const page = await db.select() + .from(users) + .where(gt(users.id, lastSeenId)) + .orderBy(asc(users.id)) + .limit(20); + +// ✅ Good: Seek method for timestamp-based pagination +const page = await db.select() + .from(posts) + .where(lt(posts.createdAt, lastSeenTimestamp)) + .orderBy(desc(posts.createdAt)) + .limit(20); +``` + +## Edge Runtime Integration + +### Cloudflare Workers (D1) + +```typescript +import { drizzle } from 'drizzle-orm/d1'; + +export default { + async fetch(request: Request, env: Env): Promise { + const db = drizzle(env.DB); + + const users = await db.select().from(users).limit(10); + + return Response.json(users); + }, +}; +``` + +### Vercel Edge (Neon) + +```typescript +import { neon } from '@neondatabase/serverless'; +import { drizzle } from 'drizzle-orm/neon-http'; + +export const runtime = 'edge'; + +export async function GET() { + const sql = neon(process.env.DATABASE_URL!); + const db = drizzle(sql); + + const users = await db.select().from(users); + + return Response.json(users); +} +``` + +### Supabase Edge Functions + +```typescript +import { createClient } from '@supabase/supabase-js'; +import { drizzle } from 'drizzle-orm/postgres-js'; +import postgres from 'postgres'; + +Deno.serve(async (req) => { + const client = postgres(Deno.env.get('DATABASE_URL')!); + const db = drizzle(client); + + const data = await db.select().from(users); + + return new Response(JSON.stringify(data), { + headers: { 'Content-Type': 'application/json' }, + }); +}); +``` + +## Caching Strategies + +### In-Memory Cache + +```typescript +import { LRUCache } from 'lru-cache'; + +const cache = new LRUCache({ + max: 500, + ttl: 1000 * 60 * 5, // 5 minutes +}); + +async function getCachedUser(id: number) { + const key = `user:${id}`; + const cached = cache.get(key); + + if (cached) return cached; + + const user = await db.select().from(users).where(eq(users.id, id)); + cache.set(key, user); + + return user; +} 
+``` + +### Redis Cache Layer + +```typescript +import { Redis } from 'ioredis'; + +const redis = new Redis(process.env.REDIS_URL); + +async function getCachedData<T>( + key: string, + fetcher: () => Promise<T>, + ttl: number = 300 +): Promise<T> { + // Try cache first + const cached = await redis.get(key); + if (cached) return JSON.parse(cached); + + // Fetch from database + const data = await fetcher(); + + // Store in cache + await redis.setex(key, ttl, JSON.stringify(data)); + + return data; +} + +// Usage +const users = await getCachedData( + 'users:all', + () => db.select().from(users), + 600 +); +``` + +### Materialized Views (PostgreSQL) + +```typescript +// Create materialized view (via migration) +/* +CREATE MATERIALIZED VIEW user_stats AS +SELECT + u.id, + u.name, + COUNT(p.id) AS post_count, + COUNT(c.id) AS comment_count +FROM users u +LEFT JOIN posts p ON p.author_id = u.id +LEFT JOIN comments c ON c.user_id = u.id +GROUP BY u.id; + +CREATE UNIQUE INDEX ON user_stats (id); +*/ + +// Define schema +export const userStats = pgMaterializedView('user_stats').as((qb) => + qb.select({ + id: users.id, + name: users.name, + postCount: sql<number>`COUNT(${posts.id})`, + commentCount: sql<number>`COUNT(${comments.id})`, + }) + .from(users) + .leftJoin(posts, eq(posts.authorId, users.id)) + .leftJoin(comments, eq(comments.userId, users.id)) + .groupBy(users.id) +); + +// Refresh materialized view +await db.execute(sql`REFRESH MATERIALIZED VIEW CONCURRENTLY user_stats`); + +// Query materialized view (fast!)
+const stats = await db.select().from(userStats); +``` + +## Batch Operations Optimization + +### Batch Insert with COPY (PostgreSQL) + +```typescript +import { copyFrom } from 'pg-copy-streams'; +import { pipeline } from 'stream/promises'; +import { Readable } from 'stream'; + +async function bulkInsert(data: any[]) { + const client = await pool.connect(); + + try { + const stream = client.query( + copyFrom(`COPY users (email, name) FROM STDIN WITH (FORMAT csv)`) + ); + + const input = Readable.from( + data.map(row => `${row.email},${row.name}\n`) + ); + + await pipeline(input, stream); + } finally { + client.release(); + } +} + +// 10x faster than batch INSERT for large datasets +``` + +### Chunk Processing + +```typescript +async function* chunked(array: T[], size: number) { + for (let i = 0; i < array.length; i += size) { + yield array.slice(i, i + size); + } +} + +async function bulkUpdate(updates: { id: number; name: string }[]) { + for await (const chunk of chunked(updates, 100)) { + await db.transaction(async (tx) => { + for (const update of chunk) { + await tx.update(users) + .set({ name: update.name }) + .where(eq(users.id, update.id)); + } + }); + } +} +``` + +## Connection Management + +### Serverless Optimization + +```typescript +// ❌ Bad: New connection per request +export async function handler() { + const pool = new Pool({ connectionString: process.env.DATABASE_URL }); + const db = drizzle(pool); + + const users = await db.select().from(users); + + await pool.end(); + return users; +} + +// ✅ Good: Reuse connection across warm starts +let cachedDb: ReturnType | null = null; + +export async function handler() { + if (!cachedDb) { + const pool = new Pool({ + connectionString: process.env.DATABASE_URL, + max: 1, // Serverless: single connection per instance + }); + cachedDb = drizzle(pool); + } + + const users = await cachedDb.select().from(users); + return users; +} +``` + +### HTTP-based Databases (Neon, Turso) + +```typescript +// No connection 
pooling needed - uses HTTP +import { neon } from '@neondatabase/serverless'; +import { drizzle } from 'drizzle-orm/neon-http'; + +const sql = neon(process.env.DATABASE_URL!); +const db = drizzle(sql); + +// Each query is a single HTTP request +const users = await db.select().from(users); +``` + +## Read Replicas + +```typescript +import { Pool } from 'pg'; +import { drizzle } from 'drizzle-orm/node-postgres'; + +// Primary (writes) +const primaryPool = new Pool({ connectionString: process.env.PRIMARY_DB_URL }); +const primaryDb = drizzle(primaryPool); + +// Replica (reads) +const replicaPool = new Pool({ connectionString: process.env.REPLICA_DB_URL }); +const replicaDb = drizzle(replicaPool); + +// Route queries appropriately +async function getUsers() { + return replicaDb.select().from(users); // Read from replica +} + +async function createUser(data: NewUser) { + return primaryDb.insert(users).values(data).returning(); // Write to primary +} +``` + +## Monitoring & Profiling + +### Query Logging + +```typescript +import { drizzle } from 'drizzle-orm/node-postgres'; + +const db = drizzle(pool, { + logger: { + logQuery(query: string, params: unknown[]) { + console.log('Query:', query); + console.log('Params:', params); + console.time('query'); + }, + }, +}); + +// Custom logger with metrics +class MetricsLogger { + private queries: Map = new Map(); + + logQuery(query: string) { + const start = Date.now(); + + return () => { + const duration = Date.now() - start; + const stats = this.queries.get(query) || { count: 0, totalTime: 0 }; + + this.queries.set(query, { + count: stats.count + 1, + totalTime: stats.totalTime + duration, + }); + + if (duration > 1000) { + console.warn(`Slow query (${duration}ms):`, query); + } + }; + } + + getStats() { + return Array.from(this.queries.entries()).map(([query, stats]) => ({ + query, + count: stats.count, + avgTime: stats.totalTime / stats.count, + })); + } +} +``` + +### Performance Monitoring + +```typescript +import { 
performance } from 'perf_hooks'; + +async function measureQuery( + name: string, + query: Promise +): Promise { + const start = performance.now(); + + try { + const result = await query; + const duration = performance.now() - start; + + console.log(`[${name}] completed in ${duration.toFixed(2)}ms`); + + return result; + } catch (error) { + const duration = performance.now() - start; + console.error(`[${name}] failed after ${duration.toFixed(2)}ms`, error); + throw error; + } +} + +// Usage +const users = await measureQuery( + 'fetchUsers', + db.select().from(users).limit(100) +); +``` + +## Database-Specific Optimizations + +### PostgreSQL + +```typescript +// Connection optimization +const pool = new Pool({ + max: 20, + application_name: 'myapp', + statement_timeout: 30000, // 30s query timeout + query_timeout: 30000, + connectionTimeoutMillis: 5000, + idle_in_transaction_session_timeout: 10000, +}); + +// Session optimization +await db.execute(sql`SET work_mem = '256MB'`); +await db.execute(sql`SET maintenance_work_mem = '512MB'`); +await db.execute(sql`SET effective_cache_size = '4GB'`); +``` + +### MySQL + +```typescript +const pool = mysql.createPool({ + waitForConnections: true, + connectionLimit: 10, + queueLimit: 0, + enableKeepAlive: true, + keepAliveInitialDelay: 0, + dateStrings: false, + supportBigNumbers: true, + bigNumberStrings: false, + multipleStatements: false, // Security + timezone: 'Z', // UTC +}); +``` + +### SQLite + +```typescript +// WAL mode for concurrent reads +sqlite.pragma('journal_mode = WAL'); + +// Optimize for performance +sqlite.pragma('synchronous = NORMAL'); +sqlite.pragma('cache_size = -64000'); // 64MB +sqlite.pragma('temp_store = MEMORY'); +sqlite.pragma('mmap_size = 30000000000'); // 30GB mmap + +// Disable for bulk inserts +const stmt = sqlite.prepare('INSERT INTO users (email, name) VALUES (?, ?)'); + +const insertMany = sqlite.transaction((users) => { + for (const user of users) { + stmt.run(user.email, user.name); + } 
+}); + +insertMany(users); // 100x faster than individual inserts +``` + +## Best Practices Summary + +1. **Always use connection pooling** in long-running processes +2. **Select only needed columns** to reduce network transfer +3. **Add indexes** on frequently queried columns and foreign keys +4. **Use cursor-based pagination** instead of OFFSET for large datasets +5. **Batch operations** when inserting/updating multiple records +6. **Cache expensive queries** with appropriate TTL +7. **Monitor slow queries** and optimize with EXPLAIN ANALYZE +8. **Use prepared statements** for frequently executed queries +9. **Implement read replicas** for high-traffic read operations +10. **Use HTTP-based databases** (Neon, Turso) for edge/serverless diff --git a/.agents/skills/drizzle-orm/references/query-patterns.md b/.agents/skills/drizzle-orm/references/query-patterns.md new file mode 100644 index 0000000..07a1ffd --- /dev/null +++ b/.agents/skills/drizzle-orm/references/query-patterns.md @@ -0,0 +1,577 @@ +# Query Patterns + +Advanced querying techniques, subqueries, CTEs, and raw SQL in Drizzle ORM. 
+ +## Subqueries + +### SELECT Subqueries + +```typescript +import { sql, eq } from 'drizzle-orm'; + +// Scalar subquery +const avgPrice = db.select({ value: avg(products.price) }).from(products); + +const expensiveProducts = await db + .select() + .from(products) + .where(gt(products.price, avgPrice)); + +// Correlated subquery +const authorsWithPostCount = await db + .select({ + author: authors, + postCount: sql`( + SELECT COUNT(*) + FROM ${posts} + WHERE ${posts.authorId} = ${authors.id} + )`, + }) + .from(authors); +``` + +### EXISTS Subqueries + +```typescript +// Find authors with posts +const authorsWithPosts = await db + .select() + .from(authors) + .where( + sql`EXISTS ( + SELECT 1 + FROM ${posts} + WHERE ${posts.authorId} = ${authors.id} + )` + ); + +// Find authors without posts +const authorsWithoutPosts = await db + .select() + .from(authors) + .where( + sql`NOT EXISTS ( + SELECT 1 + FROM ${posts} + WHERE ${posts.authorId} = ${authors.id} + )` + ); +``` + +### IN Subqueries + +```typescript +// Find users who commented +const usersWhoCommented = await db + .select() + .from(users) + .where( + sql`${users.id} IN ( + SELECT DISTINCT ${comments.userId} + FROM ${comments} + )` + ); +``` + +## Common Table Expressions (CTEs) + +### Basic CTE + +```typescript +import { sql } from 'drizzle-orm'; + +const topAuthors = db.$with('top_authors').as( + db.select({ + id: authors.id, + name: authors.name, + postCount: sql`COUNT(${posts.id})`.as('post_count'), + }) + .from(authors) + .leftJoin(posts, eq(authors.id, posts.authorId)) + .groupBy(authors.id) + .having(sql`COUNT(${posts.id}) > 10`) +); + +const result = await db + .with(topAuthors) + .select() + .from(topAuthors); +``` + +### Recursive CTE + +```typescript +// Organizational hierarchy +export const employees = pgTable('employees', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + managerId: integer('manager_id').references((): AnyPgColumn => employees.id), +}); + +const employeeHierarchy 
= db.$with('employee_hierarchy').as( + db.select({ + id: employees.id, + name: employees.name, + managerId: employees.managerId, + level: sql`1`.as('level'), + }) + .from(employees) + .where(isNull(employees.managerId)) + .unionAll( + db.select({ + id: employees.id, + name: employees.name, + managerId: employees.managerId, + level: sql`employee_hierarchy.level + 1`, + }) + .from(employees) + .innerJoin( + sql`employee_hierarchy`, + sql`${employees.managerId} = employee_hierarchy.id` + ) + ) +); + +const hierarchy = await db + .with(employeeHierarchy) + .select() + .from(employeeHierarchy); +``` + +### Multiple CTEs + +```typescript +const activeUsers = db.$with('active_users').as( + db.select().from(users).where(eq(users.isActive, true)) +); + +const recentPosts = db.$with('recent_posts').as( + db.select().from(posts).where(gt(posts.createdAt, sql`NOW() - INTERVAL '30 days'`)) +); + +const result = await db + .with(activeUsers, recentPosts) + .select({ + user: activeUsers, + post: recentPosts, + }) + .from(activeUsers) + .leftJoin(recentPosts, eq(activeUsers.id, recentPosts.authorId)); +``` + +## Raw SQL + +### Safe Raw Queries + +```typescript +import { sql } from 'drizzle-orm'; + +// Parameterized query (safe from SQL injection) +const userId = 123; +const user = await db.execute( + sql`SELECT * FROM ${users} WHERE ${users.id} = ${userId}` +); + +// Raw SQL with type safety +const result = await db.execute<{ count: number }>( + sql`SELECT COUNT(*) as count FROM ${users}` +); +``` + +### SQL Template Composition + +```typescript +// Reusable SQL fragments +function whereActive() { + return sql`${users.isActive} = true`; +} + +function whereRole(role: string) { + return sql`${users.role} = ${role}`; +} + +// Compose fragments +const admins = await db + .select() + .from(users) + .where(sql`${whereActive()} AND ${whereRole('admin')}`); +``` + +### Dynamic WHERE Clauses + +```typescript +import { and, SQL } from 'drizzle-orm'; + +interface Filters { + name?: string; 
+ role?: string; + isActive?: boolean; +} + +function buildFilters(filters: Filters): SQL | undefined { + const conditions: SQL[] = []; + + if (filters.name) { + conditions.push(like(users.name, `%${filters.name}%`)); + } + + if (filters.role) { + conditions.push(eq(users.role, filters.role)); + } + + if (filters.isActive !== undefined) { + conditions.push(eq(users.isActive, filters.isActive)); + } + + return conditions.length > 0 ? and(...conditions) : undefined; +} + +// Usage +const filters: Filters = { name: 'John', isActive: true }; +const users = await db + .select() + .from(users) + .where(buildFilters(filters)); +``` + +## Aggregations + +### Basic Aggregates + +```typescript +import { count, sum, avg, min, max, sql } from 'drizzle-orm'; + +// Count +const userCount = await db.select({ count: count() }).from(users); + +// Sum +const totalRevenue = await db.select({ total: sum(orders.amount) }).from(orders); + +// Average +const avgPrice = await db.select({ avg: avg(products.price) }).from(products); + +// Multiple aggregates +const stats = await db + .select({ + count: count(), + total: sum(orders.amount), + avg: avg(orders.amount), + min: min(orders.amount), + max: max(orders.amount), + }) + .from(orders); +``` + +### GROUP BY with HAVING + +```typescript +// Authors with more than 5 posts +const prolificAuthors = await db + .select({ + author: authors.name, + postCount: count(posts.id), + }) + .from(authors) + .leftJoin(posts, eq(authors.id, posts.authorId)) + .groupBy(authors.id) + .having(sql`COUNT(${posts.id}) > 5`); +``` + +### Window Functions + +```typescript +// Rank products by price within category +const rankedProducts = await db + .select({ + product: products, + priceRank: sql`RANK() OVER (PARTITION BY ${products.categoryId} ORDER BY ${products.price} DESC)`, + }) + .from(products); + +// Running total +const ordersWithRunningTotal = await db + .select({ + order: orders, + runningTotal: sql`SUM(${orders.amount}) OVER (ORDER BY 
${orders.createdAt})`, + }) + .from(orders); + +// Row number +const numberedUsers = await db + .select({ + user: users, + rowNum: sql`ROW_NUMBER() OVER (ORDER BY ${users.createdAt})`, + }) + .from(users); +``` + +## Prepared Statements + +### Reusable Queries + +```typescript +// Prepare once, execute many times +const getUserById = db + .select() + .from(users) + .where(eq(users.id, sql.placeholder('id'))) + .prepare('get_user_by_id'); + +// Execute with different parameters +const user1 = await getUserById.execute({ id: 1 }); +const user2 = await getUserById.execute({ id: 2 }); + +// Complex prepared statement +const searchUsers = db + .select() + .from(users) + .where( + and( + like(users.name, sql.placeholder('name')), + eq(users.role, sql.placeholder('role')) + ) + ) + .prepare('search_users'); + +const admins = await searchUsers.execute({ name: '%John%', role: 'admin' }); +``` + +## Batch Operations + +### Batch Insert + +```typescript +// Insert multiple rows +const newUsers = await db.insert(users).values([ + { email: 'user1@example.com', name: 'User 1' }, + { email: 'user2@example.com', name: 'User 2' }, + { email: 'user3@example.com', name: 'User 3' }, +]).returning(); + +// Batch with onConflictDoNothing +await db.insert(users).values(bulkUsers).onConflictDoNothing(); + +// Batch with onConflictDoUpdate (upsert) +await db.insert(users) + .values(bulkUsers) + .onConflictDoUpdate({ + target: users.email, + set: { name: sql`EXCLUDED.name` }, + }); +``` + +### Batch Update + +```typescript +// Update multiple specific rows +await db.transaction(async (tx) => { + for (const update of updates) { + await tx.update(users) + .set({ name: update.name }) + .where(eq(users.id, update.id)); + } +}); + +// Bulk update with CASE +await db.execute(sql` + UPDATE ${users} + SET ${users.role} = CASE ${users.id} + ${sql.join( + updates.map((u) => sql`WHEN ${u.id} THEN ${u.role}`), + sql.raw(' ') + )} + END + WHERE ${users.id} IN (${sql.join(updates.map((u) => u.id), 
sql.raw(', '))}) +`); +``` + +### Batch Delete + +```typescript +// Delete multiple IDs +await db.delete(users).where(inArray(users.id, [1, 2, 3, 4, 5])); + +// Conditional batch delete +await db.delete(posts).where( + and( + lt(posts.createdAt, new Date('2023-01-01')), + eq(posts.isDraft, true) + ) +); +``` + +## LATERAL Joins + +```typescript +// Get top 3 posts for each author +const authorsWithTopPosts = await db + .select({ + author: authors, + post: posts, + }) + .from(authors) + .leftJoin( + sql`LATERAL ( + SELECT * FROM ${posts} + WHERE ${posts.authorId} = ${authors.id} + ORDER BY ${posts.views} DESC + LIMIT 3 + ) AS ${posts}`, + sql`true` + ); +``` + +## UNION Queries + +```typescript +// Combine results from multiple queries +const allContent = await db + .select({ id: posts.id, title: posts.title, type: sql`'post'` }) + .from(posts) + .union( + db.select({ id: articles.id, title: articles.title, type: sql`'article'` }) + .from(articles) + ); + +// UNION ALL (includes duplicates) +const allItems = await db + .select({ id: products.id, name: products.name }) + .from(products) + .unionAll( + db.select({ id: services.id, name: services.name }).from(services) + ); +``` + +## Distinct Queries + +```typescript +// DISTINCT +const uniqueRoles = await db.selectDistinct({ role: users.role }).from(users); + +// DISTINCT ON (PostgreSQL) +const latestPostPerAuthor = await db + .selectDistinctOn([posts.authorId], { + post: posts, + }) + .from(posts) + .orderBy(posts.authorId, desc(posts.createdAt)); +``` + +## Locking Strategies + +```typescript +// FOR UPDATE (pessimistic locking) +await db.transaction(async (tx) => { + const user = await tx + .select() + .from(users) + .where(eq(users.id, userId)) + .for('update'); + + // Critical section - user row is locked + await tx.update(users) + .set({ balance: user.balance - amount }) + .where(eq(users.id, userId)); +}); + +// FOR SHARE (shared lock) +const user = await db + .select() + .from(users) + .where(eq(users.id, 
userId)) + .for('share'); + +// SKIP LOCKED +const availableTask = await db + .select() + .from(tasks) + .where(eq(tasks.status, 'pending')) + .limit(1) + .for('update', { skipLocked: true }); +``` + +## Query Builder Patterns + +### Type-Safe Query Builder + +```typescript +class UserQueryBuilder { + private query = db.select().from(users); + + whereRole(role: string) { + this.query = this.query.where(eq(users.role, role)); + return this; + } + + whereActive() { + this.query = this.query.where(eq(users.isActive, true)); + return this; + } + + orderByCreated() { + this.query = this.query.orderBy(desc(users.createdAt)); + return this; + } + + async execute() { + return await this.query; + } +} + +// Usage +const admins = await new UserQueryBuilder() + .whereRole('admin') + .whereActive() + .orderByCreated() + .execute(); +``` + +## Best Practices + +### Avoid N+1 Queries + +```typescript +// ❌ Bad: N+1 query +const authors = await db.select().from(authors); +for (const author of authors) { + author.posts = await db.select().from(posts).where(eq(posts.authorId, author.id)); +} + +// ✅ Good: Single query with join +const authorsWithPosts = await db.query.authors.findMany({ + with: { posts: true }, +}); + +// ✅ Good: Dataloader pattern +import DataLoader from 'dataloader'; + +const postLoader = new DataLoader(async (authorIds: number[]) => { + const posts = await db.select().from(posts).where(inArray(posts.authorId, authorIds)); + + const grouped = authorIds.map(id => + posts.filter(post => post.authorId === id) + ); + + return grouped; +}); +``` + +### Query Timeouts + +```typescript +// PostgreSQL statement timeout +await db.execute(sql`SET statement_timeout = '5s'`); + +// Per-query timeout +const withTimeout = async (promise: Promise, ms: number): Promise => { + const timeout = new Promise((_, reject) => + setTimeout(() => reject(new Error('Query timeout')), ms) + ); + return Promise.race([promise, timeout]); +}; + +const users = await withTimeout( + 
db.select().from(users), + 5000 +); +``` diff --git a/.agents/skills/drizzle-orm/references/vs-prisma.md b/.agents/skills/drizzle-orm/references/vs-prisma.md new file mode 100644 index 0000000..121efd2 --- /dev/null +++ b/.agents/skills/drizzle-orm/references/vs-prisma.md @@ -0,0 +1,503 @@ +# Drizzle vs Prisma Comparison + +Feature comparison, migration guide, and decision framework for choosing between Drizzle and Prisma. + +## Quick Comparison + +| Feature | Drizzle ORM | Prisma | +|---------|-------------|--------| +| **Type Safety** | ✅ Compile-time inference | ✅ Generated types | +| **Bundle Size** | **~35KB** | ~230KB | +| **Runtime** | **Zero dependencies** | Heavy runtime | +| **Cold Start** | **~10ms** | ~250ms | +| **Query Performance** | **Faster (native SQL)** | Slower (translation layer) | +| **Learning Curve** | Moderate (SQL knowledge helpful) | Easier (abstracted) | +| **Migrations** | SQL-based | Declarative schema | +| **Raw SQL** | **First-class support** | Limited support | +| **Edge Runtime** | **Fully compatible** | Limited support | +| **Ecosystem** | Growing | Mature | +| **Studio (GUI)** | ✅ Drizzle Studio | ✅ Prisma Studio | + +## When to Choose Drizzle + +### ✅ Choose Drizzle if you need: + +1. **Performance-critical applications** + - Microservices with tight latency requirements + - High-throughput APIs (>10K req/s) + - Serverless/edge functions with cold start concerns + +2. **Minimal bundle size** + - Client-side database (SQLite in browser) + - Edge runtime deployments + - Mobile applications with bundle constraints + +3. **SQL control** + - Complex queries with CTEs, window functions + - Raw SQL for specific database features + - Database-specific optimizations + +4. 
**Type inference over generation** + - No build step for type generation + - Immediate TypeScript feedback + - Schema changes reflected instantly + +### Example: Edge Function with Drizzle + +```typescript +import { neon } from '@neondatabase/serverless'; +import { drizzle } from 'drizzle-orm/neon-http'; + +export const runtime = 'edge'; + +export async function GET() { + const sql = neon(process.env.DATABASE_URL!); + const db = drizzle(sql); // ~35KB bundle, <10ms cold start + + const users = await db.select().from(users); + return Response.json(users); +} +``` + +## When to Choose Prisma + +### ✅ Choose Prisma if you need: + +1. **Rapid prototyping** + - Quick schema iterations + - Automatic migrations + - Less SQL knowledge required + +2. **Team with varied SQL experience** + - Abstracted query interface + - Declarative migrations + - Generated documentation + +3. **Mature ecosystem** + - Extensive community resources + - Third-party integrations (Nexus, tRPC) + - Enterprise support options + +4. 
**Rich developer experience** + - Prisma Studio (GUI) + - VS Code extension + - Comprehensive documentation + +### Example: Next.js App with Prisma + +```typescript +// schema.prisma +model User { + id Int @id @default(autoincrement()) + email String @unique + posts Post[] +} + +model Post { + id Int @id @default(autoincrement()) + title String + authorId Int + author User @relation(fields: [authorId], references: [id]) +} + +// app/api/users/route.ts +import { prisma } from '@/lib/prisma'; + +export async function GET() { + const users = await prisma.user.findMany({ + include: { posts: true }, + }); + return Response.json(users); +} +``` + +## Feature Comparison + +### Schema Definition + +**Drizzle** (TypeScript-first): +```typescript +import { pgTable, serial, text, integer } from 'drizzle-orm/pg-core'; +import { relations } from 'drizzle-orm'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: text('email').notNull().unique(), +}); + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: text('title').notNull(), + authorId: integer('author_id').notNull().references(() => users.id), +}); + +export const usersRelations = relations(users, ({ many }) => ({ + posts: many(posts), +})); +``` + +**Prisma** (Schema DSL): +```prisma +model User { + id Int @id @default(autoincrement()) + email String @unique + posts Post[] +} + +model Post { + id Int @id @default(autoincrement()) + title String + authorId Int + author User @relation(fields: [authorId], references: [id]) +} +``` + +### Querying + +**Drizzle** (SQL-like): +```typescript +import { eq, like, and, gt } from 'drizzle-orm'; + +// Simple query +const user = await db.select().from(users).where(eq(users.id, 1)); + +// Complex filtering +const results = await db.select() + .from(users) + .where( + and( + like(users.email, '%@example.com'), + gt(users.createdAt, new Date('2024-01-01')) + ) + ); + +// Joins +const usersWithPosts = await db + .select({ + 
user: users, + post: posts, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.authorId)); +``` + +**Prisma** (Fluent API): +```typescript +// Simple query +const user = await prisma.user.findUnique({ where: { id: 1 } }); + +// Complex filtering +const results = await prisma.user.findMany({ + where: { + email: { endsWith: '@example.com' }, + createdAt: { gt: new Date('2024-01-01') }, + }, +}); + +// Relations +const usersWithPosts = await prisma.user.findMany({ + include: { posts: true }, +}); +``` + +### Migrations + +**Drizzle** (SQL-based): +```bash +# Generate migration +npx drizzle-kit generate + +# Output: drizzle/0000_migration.sql +# CREATE TABLE "users" ( +# "id" serial PRIMARY KEY, +# "email" text NOT NULL UNIQUE +# ); + +# Apply migration +npx drizzle-kit migrate +``` + +**Prisma** (Declarative): +```bash +# Generate and apply migration +npx prisma migrate dev --name add_users + +# Prisma compares schema.prisma to database +# Generates SQL automatically +# Applies migration +``` + +### Type Generation + +**Drizzle** (Inferred): +```typescript +// Types are inferred at compile time +type User = typeof users.$inferSelect; +type NewUser = typeof users.$inferInsert; + +// Immediate feedback in IDE +const user: User = await db.select().from(users); +``` + +**Prisma** (Generated): +```typescript +// Types generated after schema change +// Run: npx prisma generate + +import { User, Post } from '@prisma/client'; + +const user: User = await prisma.user.findUnique({ where: { id: 1 } }); +``` + +### Raw SQL + +**Drizzle** (First-class): +```typescript +import { sql } from 'drizzle-orm'; + +// Tagged template with type safety +const result = await db.execute( + sql`SELECT * FROM ${users} WHERE ${users.email} = ${email}` +); + +// Mix ORM and raw SQL +const customQuery = await db + .select({ + user: users, + postCount: sql`COUNT(${posts.id})`, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.authorId)) + .groupBy(users.id); +``` + +**Prisma** 
(Limited): +```typescript +// Raw query (loses type safety) +const result = await prisma.$queryRaw` + SELECT * FROM users WHERE email = ${email} +`; + +// Typed raw query (manual type annotation) +const users = await prisma.$queryRaw` + SELECT * FROM users +`; +``` + +## Performance Benchmarks + +### Query Execution Time (1000 queries) + +| Operation | Drizzle | Prisma | Difference | +|-----------|---------|--------|------------| +| findUnique | 1.2s | 3.1s | **2.6x faster** | +| findMany (10 rows) | 1.5s | 3.8s | **2.5x faster** | +| findMany (100 rows) | 2.1s | 5.2s | **2.5x faster** | +| create | 1.8s | 4.1s | **2.3x faster** | +| update | 1.7s | 3.9s | **2.3x faster** | + +### Bundle Size Impact + +```bash +# Next.js production build + +# With Drizzle +├─ Client (First Load JS) +│ └─ pages/index.js: 85 KB (+35KB Drizzle) + +# With Prisma +├─ Client (First Load JS) +│ └─ pages/index.js: 280 KB (+230KB Prisma) +``` + +### Cold Start Times (AWS Lambda) + +| Database | Drizzle | Prisma | +|----------|---------|--------| +| PostgreSQL | ~50ms | ~300ms | +| MySQL | ~45ms | ~280ms | +| SQLite | ~10ms | ~150ms | + +## Migration from Prisma to Drizzle + +### Step 1: Install Drizzle + +```bash +npm install drizzle-orm +npm install -D drizzle-kit + +# Keep Prisma temporarily +# npm uninstall prisma @prisma/client +``` + +### Step 2: Introspect Existing Database + +```typescript +// drizzle.config.ts +import type { Config } from 'drizzle-kit'; + +export default { + schema: './db/schema.ts', + out: './drizzle', + dialect: 'postgresql', + dbCredentials: { + url: process.env.DATABASE_URL!, + }, +} satisfies Config; +``` + +```bash +# Generate Drizzle schema from existing database +npx drizzle-kit introspect +``` + +### Step 3: Convert Queries + +**Prisma**: +```typescript +// Before (Prisma) +const users = await prisma.user.findMany({ + where: { email: { contains: 'example.com' } }, + include: { posts: true }, + orderBy: { createdAt: 'desc' }, + take: 10, +}); +``` + 
+**Drizzle**: +```typescript +// After (Drizzle) +import { like, desc } from 'drizzle-orm'; + +const users = await db.query.users.findMany({ + where: like(users.email, '%example.com%'), + with: { posts: true }, + orderBy: [desc(users.createdAt)], + limit: 10, +}); + +// Or SQL-style +const users = await db + .select() + .from(users) + .where(like(users.email, '%example.com%')) + .orderBy(desc(users.createdAt)) + .limit(10); +``` + +### Step 4: Conversion Patterns + +```typescript +// Prisma → Drizzle mapping + +// findUnique +await prisma.user.findUnique({ where: { id: 1 } }); +await db.select().from(users).where(eq(users.id, 1)); + +// findMany with filters +await prisma.user.findMany({ where: { role: 'admin' } }); +await db.select().from(users).where(eq(users.role, 'admin')); + +// create +await prisma.user.create({ data: { email: 'user@example.com' } }); +await db.insert(users).values({ email: 'user@example.com' }).returning(); + +// update +await prisma.user.update({ where: { id: 1 }, data: { name: 'John' } }); +await db.update(users).set({ name: 'John' }).where(eq(users.id, 1)); + +// delete +await prisma.user.delete({ where: { id: 1 } }); +await db.delete(users).where(eq(users.id, 1)); + +// count +await prisma.user.count(); +await db.select({ count: count() }).from(users); + +// aggregate +await prisma.post.aggregate({ _avg: { views: true } }); +await db.select({ avg: avg(posts.views) }).from(posts); +``` + +### Step 5: Test & Remove Prisma + +```bash +# Run tests with Drizzle +npm test + +# Remove Prisma when confident +npm uninstall prisma @prisma/client +rm -rf prisma/ +``` + +## Decision Matrix + +| Requirement | Drizzle | Prisma | +|-------------|---------|--------| +| Need minimal bundle size | ✅ | ❌ | +| Edge runtime deployment | ✅ | ⚠️ | +| Team unfamiliar with SQL | ❌ | ✅ | +| Complex raw SQL queries | ✅ | ❌ | +| Rapid prototyping | ⚠️ | ✅ | +| Type-safe migrations | ✅ | ✅ | +| Performance critical | ✅ | ❌ | +| Mature ecosystem | ⚠️ | ✅ | +| 
First-class TypeScript | ✅ | ✅ | +| Zero dependencies | ✅ | ❌ | + +## Hybrid Approach + +You can use both in the same project: + +```typescript +// Use Drizzle for performance-critical paths +import { db as drizzleDb } from './lib/drizzle'; + +export async function GET() { + const users = await drizzleDb.select().from(users); + return Response.json(users); +} + +// Use Prisma for admin dashboards (less performance-critical) +import { prisma } from './lib/prisma'; + +export async function getStaticProps() { + const stats = await prisma.user.aggregate({ + _count: true, + _avg: { posts: true }, + }); + return { props: { stats } }; +} +``` + +## Community & Resources + +### Drizzle +- Docs: [orm.drizzle.team](https://orm.drizzle.team) +- Discord: [drizzle.team/discord](https://drizzle.team/discord) +- GitHub: [drizzle-team/drizzle-orm](https://github.com/drizzle-team/drizzle-orm) + +### Prisma +- Docs: [prisma.io/docs](https://prisma.io/docs) +- Discord: [pris.ly/discord](https://pris.ly/discord) +- GitHub: [prisma/prisma](https://github.com/prisma/prisma) + +## Final Recommendation + +**Choose Drizzle for:** +- Greenfield projects prioritizing performance +- Edge/serverless applications +- Teams comfortable with SQL +- Minimal bundle size requirements + +**Choose Prisma for:** +- Established teams with Prisma experience +- Rapid MVP development +- Teams new to databases +- Reliance on Prisma ecosystem (Nexus, etc.) + +**Consider migration when:** +- Performance becomes a bottleneck +- Bundle size impacts user experience +- Edge runtime deployment needed +- Team SQL proficiency increases diff --git a/.agents/skills/fastify-best-practices/SKILL.md b/.agents/skills/fastify-best-practices/SKILL.md new file mode 100644 index 0000000..439e684 --- /dev/null +++ b/.agents/skills/fastify-best-practices/SKILL.md @@ -0,0 +1,75 @@ +--- +name: fastify-best-practices +description: "Guides development of Fastify Node.js backend servers and REST APIs using TypeScript or JavaScript. 
Use when building, configuring, or debugging a Fastify application — including defining routes, implementing plugins, setting up JSON Schema validation, handling errors, optimising performance, managing authentication, configuring CORS and security headers, integrating databases, working with WebSockets, and deploying to production. Covers the full Fastify request lifecycle (hooks, serialization, logging with Pino) and TypeScript integration via strip types. Trigger terms: Fastify, Node.js server, REST API, API routes, backend framework, fastify.config, server.ts, app.ts." +metadata: + tags: fastify, nodejs, typescript, backend, api, server, http +--- + +## When to use + +Use this skill when you need to: +- Develop backend applications using Fastify +- Implement Fastify plugins and route handlers +- Get guidance on Fastify architecture and patterns +- Use TypeScript with Fastify (strip types) +- Implement testing with Fastify's inject method +- Configure validation, serialization, and error handling + +## Quick Start + +A minimal, runnable Fastify server to get started immediately: + +```ts +import Fastify from 'fastify' + +const app = Fastify({ logger: true }) + +app.get('/health', async (request, reply) => { + return { status: 'ok' } +}) + +const start = async () => { + await app.listen({ port: 3000, host: '0.0.0.0' }) +} +start() +``` + +## Recommended Reading Order for Common Scenarios + +- **New to Fastify?** Start with `plugins.md` → `routes.md` → `schemas.md` +- **Adding authentication:** `plugins.md` → `hooks.md` → `authentication.md` +- **Improving performance:** `schemas.md` → `serialization.md` → `performance.md` +- **Setting up testing:** `routes.md` → `testing.md` +- **Going to production:** `logging.md` → `configuration.md` → `deployment.md` + +## How to use + +Read individual rule files for detailed explanations and code examples: + +- [rules/plugins.md](rules/plugins.md) - Plugin development and encapsulation +- [rules/routes.md](rules/routes.md) - 
Route organization and handlers +- [rules/schemas.md](rules/schemas.md) - JSON Schema validation +- [rules/error-handling.md](rules/error-handling.md) - Error handling patterns +- [rules/hooks.md](rules/hooks.md) - Hooks and request lifecycle +- [rules/authentication.md](rules/authentication.md) - Authentication and authorization +- [rules/testing.md](rules/testing.md) - Testing with inject() +- [rules/performance.md](rules/performance.md) - Performance optimization +- [rules/logging.md](rules/logging.md) - Logging with Pino +- [rules/typescript.md](rules/typescript.md) - TypeScript integration +- [rules/decorators.md](rules/decorators.md) - Decorators and extensions +- [rules/content-type.md](rules/content-type.md) - Content type parsing +- [rules/serialization.md](rules/serialization.md) - Response serialization +- [rules/cors-security.md](rules/cors-security.md) - CORS and security headers +- [rules/websockets.md](rules/websockets.md) - WebSocket support +- [rules/database.md](rules/database.md) - Database integration patterns +- [rules/configuration.md](rules/configuration.md) - Application configuration +- [rules/deployment.md](rules/deployment.md) - Production deployment +- [rules/http-proxy.md](rules/http-proxy.md) - HTTP proxying and reply.from() + +## Core Principles + +- **Encapsulation**: Fastify's plugin system provides automatic encapsulation +- **Schema-first**: Define schemas for validation and serialization +- **Performance**: Fastify is optimized for speed; use its features correctly +- **Async/await**: All handlers and hooks support async functions +- **Minimal dependencies**: Prefer Fastify's built-in features and official plugins diff --git a/.agents/skills/fastify-best-practices/rules/authentication.md b/.agents/skills/fastify-best-practices/rules/authentication.md new file mode 100644 index 0000000..3a84b53 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/authentication.md @@ -0,0 +1,521 @@ +--- +name: authentication 
+description: Authentication and authorization patterns in Fastify +metadata: + tags: auth, jwt, session, oauth, security, authorization +--- + +# Authentication and Authorization + +## JWT Authentication with @fastify/jwt + +Use `@fastify/jwt` for JSON Web Token authentication: + +```typescript +import Fastify from 'fastify'; +import fastifyJwt from '@fastify/jwt'; + +const app = Fastify(); + +app.register(fastifyJwt, { + secret: process.env.JWT_SECRET, + sign: { + expiresIn: '1h', + }, +}); + +// Decorate request with authentication method +app.decorate('authenticate', async function (request, reply) { + try { + await request.jwtVerify(); + } catch (err) { + reply.code(401).send({ error: 'Unauthorized' }); + } +}); + +// Login route +app.post('/login', { + schema: { + body: { + type: 'object', + properties: { + email: { type: 'string', format: 'email' }, + password: { type: 'string' }, + }, + required: ['email', 'password'], + }, + }, +}, async (request, reply) => { + const { email, password } = request.body; + const user = await validateCredentials(email, password); + + if (!user) { + return reply.code(401).send({ error: 'Invalid credentials' }); + } + + const token = app.jwt.sign({ + id: user.id, + email: user.email, + role: user.role, + }); + + return { token }; +}); + +// Protected route +app.get('/profile', { + onRequest: [app.authenticate], +}, async (request) => { + return { user: request.user }; +}); +``` + +## Refresh Tokens + +Implement refresh token rotation: + +```typescript +import fastifyJwt from '@fastify/jwt'; +import { randomBytes } from 'node:crypto'; + +app.register(fastifyJwt, { + secret: process.env.JWT_SECRET, + sign: { + expiresIn: '15m', // Short-lived access tokens + }, +}); + +// Store refresh tokens (use Redis in production) +const refreshTokens = new Map(); + +app.post('/auth/login', async (request, reply) => { + const { email, password } = request.body; + const user = await validateCredentials(email, password); + + if (!user) { + 
return reply.code(401).send({ error: 'Invalid credentials' }); + } + + const accessToken = app.jwt.sign({ id: user.id, role: user.role }); + const refreshToken = randomBytes(32).toString('hex'); + + refreshTokens.set(refreshToken, { + userId: user.id, + expires: Date.now() + 7 * 24 * 60 * 60 * 1000, // 7 days + }); + + return { accessToken, refreshToken }; +}); + +app.post('/auth/refresh', async (request, reply) => { + const { refreshToken } = request.body; + const stored = refreshTokens.get(refreshToken); + + if (!stored || stored.expires < Date.now()) { + refreshTokens.delete(refreshToken); + return reply.code(401).send({ error: 'Invalid refresh token' }); + } + + // Delete old token (rotation) + refreshTokens.delete(refreshToken); + + const user = await db.users.findById(stored.userId); + const accessToken = app.jwt.sign({ id: user.id, role: user.role }); + const newRefreshToken = randomBytes(32).toString('hex'); + + refreshTokens.set(newRefreshToken, { + userId: user.id, + expires: Date.now() + 7 * 24 * 60 * 60 * 1000, + }); + + return { accessToken, refreshToken: newRefreshToken }; +}); + +app.post('/auth/logout', async (request, reply) => { + const { refreshToken } = request.body; + refreshTokens.delete(refreshToken); + return { success: true }; +}); +``` + +## Role-Based Access Control + +Implement RBAC with decorators: + +```typescript +type Role = 'admin' | 'user' | 'moderator'; + +// Create authorization decorator +app.decorate('authorize', function (...allowedRoles: Role[]) { + return async (request, reply) => { + await request.jwtVerify(); + + const userRole = request.user.role as Role; + if (!allowedRoles.includes(userRole)) { + return reply.code(403).send({ + error: 'Forbidden', + message: `Role '${userRole}' is not authorized for this resource`, + }); + } + }; +}); + +// Admin only route +app.get('/admin/users', { + onRequest: [app.authorize('admin')], +}, async (request) => { + return db.users.findAll(); +}); + +// Admin or moderator 
+app.delete('/posts/:id', { + onRequest: [app.authorize('admin', 'moderator')], +}, async (request) => { + await db.posts.delete(request.params.id); + return { deleted: true }; +}); +``` + +## Permission-Based Authorization + +Fine-grained permission checks: + +```typescript +interface Permission { + resource: string; + action: 'create' | 'read' | 'update' | 'delete'; +} + +const rolePermissions: Record = { + admin: [ + { resource: '*', action: 'create' }, + { resource: '*', action: 'read' }, + { resource: '*', action: 'update' }, + { resource: '*', action: 'delete' }, + ], + user: [ + { resource: 'posts', action: 'create' }, + { resource: 'posts', action: 'read' }, + { resource: 'comments', action: 'create' }, + { resource: 'comments', action: 'read' }, + ], +}; + +function hasPermission(role: string, resource: string, action: string): boolean { + const permissions = rolePermissions[role] || []; + return permissions.some( + (p) => + (p.resource === '*' || p.resource === resource) && + p.action === action + ); +} + +app.decorate('checkPermission', function (resource: string, action: string) { + return async (request, reply) => { + await request.jwtVerify(); + + if (!hasPermission(request.user.role, resource, action)) { + return reply.code(403).send({ + error: 'Forbidden', + message: `Not allowed to ${action} ${resource}`, + }); + } + }; +}); + +// Usage +app.post('/posts', { + onRequest: [app.checkPermission('posts', 'create')], +}, createPostHandler); + +app.delete('/posts/:id', { + onRequest: [app.checkPermission('posts', 'delete')], +}, deletePostHandler); +``` + +## API Key / Bearer Token Authentication + +Use `@fastify/bearer-auth` for API key and bearer token authentication: + +```typescript +import bearerAuth from '@fastify/bearer-auth'; + +const validKeys = new Set([process.env.API_KEY]); + +app.register(bearerAuth, { + keys: validKeys, + errorResponse: (err) => ({ + error: 'Unauthorized', + message: 'Invalid API key', + }), +}); + +// All routes are now 
protected +app.get('/api/data', async (request) => { + return { data: [] }; +}); +``` + +For database-backed API keys with custom validation: + +```typescript +import bearerAuth from '@fastify/bearer-auth'; + +app.register(bearerAuth, { + auth: async (key, request) => { + const apiKey = await db.apiKeys.findByKey(key); + + if (!apiKey || !apiKey.active) { + return false; + } + + // Track usage (fire and forget) + db.apiKeys.recordUsage(apiKey.id, { + ip: request.ip, + timestamp: new Date(), + }); + + request.apiKey = apiKey; + return true; + }, + errorResponse: (err) => ({ + error: 'Unauthorized', + message: 'Invalid API key', + }), +}); +``` + +## OAuth 2.0 Integration + +Integrate with OAuth providers using @fastify/oauth2: + +```typescript +import fastifyOauth2 from '@fastify/oauth2'; + +app.register(fastifyOauth2, { + name: 'googleOAuth2', + scope: ['profile', 'email'], + credentials: { + client: { + id: process.env.GOOGLE_CLIENT_ID, + secret: process.env.GOOGLE_CLIENT_SECRET, + }, + }, + startRedirectPath: '/auth/google', + callbackUri: 'http://localhost:3000/auth/google/callback', + discovery: { + issuer: 'https://accounts.google.com', + }, +}); + +app.get('/auth/google/callback', async (request, reply) => { + const { token } = await app.googleOAuth2.getAccessTokenFromAuthorizationCodeFlow(request); + + // Fetch user info from Google + const userInfo = await fetch('https://www.googleapis.com/oauth2/v2/userinfo', { + headers: { Authorization: `Bearer ${token.access_token}` }, + }).then((r) => r.json()); + + // Find or create user + let user = await db.users.findByEmail(userInfo.email); + if (!user) { + user = await db.users.create({ + email: userInfo.email, + name: userInfo.name, + provider: 'google', + providerId: userInfo.id, + }); + } + + // Generate JWT + const jwt = app.jwt.sign({ id: user.id, role: user.role }); + + // Redirect to frontend with token + return reply.redirect(`/auth/success?token=${jwt}`); +}); +``` + +## Session-Based Authentication + 
+Use @fastify/session for session management: + +```typescript +import fastifyCookie from '@fastify/cookie'; +import fastifySession from '@fastify/session'; +import RedisStore from 'connect-redis'; +import { createClient } from 'redis'; + +const redisClient = createClient({ url: process.env.REDIS_URL }); +await redisClient.connect(); + +app.register(fastifyCookie); +app.register(fastifySession, { + secret: process.env.SESSION_SECRET, + store: new RedisStore({ client: redisClient }), + cookie: { + secure: process.env.NODE_ENV === 'production', + httpOnly: true, + maxAge: 24 * 60 * 60 * 1000, // 1 day + }, +}); + +app.post('/login', async (request, reply) => { + const { email, password } = request.body; + const user = await validateCredentials(email, password); + + if (!user) { + return reply.code(401).send({ error: 'Invalid credentials' }); + } + + request.session.userId = user.id; + request.session.role = user.role; + + return { success: true }; +}); + +app.decorate('requireSession', async function (request, reply) { + if (!request.session.userId) { + return reply.code(401).send({ error: 'Not authenticated' }); + } +}); + +app.get('/profile', { + onRequest: [app.requireSession], +}, async (request) => { + const user = await db.users.findById(request.session.userId); + return { user }; +}); + +app.post('/logout', async (request, reply) => { + await request.session.destroy(); + return { success: true }; +}); +``` + +## Resource-Based Authorization + +Check ownership of resources: + +```typescript +app.decorate('checkOwnership', function (getResourceOwnerId: (request) => Promise) { + return async (request, reply) => { + const ownerId = await getResourceOwnerId(request); + + if (ownerId !== request.user.id && request.user.role !== 'admin') { + return reply.code(403).send({ + error: 'Forbidden', + message: 'You do not own this resource', + }); + } + }; +}); + +// Check post ownership +app.put('/posts/:id', { + onRequest: [ + app.authenticate, + app.checkOwnership(async 
(request) => { + const post = await db.posts.findById(request.params.id); + return post?.authorId; + }), + ], +}, updatePostHandler); + +// Alternative: inline check +app.put('/posts/:id', { + onRequest: [app.authenticate], +}, async (request, reply) => { + const post = await db.posts.findById(request.params.id); + + if (!post) { + return reply.code(404).send({ error: 'Post not found' }); + } + + if (post.authorId !== request.user.id && request.user.role !== 'admin') { + return reply.code(403).send({ error: 'Forbidden' }); + } + + return db.posts.update(post.id, request.body); +}); +``` + +## Password Hashing + +Use secure password hashing with argon2: + +```typescript +import { hash, verify } from '@node-rs/argon2'; + +async function hashPassword(password: string): Promise { + return hash(password, { + memoryCost: 65536, + timeCost: 3, + parallelism: 4, + }); +} + +async function verifyPassword(hash: string, password: string): Promise { + return verify(hash, password); +} + +app.post('/register', async (request, reply) => { + const { email, password } = request.body; + + const hashedPassword = await hashPassword(password); + const user = await db.users.create({ + email, + password: hashedPassword, + }); + + reply.code(201); + return { id: user.id, email: user.email }; +}); + +app.post('/login', async (request, reply) => { + const { email, password } = request.body; + const user = await db.users.findByEmail(email); + + if (!user || !(await verifyPassword(user.password, password))) { + return reply.code(401).send({ error: 'Invalid credentials' }); + } + + const token = app.jwt.sign({ id: user.id, role: user.role }); + return { token }; +}); +``` + +## Rate Limiting for Auth Endpoints + +Protect auth endpoints from brute force. **IMPORTANT: For production security, you MUST configure rate limiting with a Redis backend.** In-memory rate limiting is not safe for distributed deployments and can be bypassed. 
+ +```typescript +import fastifyRateLimit from '@fastify/rate-limit'; +import Redis from 'ioredis'; + +const redis = new Redis(process.env.REDIS_URL); + +// Global rate limit with Redis backend +app.register(fastifyRateLimit, { + max: 100, + timeWindow: '1 minute', + redis, // REQUIRED for production - ensures rate limiting works across all instances +}); + +// Stricter limit for auth endpoints +app.register(async function authRoutes(fastify) { + await fastify.register(fastifyRateLimit, { + max: 5, + timeWindow: '1 minute', + redis, // REQUIRED for production + keyGenerator: (request) => { + // Rate limit by IP + email combination + const email = request.body?.email || ''; + return `${request.ip}:${email}`; + }, + }); + + fastify.post('/login', loginHandler); + fastify.post('/register', registerHandler); + fastify.post('/forgot-password', forgotPasswordHandler); +}, { prefix: '/auth' }); +``` diff --git a/.agents/skills/fastify-best-practices/rules/configuration.md b/.agents/skills/fastify-best-practices/rules/configuration.md new file mode 100644 index 0000000..ceba3e3 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/configuration.md @@ -0,0 +1,217 @@ +--- +name: configuration +description: Application configuration in Fastify using env-schema +metadata: + tags: configuration, environment, env, settings, env-schema +--- + +# Application Configuration + +## Use env-schema for Configuration + +**Always use `env-schema` for configuration validation.** It provides JSON Schema validation for environment variables with sensible defaults. 
+ +```typescript +import Fastify from 'fastify'; +import envSchema from 'env-schema'; +import { Type, type Static } from '@sinclair/typebox'; + +const schema = Type.Object({ + PORT: Type.Number({ default: 3000 }), + HOST: Type.String({ default: '0.0.0.0' }), + DATABASE_URL: Type.String(), + JWT_SECRET: Type.String({ minLength: 32 }), + LOG_LEVEL: Type.Union([ + Type.Literal('trace'), + Type.Literal('debug'), + Type.Literal('info'), + Type.Literal('warn'), + Type.Literal('error'), + Type.Literal('fatal'), + ], { default: 'info' }), +}); + +type Config = Static; + +const config = envSchema({ + schema, + dotenv: true, // Load from .env file +}); + +const app = Fastify({ + logger: { level: config.LOG_LEVEL }, +}); + +app.decorate('config', config); + +declare module 'fastify' { + interface FastifyInstance { + config: Config; + } +} + +await app.listen({ port: config.PORT, host: config.HOST }); +``` + +## Configuration as Plugin + +Encapsulate configuration in a plugin for reuse: + +```typescript +import fp from 'fastify-plugin'; +import envSchema from 'env-schema'; +import { Type, type Static } from '@sinclair/typebox'; + +const schema = Type.Object({ + PORT: Type.Number({ default: 3000 }), + HOST: Type.String({ default: '0.0.0.0' }), + DATABASE_URL: Type.String(), + JWT_SECRET: Type.String({ minLength: 32 }), + LOG_LEVEL: Type.String({ default: 'info' }), +}); + +type Config = Static; + +declare module 'fastify' { + interface FastifyInstance { + config: Config; + } +} + +export default fp(async function configPlugin(fastify) { + const config = envSchema({ + schema, + dotenv: true, + }); + + fastify.decorate('config', config); +}, { + name: 'config', +}); +``` + +## Secrets Management + +Handle secrets securely: + +```typescript +// Never log secrets +const app = Fastify({ + logger: { + level: config.LOG_LEVEL, + redact: ['req.headers.authorization', '*.password', '*.secret', '*.apiKey'], + }, +}); + +// For production, use secret managers (AWS Secrets Manager, Vault, 
etc.) +// Pass secrets through environment variables - never commit them +``` + +## Feature Flags + +Implement feature flags via environment variables: + +```typescript +import { Type, type Static } from '@sinclair/typebox'; + +const schema = Type.Object({ + // ... other config + FEATURE_NEW_DASHBOARD: Type.Boolean({ default: false }), + FEATURE_BETA_API: Type.Boolean({ default: false }), +}); + +type Config = Static; + +const config = envSchema({ schema, dotenv: true }); + +// Use in routes +app.get('/dashboard', async (request) => { + if (app.config.FEATURE_NEW_DASHBOARD) { + return { version: 'v2', data: await getNewDashboardData() }; + } + return { version: 'v1', data: await getOldDashboardData() }; +}); +``` + +## Anti-Patterns to Avoid + +### NEVER use configuration files + +```typescript +// ❌ NEVER DO THIS - configuration files are an antipattern +import config from './config/production.json'; + +// ❌ NEVER DO THIS - per-environment config files +const env = process.env.NODE_ENV || 'development'; +const config = await import(`./config/${env}.js`); +``` + +Configuration files lead to: +- Security risks (secrets in files) +- Deployment complexity +- Environment drift +- Difficult secret rotation + +### NEVER use per-environment configuration + +```typescript +// ❌ NEVER DO THIS +const configs = { + development: { logLevel: 'debug' }, + production: { logLevel: 'info' }, + test: { logLevel: 'silent' }, +}; +const config = configs[process.env.NODE_ENV]; +``` + +Instead, use a single configuration source (environment variables) with sensible defaults. The environment controls the values, not conditional code. 
+ +### Use specific environment variables, not NODE_ENV + +```typescript +// ❌ AVOID checking NODE_ENV +if (process.env.NODE_ENV === 'production') { + // do something +} + +// ✅ BETTER - use explicit feature flags or configuration +if (app.config.ENABLE_DETAILED_LOGGING) { + // do something +} +``` + +## Dynamic Configuration + +For configuration that needs to change without restart, fetch from an external service: + +```typescript +interface DynamicConfig { + rateLimit: number; + maintenanceMode: boolean; +} + +let dynamicConfig: DynamicConfig = { + rateLimit: 100, + maintenanceMode: false, +}; + +async function refreshConfig() { + try { + const newConfig = await fetchConfigFromService(); + dynamicConfig = newConfig; + app.log.info('Configuration refreshed'); + } catch (error) { + app.log.error({ err: error }, 'Failed to refresh configuration'); + } +} + +// Refresh periodically +setInterval(refreshConfig, 60000); + +// Use in hooks +app.addHook('onRequest', async (request, reply) => { + if (dynamicConfig.maintenanceMode && !request.url.startsWith('/health')) { + reply.code(503).send({ error: 'Service under maintenance' }); + } +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/content-type.md b/.agents/skills/fastify-best-practices/rules/content-type.md new file mode 100644 index 0000000..8c98f1e --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/content-type.md @@ -0,0 +1,387 @@ +--- +name: content-type +description: Content type parsing in Fastify +metadata: + tags: content-type, parsing, body, multipart, json +--- + +# Content Type Parsing + +## Default Content Type Parsers + +Fastify includes parsers for common content types: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Built-in parsers: +// - application/json +// - text/plain + +app.post('/json', async (request) => { + // request.body is parsed JSON object + return { received: request.body }; +}); + +app.post('/text', async (request) => { + // 
request.body is string for text/plain + return { text: request.body }; +}); +``` + +## Custom Content Type Parsers + +Add parsers for additional content types: + +```typescript +// Parse application/x-www-form-urlencoded +app.addContentTypeParser( + 'application/x-www-form-urlencoded', + { parseAs: 'string' }, + (request, body, done) => { + const parsed = new URLSearchParams(body); + done(null, Object.fromEntries(parsed)); + }, +); + +// Async parser +app.addContentTypeParser( + 'application/x-www-form-urlencoded', + { parseAs: 'string' }, + async (request, body) => { + const parsed = new URLSearchParams(body); + return Object.fromEntries(parsed); + }, +); +``` + +## XML Parsing + +Parse XML content: + +```typescript +import { XMLParser } from 'fast-xml-parser'; + +const xmlParser = new XMLParser({ + ignoreAttributes: false, + attributeNamePrefix: '@_', +}); + +app.addContentTypeParser( + 'application/xml', + { parseAs: 'string' }, + async (request, body) => { + return xmlParser.parse(body); + }, +); + +app.addContentTypeParser( + 'text/xml', + { parseAs: 'string' }, + async (request, body) => { + return xmlParser.parse(body); + }, +); + +app.post('/xml', async (request) => { + // request.body is parsed XML as JavaScript object + return { data: request.body }; +}); +``` + +## Multipart Form Data + +Use @fastify/multipart for file uploads. **Configure these critical options:** + +```typescript +import fastifyMultipart from '@fastify/multipart'; + +app.register(fastifyMultipart, { + // CRITICAL: Always set explicit limits + limits: { + fieldNameSize: 100, // Max field name size in bytes + fieldSize: 1024 * 1024, // Max field value size (1MB) + fields: 10, // Max number of non-file fields + fileSize: 10 * 1024 * 1024, // Max file size (10MB) + files: 5, // Max number of files + headerPairs: 2000, // Max number of header pairs + parts: 1000, // Max number of parts (fields + files) + }, + // IMPORTANT: Throw on limit exceeded (default is to truncate silently!) 
+ throwFileSizeLimit: true, + // Attach all fields to request.body for easier access + attachFieldsToBody: true, + // Only accept specific file types (security!) + // onFile: async (part) => { + // if (!['image/jpeg', 'image/png'].includes(part.mimetype)) { + // throw new Error('Invalid file type'); + // } + // }, +}); + +// Handle file upload +app.post('/upload', async (request, reply) => { + const data = await request.file(); + + if (!data) { + return reply.code(400).send({ error: 'No file uploaded' }); + } + + // data.file is a stream + const buffer = await data.toBuffer(); + + return { + filename: data.filename, + mimetype: data.mimetype, + size: buffer.length, + }; +}); + +// Handle multiple files +app.post('/upload-multiple', async (request) => { + const files = []; + + for await (const part of request.files()) { + const buffer = await part.toBuffer(); + files.push({ + filename: part.filename, + mimetype: part.mimetype, + size: buffer.length, + }); + } + + return { files }; +}); + +// Handle mixed form data +app.post('/form', async (request) => { + const parts = request.parts(); + const fields: Record = {}; + const files: Array<{ name: string; size: number }> = []; + + for await (const part of parts) { + if (part.type === 'file') { + const buffer = await part.toBuffer(); + files.push({ name: part.filename, size: buffer.length }); + } else { + fields[part.fieldname] = part.value as string; + } + } + + return { fields, files }; +}); +``` + +## Stream Processing + +Process body as stream for large payloads: + +```typescript +import { pipeline } from 'node:stream/promises'; +import { createWriteStream } from 'node:fs'; + +// Add parser that returns stream +app.addContentTypeParser( + 'application/octet-stream', + async (request, payload) => { + return payload; // Return stream directly + }, +); + +app.post('/upload-stream', async (request, reply) => { + const destination = createWriteStream('./upload.bin'); + + await pipeline(request.body, destination); + + 
return { success: true }; +}); +``` + +## Custom JSON Parser + +Replace the default JSON parser: + +```typescript +// Remove default parser +app.removeContentTypeParser('application/json'); + +// Add custom parser with error handling +app.addContentTypeParser( + 'application/json', + { parseAs: 'string' }, + async (request, body) => { + try { + return JSON.parse(body); + } catch (error) { + throw { + statusCode: 400, + code: 'INVALID_JSON', + message: 'Invalid JSON payload', + }; + } + }, +); +``` + +## Content Type with Parameters + +Handle content types with parameters: + +```typescript +// Match content type with any charset +app.addContentTypeParser( + 'application/json; charset=utf-8', + { parseAs: 'string' }, + async (request, body) => { + return JSON.parse(body); + }, +); + +// Use regex for flexible matching +app.addContentTypeParser( + /^application\/.*\+json$/, + { parseAs: 'string' }, + async (request, body) => { + return JSON.parse(body); + }, +); +``` + +## Catch-All Parser + +Handle unknown content types: + +```typescript +app.addContentTypeParser('*', async (request, payload) => { + const chunks: Buffer[] = []; + + for await (const chunk of payload) { + chunks.push(chunk); + } + + const buffer = Buffer.concat(chunks); + + // Try to determine content type + const contentType = request.headers['content-type']; + + if (contentType?.includes('json')) { + return JSON.parse(buffer.toString('utf-8')); + } + + if (contentType?.includes('text')) { + return buffer.toString('utf-8'); + } + + return buffer; +}); +``` + +## Body Limit Configuration + +Configure body size limits: + +```typescript +// Global limit +const app = Fastify({ + bodyLimit: 1048576, // 1MB +}); + +// Per-route limit +app.post('/large-upload', { + bodyLimit: 52428800, // 50MB for this route +}, async (request) => { + return { size: JSON.stringify(request.body).length }; +}); + +// Per content type limit +app.addContentTypeParser('application/json', { + parseAs: 'string', + bodyLimit: 
2097152, // 2MB for JSON +}, async (request, body) => { + return JSON.parse(body); +}); +``` + +## Protocol Buffers + +Parse protobuf content: + +```typescript +import protobuf from 'protobufjs'; + +const root = await protobuf.load('./schema.proto'); +const MessageType = root.lookupType('package.MessageType'); + +app.addContentTypeParser( + 'application/x-protobuf', + { parseAs: 'buffer' }, + async (request, body) => { + const message = MessageType.decode(body); + return MessageType.toObject(message); + }, +); +``` + +## Form Data with @fastify/formbody + +Simple form parsing: + +```typescript +import formbody from '@fastify/formbody'; + +app.register(formbody); + +app.post('/form', async (request) => { + // request.body is parsed form data + const { name, email } = request.body as { name: string; email: string }; + return { name, email }; +}); +``` + +## Content Negotiation + +Handle different request formats: + +```typescript +app.post('/data', async (request, reply) => { + const contentType = request.headers['content-type']; + + // Body is already parsed by the appropriate parser + const data = request.body; + + // Respond based on Accept header + const accept = request.headers.accept; + + if (accept?.includes('application/xml')) { + reply.type('application/xml'); + return `${JSON.stringify(data)}`; + } + + reply.type('application/json'); + return data; +}); +``` + +## Validation After Parsing + +Validate parsed content: + +```typescript +app.post('/users', { + schema: { + body: { + type: 'object', + properties: { + name: { type: 'string', minLength: 1 }, + email: { type: 'string', format: 'email' }, + }, + required: ['name', 'email'], + }, + }, +}, async (request) => { + // Body is parsed AND validated + return request.body; +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/cors-security.md b/.agents/skills/fastify-best-practices/rules/cors-security.md new file mode 100644 index 0000000..89833c4 --- /dev/null +++ 
b/.agents/skills/fastify-best-practices/rules/cors-security.md @@ -0,0 +1,445 @@ +--- +name: cors-security +description: CORS and security headers in Fastify +metadata: + tags: cors, security, headers, helmet, csrf +--- + +# CORS and Security + +## CORS with @fastify/cors + +Enable Cross-Origin Resource Sharing: + +```typescript +import Fastify from 'fastify'; +import cors from '@fastify/cors'; + +const app = Fastify(); + +// Simple CORS - allow all origins +app.register(cors); + +// Configured CORS +app.register(cors, { + origin: ['https://example.com', 'https://app.example.com'], + methods: ['GET', 'POST', 'PUT', 'DELETE'], + allowedHeaders: ['Content-Type', 'Authorization'], + exposedHeaders: ['X-Total-Count'], + credentials: true, + maxAge: 86400, // 24 hours +}); +``` + +## Dynamic CORS Origin + +Validate origins dynamically: + +```typescript +app.register(cors, { + origin: (origin, callback) => { + // Allow requests with no origin (mobile apps, curl, etc.) + if (!origin) { + return callback(null, true); + } + + // Check against allowed origins + const allowedOrigins = [ + 'https://example.com', + 'https://app.example.com', + /\.example\.com$/, + ]; + + const isAllowed = allowedOrigins.some((allowed) => { + if (allowed instanceof RegExp) { + return allowed.test(origin); + } + return allowed === origin; + }); + + if (isAllowed) { + callback(null, true); + } else { + callback(new Error('Not allowed by CORS'), false); + } + }, + credentials: true, +}); +``` + +## Per-Route CORS + +Configure CORS for specific routes: + +```typescript +app.register(cors, { + origin: true, // Reflect request origin + credentials: true, +}); + +// Or disable CORS for specific routes +app.route({ + method: 'GET', + url: '/internal', + config: { + cors: false, + }, + handler: async () => { + return { internal: true }; + }, +}); +``` + +## Security Headers with @fastify/helmet + +Add security headers: + +```typescript +import helmet from '@fastify/helmet'; + +app.register(helmet, { + 
contentSecurityPolicy: { + directives: { + defaultSrc: ["'self'"], + scriptSrc: ["'self'", "'unsafe-inline'"], + styleSrc: ["'self'", "'unsafe-inline'"], + imgSrc: ["'self'", 'data:', 'https:'], + connectSrc: ["'self'", 'https://api.example.com'], + }, + }, + crossOriginEmbedderPolicy: false, // Disable if embedding external resources +}); +``` + +## Configure Individual Headers + +Fine-tune security headers: + +```typescript +app.register(helmet, { + // Strict Transport Security + hsts: { + maxAge: 31536000, // 1 year + includeSubDomains: true, + preload: true, + }, + + // Content Security Policy + contentSecurityPolicy: { + useDefaults: true, + directives: { + 'script-src': ["'self'", 'https://trusted-cdn.com'], + }, + }, + + // X-Frame-Options + frameguard: { + action: 'deny', // or 'sameorigin' + }, + + // X-Content-Type-Options + noSniff: true, + + // X-XSS-Protection (legacy) + xssFilter: true, + + // Referrer-Policy + referrerPolicy: { + policy: 'strict-origin-when-cross-origin', + }, + + // X-Permitted-Cross-Domain-Policies + permittedCrossDomainPolicies: false, + + // X-DNS-Prefetch-Control + dnsPrefetchControl: { + allow: false, + }, +}); +``` + +## Rate Limiting + +Protect against abuse: + +```typescript +import rateLimit from '@fastify/rate-limit'; + +app.register(rateLimit, { + max: 100, + timeWindow: '1 minute', + errorResponseBuilder: (request, context) => ({ + statusCode: 429, + error: 'Too Many Requests', + message: `Rate limit exceeded. 
Retry in ${context.after}`, + retryAfter: context.after, + }), +}); + +// Per-route rate limit +app.get('/expensive', { + config: { + rateLimit: { + max: 10, + timeWindow: '1 minute', + }, + }, +}, handler); + +// Skip rate limit for certain routes +app.get('/health', { + config: { + rateLimit: false, + }, +}, () => ({ status: 'ok' })); +``` + +## Redis-Based Rate Limiting + +Use Redis for distributed rate limiting: + +```typescript +import rateLimit from '@fastify/rate-limit'; +import Redis from 'ioredis'; + +const redis = new Redis(process.env.REDIS_URL); + +app.register(rateLimit, { + max: 100, + timeWindow: '1 minute', + redis, + nameSpace: 'rate-limit:', + keyGenerator: (request) => { + // Rate limit by user ID if authenticated, otherwise by IP + return request.user?.id || request.ip; + }, +}); +``` + +## CSRF Protection + +Protect against Cross-Site Request Forgery: + +```typescript +import fastifyCsrf from '@fastify/csrf-protection'; +import fastifyCookie from '@fastify/cookie'; + +app.register(fastifyCookie); +app.register(fastifyCsrf, { + cookieOpts: { + signed: true, + httpOnly: true, + sameSite: 'strict', + }, +}); + +// Generate token +app.get('/csrf-token', async (request, reply) => { + const token = reply.generateCsrf(); + return { token }; +}); + +// Protected route +app.post('/transfer', { + preHandler: app.csrfProtection, +}, async (request) => { + // CSRF token validated + return { success: true }; +}); +``` + +## Custom Security Headers + +Add custom headers: + +```typescript +app.addHook('onSend', async (request, reply) => { + // Custom security headers + reply.header('X-Request-ID', request.id); + reply.header('X-Content-Type-Options', 'nosniff'); + reply.header('X-Frame-Options', 'DENY'); + reply.header('Permissions-Policy', 'geolocation=(), camera=()'); +}); + +// Per-route headers +app.get('/download', async (request, reply) => { + reply.header('Content-Disposition', 'attachment; filename="file.pdf"'); + reply.header('X-Download-Options', 
'noopen'); + return reply.send(fileStream); +}); +``` + +## Secure Cookies + +Configure secure cookies: + +```typescript +import cookie from '@fastify/cookie'; + +app.register(cookie, { + secret: process.env.COOKIE_SECRET, + parseOptions: { + httpOnly: true, + secure: process.env.NODE_ENV === 'production', + sameSite: 'strict', + path: '/', + maxAge: 3600, // 1 hour + }, +}); + +// Set secure cookie +app.post('/login', async (request, reply) => { + const token = await createSession(request.body); + + reply.setCookie('session', token, { + httpOnly: true, + secure: true, + sameSite: 'strict', + path: '/', + maxAge: 86400, + signed: true, + }); + + return { success: true }; +}); + +// Read signed cookie +app.get('/profile', async (request) => { + const session = request.cookies.session; + const unsigned = request.unsignCookie(session); + + if (!unsigned.valid) { + throw { statusCode: 401, message: 'Invalid session' }; + } + + return { sessionId: unsigned.value }; +}); +``` + +## Request Validation Security + +Validate and sanitize input: + +```typescript +// Schema-based validation protects against injection +app.post('/users', { + schema: { + body: { + type: 'object', + properties: { + email: { + type: 'string', + format: 'email', + maxLength: 254, + }, + name: { + type: 'string', + minLength: 1, + maxLength: 100, + pattern: '^[a-zA-Z\\s]+$', // Only letters and spaces + }, + }, + required: ['email', 'name'], + additionalProperties: false, + }, + }, +}, handler); +``` + +## IP Filtering + +Restrict access by IP: + +```typescript +const allowedIps = new Set([ + '192.168.1.0/24', + '10.0.0.0/8', +]); + +app.addHook('onRequest', async (request, reply) => { + if (request.url.startsWith('/admin')) { + const clientIp = request.ip; + + if (!isIpAllowed(clientIp, allowedIps)) { + reply.code(403).send({ error: 'Forbidden' }); + } + } +}); + +function isIpAllowed(ip: string, allowed: Set<string>): boolean { + // Implement IP/CIDR matching + for (const range of allowed) { + if 
(ipInRange(ip, range)) return true; + } + return false; +} +``` + +## Trust Proxy + +Configure for reverse proxy environments: + +```typescript +const app = Fastify({ + trustProxy: true, // Trust X-Forwarded-* headers +}); + +// Or specific proxy configuration +const app = Fastify({ + trustProxy: ['127.0.0.1', '10.0.0.0/8'], +}); + +// Now request.ip returns the real client IP +app.get('/ip', async (request) => { + return { + ip: request.ip, + ips: request.ips, // Array of all IPs in chain + }; +}); +``` + +## HTTPS Redirect + +Force HTTPS in production: + +```typescript +app.addHook('onRequest', async (request, reply) => { + if ( + process.env.NODE_ENV === 'production' && + request.headers['x-forwarded-proto'] !== 'https' + ) { + const httpsUrl = `https://${request.hostname}${request.url}`; + reply.redirect(301, httpsUrl); + } +}); +``` + +## Security Best Practices Summary + +```typescript +import Fastify from 'fastify'; +import cors from '@fastify/cors'; +import helmet from '@fastify/helmet'; +import rateLimit from '@fastify/rate-limit'; + +const app = Fastify({ + trustProxy: true, + bodyLimit: 1048576, // 1MB max body +}); + +// Security plugins +app.register(helmet); +app.register(cors, { + origin: process.env.ALLOWED_ORIGINS?.split(','), + credentials: true, +}); +app.register(rateLimit, { + max: 100, + timeWindow: '1 minute', +}); + +// Validate all input with schemas +// Never expose internal errors in production +// Use parameterized queries for database +// Keep dependencies updated +``` diff --git a/.agents/skills/fastify-best-practices/rules/database.md b/.agents/skills/fastify-best-practices/rules/database.md new file mode 100644 index 0000000..acf6048 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/database.md @@ -0,0 +1,320 @@ +--- +name: database +description: Database integration with Fastify using official adapters +metadata: + tags: database, postgres, mysql, mongodb, redis, sql +--- + +# Database Integration + +## Use Official 
Fastify Database Adapters + +Always use the official Fastify database plugins from the `@fastify` organization. They provide proper connection pooling, encapsulation, and integration with Fastify's lifecycle. + +## PostgreSQL with @fastify/postgres + +```typescript +import Fastify from 'fastify'; +import fastifyPostgres from '@fastify/postgres'; + +const app = Fastify({ logger: true }); + +app.register(fastifyPostgres, { + connectionString: process.env.DATABASE_URL, +}); + +// Use in routes +app.get('/users', async (request) => { + const client = await app.pg.connect(); + try { + const { rows } = await client.query('SELECT * FROM users'); + return rows; + } finally { + client.release(); + } +}); + +// Or use the pool directly for simple queries +app.get('/users/:id', async (request) => { + const { id } = request.params; + const { rows } = await app.pg.query( + 'SELECT * FROM users WHERE id = $1', + [id], + ); + return rows[0]; +}); + +// Transactions +app.post('/transfer', async (request) => { + const { fromId, toId, amount } = request.body; + const client = await app.pg.connect(); + + try { + await client.query('BEGIN'); + await client.query( + 'UPDATE accounts SET balance = balance - $1 WHERE id = $2', + [amount, fromId], + ); + await client.query( + 'UPDATE accounts SET balance = balance + $1 WHERE id = $2', + [amount, toId], + ); + await client.query('COMMIT'); + return { success: true }; + } catch (error) { + await client.query('ROLLBACK'); + throw error; + } finally { + client.release(); + } +}); +``` + +## MySQL with @fastify/mysql + +```typescript +import Fastify from 'fastify'; +import fastifyMysql from '@fastify/mysql'; + +const app = Fastify({ logger: true }); + +app.register(fastifyMysql, { + promise: true, + connectionString: process.env.MYSQL_URL, +}); + +app.get('/users', async (request) => { + const connection = await app.mysql.getConnection(); + try { + const [rows] = await connection.query('SELECT * FROM users'); + return rows; + } finally { + 
connection.release(); + } +}); +``` + +## MongoDB with @fastify/mongodb + +```typescript +import Fastify from 'fastify'; +import fastifyMongo from '@fastify/mongodb'; + +const app = Fastify({ logger: true }); + +app.register(fastifyMongo, { + url: process.env.MONGODB_URL, +}); + +app.get('/users', async (request) => { + const users = await app.mongo.db + .collection('users') + .find({}) + .toArray(); + return users; +}); + +app.get('/users/:id', async (request) => { + const { id } = request.params; + const user = await app.mongo.db + .collection('users') + .findOne({ _id: new app.mongo.ObjectId(id) }); + return user; +}); + +app.post('/users', async (request) => { + const result = await app.mongo.db + .collection('users') + .insertOne(request.body); + return { id: result.insertedId }; +}); +``` + +## Redis with @fastify/redis + +```typescript +import Fastify from 'fastify'; +import fastifyRedis from '@fastify/redis'; + +const app = Fastify({ logger: true }); + +app.register(fastifyRedis, { + url: process.env.REDIS_URL, +}); + +// Caching example +app.get('/data/:key', async (request) => { + const { key } = request.params; + + // Try cache first + const cached = await app.redis.get(`cache:${key}`); + if (cached) { + return JSON.parse(cached); + } + + // Fetch from database + const data = await fetchFromDatabase(key); + + // Cache for 5 minutes + await app.redis.setex(`cache:${key}`, 300, JSON.stringify(data)); + + return data; +}); +``` + +## Database as Plugin + +Encapsulate database access in a plugin: + +```typescript +// plugins/database.ts +import fp from 'fastify-plugin'; +import fastifyPostgres from '@fastify/postgres'; + +export default fp(async function databasePlugin(fastify) { + await fastify.register(fastifyPostgres, { + connectionString: fastify.config.DATABASE_URL, + }); + + // Add health check + fastify.decorate('checkDatabaseHealth', async () => { + try { + await fastify.pg.query('SELECT 1'); + return true; + } catch { + return false; + } + }); +}, { 
+ name: 'database', + dependencies: ['config'], +}); +``` + +## Repository Pattern + +Abstract database access with repositories: + +```typescript +// repositories/user.repository.ts +import type { FastifyInstance } from 'fastify'; + +export interface User { + id: string; + email: string; + name: string; +} + +export function createUserRepository(app: FastifyInstance) { + return { + async findById(id: string): Promise<User | null> { + const { rows } = await app.pg.query( + 'SELECT * FROM users WHERE id = $1', + [id], + ); + return rows[0] || null; + }, + + async findByEmail(email: string): Promise<User | null> { + const { rows } = await app.pg.query( + 'SELECT * FROM users WHERE email = $1', + [email], + ); + return rows[0] || null; + }, + + async create(data: Omit<User, 'id'>): Promise<User> { + const { rows } = await app.pg.query( + 'INSERT INTO users (email, name) VALUES ($1, $2) RETURNING *', + [data.email, data.name], + ); + return rows[0]; + }, + + async update(id: string, data: Partial<User>): Promise<User | null> { + const fields = Object.keys(data); + const values = Object.values(data); + const setClause = fields + .map((f, i) => `${f} = $${i + 2}`) + .join(', '); + + const { rows } = await app.pg.query( + `UPDATE users SET ${setClause} WHERE id = $1 RETURNING *`, + [id, ...values], + ); + return rows[0] || null; + }, + + async delete(id: string): Promise<boolean> { + const { rowCount } = await app.pg.query( + 'DELETE FROM users WHERE id = $1', + [id], + ); + return rowCount > 0; + }, + }; +} + +// Usage in plugin +import fp from 'fastify-plugin'; +import { createUserRepository } from './repositories/user.repository.js'; + +export default fp(async function repositoriesPlugin(fastify) { + fastify.decorate('repositories', { + users: createUserRepository(fastify), + }); +}, { + name: 'repositories', + dependencies: ['database'], +}); +``` + +## Testing with Database + +Use transactions for test isolation: + +```typescript +import { describe, it, beforeEach, afterEach } from 'node:test'; +import { build } from './app.js'; + 
+describe('User API', () => { + let app; + let client; + + beforeEach(async () => { + app = await build(); + client = await app.pg.connect(); + await client.query('BEGIN'); + }); + + afterEach(async () => { + await client.query('ROLLBACK'); + client.release(); + await app.close(); + }); + + it('should create a user', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { email: 'test@example.com', name: 'Test' }, + }); + + t.assert.equal(response.statusCode, 201); + }); +}); +``` + +## Connection Pool Configuration + +Configure connection pools appropriately: + +```typescript +app.register(fastifyPostgres, { + connectionString: process.env.DATABASE_URL, + // Pool configuration + max: 20, // Maximum pool size + idleTimeoutMillis: 30000, // Close idle clients after 30s + connectionTimeoutMillis: 5000, // Timeout for new connections +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/decorators.md b/.agents/skills/fastify-best-practices/rules/decorators.md new file mode 100644 index 0000000..a9a322a --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/decorators.md @@ -0,0 +1,416 @@ +--- +name: decorators +description: Decorators and request/reply extensions in Fastify +metadata: + tags: decorators, extensions, customization, utilities +--- + +# Decorators and Extensions + +## Understanding Decorators + +Decorators add custom properties and methods to Fastify instances, requests, and replies: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Decorate the Fastify instance +app.decorate('utility', { + formatDate: (date: Date) => date.toISOString(), + generateId: () => crypto.randomUUID(), +}); + +// Use in routes +app.get('/example', async function (request, reply) { + const id = this.utility.generateId(); + return { id, timestamp: this.utility.formatDate(new Date()) }; +}); +``` + +## Decorator Types + +Three types of decorators for different contexts: + 
+```typescript +// Instance decorator - available on fastify instance +app.decorate('config', { apiVersion: '1.0.0' }); +app.decorate('db', databaseConnection); +app.decorate('cache', cacheClient); + +// Request decorator - available on each request +app.decorateRequest('user', null); // Object property +app.decorateRequest('startTime', 0); // Primitive +app.decorateRequest('getData', function() { // Method + return this.body; +}); + +// Reply decorator - available on each reply +app.decorateReply('sendError', function(code: number, message: string) { + return this.code(code).send({ error: message }); +}); +app.decorateReply('success', function(data: unknown) { + return this.send({ success: true, data }); +}); +``` + +## TypeScript Declaration Merging + +Extend Fastify types for type safety: + +```typescript +// Declare custom properties +declare module 'fastify' { + interface FastifyInstance { + config: { + apiVersion: string; + environment: string; + }; + db: DatabaseClient; + cache: CacheClient; + } + + interface FastifyRequest { + user: { + id: string; + email: string; + roles: string[]; + } | null; + startTime: number; + requestId: string; + } + + interface FastifyReply { + sendError: (code: number, message: string) => void; + success: (data: unknown) => void; + } +} + +// Register decorators +app.decorate('config', { + apiVersion: '1.0.0', + environment: process.env.NODE_ENV, +}); + +app.decorateRequest('user', null); +app.decorateRequest('startTime', 0); + +app.decorateReply('sendError', function (code: number, message: string) { + this.code(code).send({ error: message }); +}); +``` + +## Decorator Initialization + +Initialize request/reply decorators in hooks: + +```typescript +// Decorators with primitive defaults are copied +app.decorateRequest('startTime', 0); + +// Initialize in hook +app.addHook('onRequest', async (request) => { + request.startTime = Date.now(); +}); + +// Object decorators need getter pattern for proper initialization 
+app.decorateRequest('context', null); + +app.addHook('onRequest', async (request) => { + request.context = { + traceId: request.headers['x-trace-id'] || crypto.randomUUID(), + clientIp: request.ip, + userAgent: request.headers['user-agent'], + }; +}); +``` + +## Dependency Injection with Decorators + +Use decorators for dependency injection: + +```typescript +import fp from 'fastify-plugin'; + +// Database plugin +export default fp(async function databasePlugin(fastify, options) { + const db = await createDatabaseConnection(options.connectionString); + + fastify.decorate('db', db); + + fastify.addHook('onClose', async () => { + await db.close(); + }); +}); + +// User service plugin +export default fp(async function userServicePlugin(fastify) { + // Depends on db decorator + if (!fastify.hasDecorator('db')) { + throw new Error('Database plugin must be registered first'); + } + + const userService = { + findById: (id: string) => fastify.db.query('SELECT * FROM users WHERE id = $1', [id]), + create: (data: CreateUserInput) => fastify.db.query( + 'INSERT INTO users (name, email) VALUES ($1, $2) RETURNING *', + [data.name, data.email] + ), + }; + + fastify.decorate('userService', userService); +}, { + dependencies: ['database-plugin'], +}); + +// Use in routes +app.get('/users/:id', async function (request) { + const user = await this.userService.findById(request.params.id); + return user; +}); +``` + +## Request Context Pattern + +Build rich request context: + +```typescript +interface RequestContext { + traceId: string; + user: User | null; + permissions: Set<string>; + startTime: number; + metadata: Map<string, unknown>; +} + +declare module 'fastify' { + interface FastifyRequest { + ctx: RequestContext; + } +} + +app.decorateRequest('ctx', null); + +app.addHook('onRequest', async (request) => { + request.ctx = { + traceId: request.headers['x-trace-id']?.toString() || crypto.randomUUID(), + user: null, + permissions: new Set(), + startTime: Date.now(), + metadata: new Map(), + }; +}); + +// 
Auth hook populates user +app.addHook('preHandler', async (request) => { + const token = request.headers.authorization; + if (token) { + const user = await verifyToken(token); + request.ctx.user = user; + request.ctx.permissions = new Set(user.permissions); + } +}); + +// Use in handlers +app.get('/profile', async (request, reply) => { + if (!request.ctx.user) { + return reply.code(401).send({ error: 'Unauthorized' }); + } + + if (!request.ctx.permissions.has('read:profile')) { + return reply.code(403).send({ error: 'Forbidden' }); + } + + return request.ctx.user; +}); +``` + +## Reply Helpers + +Create consistent response methods: + +```typescript +declare module 'fastify' { + interface FastifyReply { + ok: (data?: unknown) => void; + created: (data: unknown) => void; + noContent: () => void; + badRequest: (message: string, details?: unknown) => void; + unauthorized: (message?: string) => void; + forbidden: (message?: string) => void; + notFound: (resource?: string) => void; + conflict: (message: string) => void; + serverError: (message?: string) => void; + } +} + +app.decorateReply('ok', function (data?: unknown) { + this.code(200).send(data ?? 
{ success: true }); +}); + +app.decorateReply('created', function (data: unknown) { + this.code(201).send(data); +}); + +app.decorateReply('noContent', function () { + this.code(204).send(); +}); + +app.decorateReply('badRequest', function (message: string, details?: unknown) { + this.code(400).send({ + statusCode: 400, + error: 'Bad Request', + message, + details, + }); +}); + +app.decorateReply('unauthorized', function (message = 'Authentication required') { + this.code(401).send({ + statusCode: 401, + error: 'Unauthorized', + message, + }); +}); + +app.decorateReply('notFound', function (resource = 'Resource') { + this.code(404).send({ + statusCode: 404, + error: 'Not Found', + message: `${resource} not found`, + }); +}); + +// Usage +app.get('/users/:id', async (request, reply) => { + const user = await db.users.findById(request.params.id); + if (!user) { + return reply.notFound('User'); + } + return reply.ok(user); +}); + +app.post('/users', async (request, reply) => { + const user = await db.users.create(request.body); + return reply.created(user); +}); +``` + +## Checking Decorators + +Check if decorators exist before using: + +```typescript +// Check at registration time +app.register(async function (fastify) { + if (!fastify.hasDecorator('db')) { + throw new Error('Database decorator required'); + } + + if (!fastify.hasRequestDecorator('user')) { + throw new Error('User request decorator required'); + } + + if (!fastify.hasReplyDecorator('sendError')) { + throw new Error('sendError reply decorator required'); + } + + // Safe to use decorators +}); +``` + +## Decorator Encapsulation + +Decorators respect encapsulation by default: + +```typescript +app.register(async function pluginA(fastify) { + fastify.decorate('pluginAUtil', () => 'A'); + + fastify.get('/a', async function () { + return this.pluginAUtil(); // Works + }); +}); + +app.register(async function pluginB(fastify) { + // this.pluginAUtil is NOT available here (encapsulated) + + fastify.get('/b', 
async function () { + // this.pluginAUtil() would be undefined + }); +}); +``` + +Use `fastify-plugin` to share decorators: + +```typescript +import fp from 'fastify-plugin'; + +export default fp(async function sharedDecorator(fastify) { + fastify.decorate('sharedUtil', () => 'shared'); +}); + +// Now available to parent and sibling plugins +``` + +## Functional Decorators + +Create decorators that return functions: + +```typescript +declare module 'fastify' { + interface FastifyInstance { + createValidator: (schema: object) => (data: unknown) => T; + createRateLimiter: (options: RateLimitOptions) => RateLimiter; + } +} + +app.decorate('createValidator', function (schema: object) { + const validate = ajv.compile(schema); + return (data: unknown): T => { + if (!validate(data)) { + throw new ValidationError(validate.errors); + } + return data as T; + }; +}); + +// Usage +const validateUser = app.createValidator(userSchema); + +app.post('/users', async (request) => { + const user = validateUser(request.body); + return db.users.create(user); +}); +``` + +## Async Decorator Initialization + +Handle async initialization properly: + +```typescript +import fp from 'fastify-plugin'; + +export default fp(async function asyncPlugin(fastify) { + // Async initialization + const connection = await createAsyncConnection(); + const cache = await initializeCache(); + + fastify.decorate('asyncService', { + connection, + cache, + query: async (sql: string) => connection.query(sql), + }); + + fastify.addHook('onClose', async () => { + await connection.close(); + await cache.disconnect(); + }); +}); + +// Plugin is fully initialized before routes execute +app.get('/data', async function () { + return this.asyncService.query('SELECT * FROM data'); +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/deployment.md b/.agents/skills/fastify-best-practices/rules/deployment.md new file mode 100644 index 0000000..00a29eb --- /dev/null +++ 
b/.agents/skills/fastify-best-practices/rules/deployment.md @@ -0,0 +1,425 @@ +--- +name: deployment +description: Production deployment for Fastify applications +metadata: + tags: deployment, production, docker, kubernetes, scaling +--- + +# Production Deployment + +## Graceful Shutdown with close-with-grace + +Use `close-with-grace` for proper shutdown handling: + +```typescript +import Fastify from 'fastify'; +import closeWithGrace from 'close-with-grace'; + +const app = Fastify({ logger: true }); + +// Register plugins and routes +await app.register(import('./plugins/index.js')); +await app.register(import('./routes/index.js')); + +// Graceful shutdown handler +closeWithGrace({ delay: 10000 }, async ({ signal, err }) => { + if (err) { + app.log.error({ err }, 'Server closing due to error'); + } else { + app.log.info({ signal }, 'Server closing due to signal'); + } + + await app.close(); +}); + +// Start server +await app.listen({ + port: parseInt(process.env.PORT || '3000', 10), + host: '0.0.0.0', +}); + +app.log.info(`Server listening on ${app.server.address()}`); +``` + +## Health Check Endpoints + +Implement comprehensive health checks: + +```typescript +app.get('/health', async () => { + return { status: 'ok', timestamp: new Date().toISOString() }; +}); + +app.get('/health/live', async () => { + return { status: 'ok' }; +}); + +app.get('/health/ready', async (request, reply) => { + const checks = { + database: false, + cache: false, + }; + + try { + await app.db`SELECT 1`; + checks.database = true; + } catch { + // Database not ready + } + + try { + await app.cache.ping(); + checks.cache = true; + } catch { + // Cache not ready + } + + const allHealthy = Object.values(checks).every(Boolean); + + if (!allHealthy) { + reply.code(503); + } + + return { + status: allHealthy ? 
'ok' : 'degraded', + checks, + timestamp: new Date().toISOString(), + }; +}); + +// Detailed health for monitoring +app.get('/health/details', { + preHandler: [app.authenticate, app.requireAdmin], +}, async () => { + const memory = process.memoryUsage(); + + return { + status: 'ok', + uptime: process.uptime(), + memory: { + heapUsed: Math.round(memory.heapUsed / 1024 / 1024), + heapTotal: Math.round(memory.heapTotal / 1024 / 1024), + rss: Math.round(memory.rss / 1024 / 1024), + }, + version: process.env.APP_VERSION, + nodeVersion: process.version, + }; +}); +``` + +## Docker Configuration + +Create an optimized Dockerfile: + +```dockerfile +# Build stage +FROM node:22-alpine AS builder + +WORKDIR /app + +COPY package*.json ./ +RUN npm ci --only=production + +COPY . . + +# Production stage +FROM node:22-alpine + +WORKDIR /app + +# Run as non-root user +RUN addgroup -g 1001 -S nodejs && \ + adduser -S nodejs -u 1001 + +# Copy from builder +COPY --from=builder --chown=nodejs:nodejs /app/node_modules ./node_modules +COPY --from=builder --chown=nodejs:nodejs /app/src ./src +COPY --from=builder --chown=nodejs:nodejs /app/package.json ./ + +USER nodejs + +EXPOSE 3000 + +ENV NODE_ENV=production +ENV PORT=3000 + +# Health check +HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3000/health || exit 1 + +CMD ["node", "src/app.ts"] +``` + +```yaml +# docker-compose.yml +services: + api: + build: . 
+ ports: + - "3000:3000" + environment: + - NODE_ENV=production + - DATABASE_URL=postgres://user:pass@db:5432/app + - JWT_SECRET=${JWT_SECRET} + depends_on: + db: + condition: service_healthy + restart: unless-stopped + + db: + image: postgres:16-alpine + environment: + - POSTGRES_USER=user + - POSTGRES_PASSWORD=pass + - POSTGRES_DB=app + volumes: + - pgdata:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U user -d app"] + interval: 5s + timeout: 5s + retries: 5 + +volumes: + pgdata: +``` + +## Kubernetes Deployment + +Deploy to Kubernetes: + +```yaml +# deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: fastify-api +spec: + replicas: 3 + selector: + matchLabels: + app: fastify-api + template: + metadata: + labels: + app: fastify-api + spec: + containers: + - name: api + image: my-registry/fastify-api:latest + ports: + - containerPort: 3000 + env: + - name: NODE_ENV + value: "production" + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: api-secrets + key: database-url + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + livenessProbe: + httpGet: + path: /health/live + port: 3000 + initialDelaySeconds: 5 + periodSeconds: 10 + readinessProbe: + httpGet: + path: /health/ready + port: 3000 + initialDelaySeconds: 5 + periodSeconds: 5 + lifecycle: + preStop: + exec: + command: ["/bin/sh", "-c", "sleep 5"] +--- +apiVersion: v1 +kind: Service +metadata: + name: fastify-api +spec: + selector: + app: fastify-api + ports: + - port: 80 + targetPort: 3000 + type: ClusterIP +``` + +## Production Logger Configuration + +Configure logging for production: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + logger: { + level: process.env.LOG_LEVEL || 'info', + // JSON output for log aggregation + formatters: { + level: (label) => ({ level: label }), + bindings: (bindings) => ({ + pid: bindings.pid, + hostname: bindings.hostname, + service: 
'fastify-api', + version: process.env.APP_VERSION, + }), + }, + timestamp: () => `,"time":"${new Date().toISOString()}"`, + // Redact sensitive data + redact: { + paths: [ + 'req.headers.authorization', + 'req.headers.cookie', + '*.password', + '*.token', + '*.secret', + ], + censor: '[REDACTED]', + }, + }, +}); +``` + +## Request Timeouts + +Configure appropriate timeouts: + +```typescript +const app = Fastify({ + connectionTimeout: 30000, // 30s connection timeout + keepAliveTimeout: 72000, // 72s keep-alive (longer than ALB 60s) + requestTimeout: 30000, // 30s request timeout + bodyLimit: 1048576, // 1MB body limit +}); + +// Per-route timeout +app.get('/long-operation', { + config: { + timeout: 60000, // 60s for this route + }, +}, longOperationHandler); +``` + +## Trust Proxy Settings + +Configure for load balancers: + +```typescript +const app = Fastify({ + // Trust first proxy (load balancer) + trustProxy: true, + + // Or trust specific proxies + trustProxy: ['127.0.0.1', '10.0.0.0/8'], + + // Or number of proxies to trust + trustProxy: 1, +}); + +// Now request.ip returns real client IP +``` + +## Static File Serving + +Serve static files efficiently. 
**Always use `import.meta.dirname` as the base path**, never `process.cwd()`: + +```typescript +import fastifyStatic from '@fastify/static'; +import { join } from 'node:path'; + +app.register(fastifyStatic, { + root: join(import.meta.dirname, '..', 'public'), + prefix: '/static/', + maxAge: '1d', + immutable: true, + etag: true, + lastModified: true, +}); +``` + +## Compression + +Enable response compression: + +```typescript +import fastifyCompress from '@fastify/compress'; + +app.register(fastifyCompress, { + global: true, + threshold: 1024, // Only compress > 1KB + encodings: ['gzip', 'deflate'], +}); +``` + +## Metrics and Monitoring + +Expose Prometheus metrics: + +```typescript +import { register, collectDefaultMetrics, Counter, Histogram } from 'prom-client'; + +collectDefaultMetrics(); + +const httpRequestDuration = new Histogram({ + name: 'http_request_duration_seconds', + help: 'Duration of HTTP requests in seconds', + labelNames: ['method', 'route', 'status'], + buckets: [0.01, 0.05, 0.1, 0.5, 1, 5], +}); + +const httpRequestTotal = new Counter({ + name: 'http_requests_total', + help: 'Total number of HTTP requests', + labelNames: ['method', 'route', 'status'], +}); + +app.addHook('onResponse', (request, reply, done) => { + const route = request.routeOptions.url || request.url; + const labels = { + method: request.method, + route, + status: reply.statusCode, + }; + + httpRequestDuration.observe(labels, reply.elapsedTime / 1000); + httpRequestTotal.inc(labels); + done(); +}); + +app.get('/metrics', async (request, reply) => { + reply.header('Content-Type', register.contentType); + return register.metrics(); +}); +``` + +## Zero-Downtime Deployments + +Support rolling updates: + +```typescript +import closeWithGrace from 'close-with-grace'; + +// Stop accepting new connections gracefully +closeWithGrace({ delay: 30000 }, async ({ signal }) => { + app.log.info({ signal }, 'Received shutdown signal'); + + // Stop accepting new connections + // Existing 
connections continue to be served + + // Wait for in-flight requests (handled by close-with-grace delay) + await app.close(); + + app.log.info('Server closed'); +}); +``` + diff --git a/.agents/skills/fastify-best-practices/rules/error-handling.md b/.agents/skills/fastify-best-practices/rules/error-handling.md new file mode 100644 index 0000000..8e43c85 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/error-handling.md @@ -0,0 +1,412 @@ +--- +name: error-handling +description: Error handling patterns in Fastify +metadata: + tags: errors, exceptions, error-handler, validation +--- + +# Error Handling in Fastify + +## Default Error Handler + +Fastify has a built-in error handler. Thrown errors automatically become HTTP responses: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ logger: true }); + +app.get('/users/:id', async (request) => { + const user = await findUser(request.params.id); + if (!user) { + // Throwing an error with statusCode sets the response status + const error = new Error('User not found'); + error.statusCode = 404; + throw error; + } + return user; +}); +``` + +## Custom Error Classes + +Use `@fastify/error` for creating typed errors: + +```typescript +import createError from '@fastify/error'; + +const NotFoundError = createError('NOT_FOUND', '%s not found', 404); +const UnauthorizedError = createError('UNAUTHORIZED', 'Authentication required', 401); +const ForbiddenError = createError('FORBIDDEN', 'Access denied: %s', 403); +const ValidationError = createError('VALIDATION_ERROR', '%s', 400); +const ConflictError = createError('CONFLICT', '%s already exists', 409); + +// Usage +app.get('/users/:id', async (request) => { + const user = await findUser(request.params.id); + if (!user) { + throw new NotFoundError('User'); + } + return user; +}); + +app.post('/users', async (request) => { + const exists = await userExists(request.body.email); + if (exists) { + throw new ConflictError('Email'); + } + return 
createUser(request.body); +}); +``` + +## Custom Error Handler + +Implement a centralized error handler: + +```typescript +import Fastify from 'fastify'; +import type { FastifyError, FastifyRequest, FastifyReply } from 'fastify'; + +const app = Fastify({ logger: true }); + +app.setErrorHandler((error: FastifyError, request: FastifyRequest, reply: FastifyReply) => { + // Log the error + request.log.error({ err: error }, 'Request error'); + + // Handle validation errors + if (error.validation) { + return reply.code(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Validation failed', + details: error.validation, + }); + } + + // Handle known errors with status codes + const statusCode = error.statusCode ?? 500; + const code = error.code ?? 'INTERNAL_ERROR'; + + // Don't expose internal error details in production + const message = statusCode >= 500 && process.env.NODE_ENV === 'production' + ? 'Internal Server Error' + : error.message; + + return reply.code(statusCode).send({ + statusCode, + error: code, + message, + }); +}); +``` + +## Error Response Schema + +Define consistent error response schemas: + +```typescript +app.addSchema({ + $id: 'httpError', + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + details: { + type: 'array', + items: { + type: 'object', + properties: { + field: { type: 'string' }, + message: { type: 'string' }, + }, + }, + }, + }, + required: ['statusCode', 'error', 'message'], +}); + +// Use in route schemas +app.get('/users/:id', { + schema: { + params: { + type: 'object', + properties: { id: { type: 'string' } }, + required: ['id'], + }, + response: { + 200: { $ref: 'user#' }, + 404: { $ref: 'httpError#' }, + 500: { $ref: 'httpError#' }, + }, + }, +}, handler); +``` + +## Reply Helpers with @fastify/sensible + +Use `@fastify/sensible` for standard HTTP errors: + +```typescript +import fastifySensible from '@fastify/sensible'; + 
+app.register(fastifySensible); + +app.get('/users/:id', async (request, reply) => { + const user = await findUser(request.params.id); + if (!user) { + return reply.notFound('User not found'); + } + if (!hasAccess(request.user, user)) { + return reply.forbidden('You cannot access this user'); + } + return user; +}); + +// Available methods: +// reply.badRequest(message?) +// reply.unauthorized(message?) +// reply.forbidden(message?) +// reply.notFound(message?) +// reply.methodNotAllowed(message?) +// reply.conflict(message?) +// reply.gone(message?) +// reply.unprocessableEntity(message?) +// reply.tooManyRequests(message?) +// reply.internalServerError(message?) +// reply.notImplemented(message?) +// reply.badGateway(message?) +// reply.serviceUnavailable(message?) +// reply.gatewayTimeout(message?) +``` + +## Async Error Handling + +Errors in async handlers are automatically caught: + +```typescript +// Errors are automatically caught and passed to error handler +app.get('/users', async (request) => { + const users = await db.users.findAll(); // If this throws, error handler catches it + return users; +}); + +// Explicit error handling for custom logic +app.get('/users/:id', async (request, reply) => { + try { + const user = await db.users.findById(request.params.id); + if (!user) { + return reply.code(404).send({ error: 'User not found' }); + } + return user; + } catch (error) { + // Transform database errors + if (error.code === 'CONNECTION_ERROR') { + request.log.error({ err: error }, 'Database connection failed'); + return reply.code(503).send({ error: 'Service temporarily unavailable' }); + } + throw error; // Re-throw for error handler + } +}); +``` + +## Hook Error Handling + +Errors in hooks are handled the same way: + +```typescript +app.addHook('onRequest', async (request, reply) => { + const token = request.headers.authorization; + if (!token) { + // This error goes to the error handler + throw new UnauthorizedError(); + } + + try { + request.user = 
await verifyToken(token); + } catch (error) { + throw new UnauthorizedError(); + } +}); + +// Or use reply to send response directly +app.addHook('onRequest', async (request, reply) => { + if (!request.headers.authorization) { + reply.code(401).send({ error: 'Unauthorized' }); + return; // Must return to stop processing + } +}); +``` + +## Not Found Handler + +Customize the 404 response: + +```typescript +app.setNotFoundHandler(async (request, reply) => { + return reply.code(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Route ${request.method} ${request.url} not found`, + }); +}); + +// With schema validation +app.setNotFoundHandler({ + preValidation: async (request, reply) => { + // Pre-validation hook for 404 handler + }, +}, async (request, reply) => { + return reply.code(404).send({ error: 'Not Found' }); +}); +``` + +## Error Wrapping + +Wrap external errors with context: + +```typescript +import createError from '@fastify/error'; + +const DatabaseError = createError('DATABASE_ERROR', 'Database operation failed: %s', 500); +const ExternalServiceError = createError('EXTERNAL_SERVICE_ERROR', 'External service failed: %s', 502); + +app.get('/users/:id', async (request) => { + try { + return await db.users.findById(request.params.id); + } catch (error) { + throw new DatabaseError(error.message, { cause: error }); + } +}); + +app.get('/weather', async (request) => { + try { + return await weatherApi.fetch(request.query.city); + } catch (error) { + throw new ExternalServiceError(error.message, { cause: error }); + } +}); +``` + +## Validation Error Customization + +Customize validation error format: + +```typescript +app.setErrorHandler((error, request, reply) => { + if (error.validation) { + const details = error.validation.map((err) => { + const field = err.instancePath + ? 
err.instancePath.slice(1).replace(/\//g, '.') + : err.params?.missingProperty || 'unknown'; + + return { + field, + message: err.message, + value: err.data, + }; + }); + + return reply.code(400).send({ + statusCode: 400, + error: 'Validation Error', + message: `Invalid ${error.validationContext}: ${details.map(d => d.field).join(', ')}`, + details, + }); + } + + // Handle other errors... + throw error; +}); +``` + +## Error Cause Chain + +Preserve error chains for debugging: + +```typescript +app.get('/complex-operation', async (request) => { + try { + await step1(); + } catch (error) { + const wrapped = new Error('Step 1 failed', { cause: error }); + wrapped.statusCode = 500; + throw wrapped; + } +}); + +// In error handler, log the full chain +app.setErrorHandler((error, request, reply) => { + // Log error with cause chain + let current = error; + const chain = []; + while (current) { + chain.push({ + message: current.message, + code: current.code, + stack: current.stack, + }); + current = current.cause; + } + + request.log.error({ errorChain: chain }, 'Request failed'); + + reply.code(error.statusCode || 500).send({ + error: error.message, + }); +}); +``` + +## Plugin-Scoped Error Handlers + +Set error handlers at the plugin level: + +```typescript +app.register(async function apiRoutes(fastify) { + // This error handler only applies to routes in this plugin + fastify.setErrorHandler((error, request, reply) => { + request.log.error({ err: error }, 'API error'); + + reply.code(error.statusCode || 500).send({ + error: { + code: error.code || 'API_ERROR', + message: error.message, + }, + }); + }); + + fastify.get('/data', async () => { + throw new Error('API-specific error'); + }); +}, { prefix: '/api' }); +``` + +## Graceful Error Recovery + +Handle errors gracefully without crashing: + +```typescript +app.get('/resilient', async (request, reply) => { + const results = await Promise.allSettled([ + fetchPrimaryData(), + fetchSecondaryData(), + fetchOptionalData(), 
+ ]); + + const [primary, secondary, optional] = results; + + if (primary.status === 'rejected') { + // Primary data is required + throw new Error('Primary data unavailable'); + } + + return { + data: primary.value, + secondary: secondary.status === 'fulfilled' ? secondary.value : null, + optional: optional.status === 'fulfilled' ? optional.value : null, + warnings: results + .filter((r) => r.status === 'rejected') + .map((r) => r.reason.message), + }; +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/hooks.md b/.agents/skills/fastify-best-practices/rules/hooks.md new file mode 100644 index 0000000..d992a27 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/hooks.md @@ -0,0 +1,464 @@ +--- +name: hooks +description: Hooks and request lifecycle in Fastify +metadata: + tags: hooks, lifecycle, middleware, onRequest, preHandler +--- + +# Hooks and Request Lifecycle + +## Request Lifecycle Overview + +Fastify executes hooks in a specific order: + +``` +Incoming Request + | + onRequest + | + preParsing + | + preValidation + | + preHandler + | + Handler + | + preSerialization + | + onSend + | + onResponse +``` + +## onRequest Hook + +First hook to execute, before body parsing. 
Use for authentication, request ID setup: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Global onRequest hook +app.addHook('onRequest', async (request, reply) => { + request.startTime = Date.now(); + request.log.info({ url: request.url, method: request.method }, 'Request started'); +}); + +// Authentication check +app.addHook('onRequest', async (request, reply) => { + // Skip auth for public routes + if (request.url.startsWith('/public')) { + return; + } + + const token = request.headers.authorization?.replace('Bearer ', ''); + if (!token) { + reply.code(401).send({ error: 'Unauthorized' }); + return; // Stop processing + } + + try { + request.user = await verifyToken(token); + } catch { + reply.code(401).send({ error: 'Invalid token' }); + } +}); +``` + +## preParsing Hook + +Execute before body parsing. Can modify the payload stream: + +```typescript +app.addHook('preParsing', async (request, reply, payload) => { + // Log raw payload size + request.log.debug({ contentLength: request.headers['content-length'] }, 'Parsing body'); + + // Return modified payload stream if needed + return payload; +}); + +// Decompress incoming data +app.addHook('preParsing', async (request, reply, payload) => { + if (request.headers['content-encoding'] === 'gzip') { + return payload.pipe(zlib.createGunzip()); + } + return payload; +}); +``` + +## preValidation Hook + +Execute after parsing, before schema validation: + +```typescript +app.addHook('preValidation', async (request, reply) => { + // Modify body before validation + if (request.body && typeof request.body === 'object') { + // Normalize data + request.body.email = request.body.email?.toLowerCase().trim(); + } +}); + +// Rate limiting check +app.addHook('preValidation', async (request, reply) => { + const key = request.ip; + const count = await redis.incr(`ratelimit:${key}`); + + if (count === 1) { + await redis.expire(`ratelimit:${key}`, 60); + } + + if (count > 100) { + 
reply.code(429).send({ error: 'Too many requests' }); + } +}); +``` + +## preHandler Hook + +Most common hook, execute after validation, before handler: + +```typescript +// Authorization check +app.addHook('preHandler', async (request, reply) => { + const { userId } = request.params as { userId: string }; + + if (request.user.id !== userId && !request.user.isAdmin) { + reply.code(403).send({ error: 'Forbidden' }); + } +}); + +// Load related data +app.addHook('preHandler', async (request, reply) => { + if (request.params?.projectId) { + request.project = await db.projects.findById(request.params.projectId); + if (!request.project) { + reply.code(404).send({ error: 'Project not found' }); + } + } +}); + +// Transaction wrapper +app.addHook('preHandler', async (request) => { + request.transaction = await db.beginTransaction(); +}); + +app.addHook('onResponse', async (request) => { + if (request.transaction) { + await request.transaction.commit(); + } +}); + +app.addHook('onError', async (request, reply, error) => { + if (request.transaction) { + await request.transaction.rollback(); + } +}); +``` + +## preSerialization Hook + +Modify payload before serialization: + +```typescript +app.addHook('preSerialization', async (request, reply, payload) => { + // Add metadata to all responses + if (payload && typeof payload === 'object') { + return { + ...payload, + _meta: { + requestId: request.id, + timestamp: new Date().toISOString(), + }, + }; + } + return payload; +}); + +// Remove sensitive fields +app.addHook('preSerialization', async (request, reply, payload) => { + if (payload?.user?.password) { + const { password, ...user } = payload.user; + return { ...payload, user }; + } + return payload; +}); +``` + +## onSend Hook + +Modify response after serialization: + +```typescript +app.addHook('onSend', async (request, reply, payload) => { + // Add response headers + reply.header('X-Response-Time', Date.now() - request.startTime); + + // Compress response + if (payload && 
payload.length > 1024) { + const compressed = await gzip(payload); + reply.header('Content-Encoding', 'gzip'); + return compressed; + } + + return payload; +}); + +// Transform JSON string response +app.addHook('onSend', async (request, reply, payload) => { + if (reply.getHeader('content-type')?.includes('application/json')) { + // payload is already a string at this point + return payload; + } + return payload; +}); +``` + +## onResponse Hook + +Execute after response is sent. Cannot modify response: + +```typescript +app.addHook('onResponse', async (request, reply) => { + // Log response time + const responseTime = Date.now() - request.startTime; + request.log.info({ + method: request.method, + url: request.url, + statusCode: reply.statusCode, + responseTime, + }, 'Request completed'); + + // Track metrics + metrics.histogram('http_request_duration', responseTime, { + method: request.method, + route: request.routeOptions.url, + status: reply.statusCode, + }); +}); +``` + +## onError Hook + +Execute when an error is thrown: + +```typescript +app.addHook('onError', async (request, reply, error) => { + // Log error details + request.log.error({ + err: error, + url: request.url, + method: request.method, + body: request.body, + }, 'Request error'); + + // Track error metrics + metrics.increment('http_errors', { + error: error.code || 'UNKNOWN', + route: request.routeOptions.url, + }); + + // Cleanup resources + if (request.tempFile) { + await fs.unlink(request.tempFile).catch(() => {}); + } +}); +``` + +## onTimeout Hook + +Execute when request times out: + +```typescript +const app = Fastify({ + connectionTimeout: 30000, // 30 seconds +}); + +app.addHook('onTimeout', async (request, reply) => { + request.log.warn({ + url: request.url, + method: request.method, + }, 'Request timeout'); + + // Cleanup + if (request.abortController) { + request.abortController.abort(); + } +}); +``` + +## onRequestAbort Hook + +Execute when client closes connection: + +```typescript 
+app.addHook('onRequestAbort', async (request) => { + request.log.info('Client aborted request'); + + // Cancel ongoing operations + if (request.abortController) { + request.abortController.abort(); + } + + // Cleanup uploaded files + if (request.uploadedFiles) { + for (const file of request.uploadedFiles) { + await fs.unlink(file.path).catch(() => {}); + } + } +}); +``` + +## Application Lifecycle Hooks + +Hooks that run at application startup/shutdown: + +```typescript +// After all plugins are loaded +app.addHook('onReady', async function () { + this.log.info('Server is ready'); + + // Initialize connections + await this.db.connect(); + await this.redis.connect(); + + // Warm caches + await this.cache.warmup(); +}); + +// When server is closing +app.addHook('onClose', async function () { + this.log.info('Server is closing'); + + // Cleanup connections + await this.db.close(); + await this.redis.disconnect(); +}); + +// After routes are registered +app.addHook('onRoute', (routeOptions) => { + console.log(`Route registered: ${routeOptions.method} ${routeOptions.url}`); + + // Track all routes + routes.push({ + method: routeOptions.method, + url: routeOptions.url, + schema: routeOptions.schema, + }); +}); + +// After plugin is registered +app.addHook('onRegister', (instance, options) => { + console.log(`Plugin registered with prefix: ${options.prefix}`); +}); +``` + +## Scoped Hooks + +Hooks are scoped to their encapsulation context: + +```typescript +app.addHook('onRequest', async (request) => { + // Runs for ALL routes + request.log.info('Global hook'); +}); + +app.register(async function adminRoutes(fastify) { + // Only runs for routes in this plugin + fastify.addHook('onRequest', async (request, reply) => { + if (!request.user?.isAdmin) { + reply.code(403).send({ error: 'Admin only' }); + } + }); + + fastify.get('/admin/users', async () => { + return { users: [] }; + }); +}, { prefix: '/admin' }); +``` + +## Hook Execution Order + +Multiple hooks of the same 
type execute in registration order: + +```typescript +app.addHook('onRequest', async () => { + console.log('First'); +}); + +app.addHook('onRequest', async () => { + console.log('Second'); +}); + +app.addHook('onRequest', async () => { + console.log('Third'); +}); + +// Output: First, Second, Third +``` + +## Stopping Hook Execution + +Return early from hooks to stop processing: + +```typescript +app.addHook('preHandler', async (request, reply) => { + if (!request.user) { + // Send response and return to stop further processing + reply.code(401).send({ error: 'Unauthorized' }); + return; + } + // Continue to next hook and handler +}); +``` + +## Route-Level Hooks + +Add hooks to specific routes: + +```typescript +const adminOnlyHook = async (request, reply) => { + if (!request.user?.isAdmin) { + reply.code(403).send({ error: 'Forbidden' }); + } +}; + +app.get('/admin/settings', { + preHandler: [adminOnlyHook], + handler: async (request) => { + return { settings: {} }; + }, +}); + +// Multiple hooks +app.post('/orders', { + preValidation: [validateApiKey], + preHandler: [loadUser, checkQuota, logOrder], + handler: createOrderHandler, +}); +``` + +## Async Hook Patterns + +Always use async/await in hooks: + +```typescript +// GOOD - async hook +app.addHook('preHandler', async (request, reply) => { + const user = await loadUser(request.headers.authorization); + request.user = user; +}); + +// AVOID - callback style (deprecated) +app.addHook('preHandler', (request, reply, done) => { + loadUser(request.headers.authorization) + .then((user) => { + request.user = user; + done(); + }) + .catch(done); +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/http-proxy.md b/.agents/skills/fastify-best-practices/rules/http-proxy.md new file mode 100644 index 0000000..e4e0884 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/http-proxy.md @@ -0,0 +1,247 @@ +--- +name: http-proxy +description: HTTP proxying and reply.from() in Fastify +metadata: + tags: 
proxy, gateway, reverse-proxy, microservices +--- + +# HTTP Proxy and Reply.from() + +## @fastify/http-proxy + +Use `@fastify/http-proxy` for simple reverse proxy scenarios: + +```typescript +import Fastify from 'fastify'; +import httpProxy from '@fastify/http-proxy'; + +const app = Fastify({ logger: true }); + +// Proxy all requests to /api/* to another service +app.register(httpProxy, { + upstream: 'http://backend-service:3001', + prefix: '/api', + rewritePrefix: '/v1', + http2: false, +}); + +// With authentication +app.register(httpProxy, { + upstream: 'http://internal-api:3002', + prefix: '/internal', + preHandler: async (request, reply) => { + // Verify authentication before proxying + if (!request.headers.authorization) { + reply.code(401).send({ error: 'Unauthorized' }); + } + }, +}); + +await app.listen({ port: 3000 }); +``` + +## @fastify/reply-from + +For more control over proxying, use `@fastify/reply-from` with `reply.from()`: + +```typescript +import Fastify from 'fastify'; +import replyFrom from '@fastify/reply-from'; + +const app = Fastify({ logger: true }); + +app.register(replyFrom, { + base: 'http://backend-service:3001', + http2: false, +}); + +// Proxy with request/response manipulation +app.get('/users/:id', async (request, reply) => { + const { id } = request.params; + + return reply.from(`/api/users/${id}`, { + // Modify request before forwarding + rewriteRequestHeaders: (originalReq, headers) => ({ + ...headers, + 'x-request-id': request.id, + 'x-forwarded-for': request.ip, + }), + // Modify response before sending + onResponse: (request, reply, res) => { + reply.header('x-proxy', 'fastify'); + reply.send(res); + }, + }); +}); + +// Conditional routing +app.all('/api/*', async (request, reply) => { + const upstream = selectUpstream(request); + + return reply.from(request.url, { + base: upstream, + }); +}); + +function selectUpstream(request) { + // Route to different backends based on request + if (request.headers['x-beta']) { + return 
'http://beta-backend:3001'; + } + return 'http://stable-backend:3001'; +} +``` + +## API Gateway Pattern + +Build an API gateway with multiple backends: + +```typescript +import Fastify from 'fastify'; +import replyFrom from '@fastify/reply-from'; + +const app = Fastify({ logger: true }); + +// Configure multiple upstreams +const services = { + users: 'http://users-service:3001', + orders: 'http://orders-service:3002', + products: 'http://products-service:3003', +}; + +app.register(replyFrom); + +// Route to user service +app.register(async function (fastify) { + fastify.all('/*', async (request, reply) => { + return reply.from(request.url.replace('/users', ''), { + base: services.users, + }); + }); +}, { prefix: '/users' }); + +// Route to orders service +app.register(async function (fastify) { + fastify.all('/*', async (request, reply) => { + return reply.from(request.url.replace('/orders', ''), { + base: services.orders, + }); + }); +}, { prefix: '/orders' }); + +// Route to products service +app.register(async function (fastify) { + fastify.all('/*', async (request, reply) => { + return reply.from(request.url.replace('/products', ''), { + base: services.products, + }); + }); +}, { prefix: '/products' }); +``` + +## Request Body Handling + +Handle request bodies when proxying: + +```typescript +app.post('/api/data', async (request, reply) => { + return reply.from('/data', { + body: request.body, + contentType: request.headers['content-type'], + }); +}); + +// Stream large bodies +app.post('/upload', async (request, reply) => { + return reply.from('/upload', { + body: request.raw, + contentType: request.headers['content-type'], + }); +}); +``` + +## Error Handling + +Handle upstream errors gracefully: + +```typescript +app.register(replyFrom, { + base: 'http://backend:3001', + // Called when upstream returns an error + onError: (reply, error) => { + reply.log.error({ err: error }, 'Proxy error'); + reply.code(502).send({ + error: 'Bad Gateway', + message: 
'Upstream service unavailable', + }); + }, +}); + +// Custom error handling per route +app.get('/data', async (request, reply) => { + try { + return await reply.from('/data'); + } catch (error) { + request.log.error({ err: error }, 'Failed to proxy request'); + return reply.code(503).send({ + error: 'Service Unavailable', + retryAfter: 30, + }); + } +}); +``` + +## WebSocket Proxying + +Proxy WebSocket connections: + +```typescript +import Fastify from 'fastify'; +import httpProxy from '@fastify/http-proxy'; + +const app = Fastify({ logger: true }); + +app.register(httpProxy, { + upstream: 'http://ws-backend:3001', + prefix: '/ws', + websocket: true, +}); +``` + +## Timeout Configuration + +Configure proxy timeouts: + +```typescript +app.register(replyFrom, { + base: 'http://backend:3001', + http: { + requestOptions: { + timeout: 30000, // 30 seconds + }, + }, +}); +``` + +## Caching Proxied Responses + +Add caching to proxied responses: + +```typescript +import { createCache } from 'async-cache-dedupe'; + +const cache = createCache({ + ttl: 60, + storage: { type: 'memory' }, +}); + +cache.define('proxyGet', async (url: string) => { + const response = await fetch(`http://backend:3001${url}`); + return response.json(); +}); + +app.get('/cached/*', async (request, reply) => { + const data = await cache.proxyGet(request.url); + return data; +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/logging.md b/.agents/skills/fastify-best-practices/rules/logging.md new file mode 100644 index 0000000..fdc2c61 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/logging.md @@ -0,0 +1,402 @@ +--- +name: logging +description: Logging with Pino in Fastify +metadata: + tags: logging, pino, debugging, observability +--- + +# Logging with Pino + +## Built-in Pino Integration + +Fastify uses Pino for high-performance logging: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + logger: true, // Enable default logging +}); + +// Or with 
configuration +const app = Fastify({ + logger: { + level: 'info', + transport: { + target: 'pino-pretty', + options: { + colorize: true, + }, + }, + }, +}); +``` + +## Log Levels + +Available log levels (in order of severity): + +```typescript +app.log.trace('Detailed debugging'); +app.log.debug('Debugging information'); +app.log.info('General information'); +app.log.warn('Warning messages'); +app.log.error('Error messages'); +app.log.fatal('Fatal errors'); +``` + +## Request-Scoped Logging + +Each request has its own logger with request context: + +```typescript +app.get('/users/:id', async (request) => { + // Logs include request ID automatically + request.log.info('Fetching user'); + + const user = await db.users.findById(request.params.id); + + if (!user) { + request.log.warn({ userId: request.params.id }, 'User not found'); + return { error: 'Not found' }; + } + + request.log.info({ userId: user.id }, 'User fetched'); + return user; +}); +``` + +## Structured Logging + +Always use structured logging with objects: + +```typescript +// GOOD - structured, searchable +request.log.info({ + action: 'user_created', + userId: user.id, + email: user.email, +}, 'User created successfully'); + +request.log.error({ + err: error, + userId: request.params.id, + operation: 'fetch_user', +}, 'Failed to fetch user'); + +// BAD - unstructured, hard to parse +request.log.info(`User ${user.id} created with email ${user.email}`); +request.log.error(`Failed to fetch user: ${error.message}`); +``` + +## Logging Configuration by Environment + +```typescript +function getLoggerConfig() { + if (process.env.NODE_ENV === 'production') { + return { + level: 'info', + // JSON output for log aggregation + }; + } + + if (process.env.NODE_ENV === 'test') { + return false; // Disable logging in tests + } + + // Development + return { + level: 'debug', + transport: { + target: 'pino-pretty', + options: { + colorize: true, + translateTime: 'HH:MM:ss Z', + ignore: 'pid,hostname', + }, + }, + }; 
+} + +const app = Fastify({ + logger: getLoggerConfig(), +}); +``` + +## Custom Serializers + +Customize how objects are serialized: + +```typescript +const app = Fastify({ + logger: { + level: 'info', + serializers: { + // Customize request serialization + req: (request) => ({ + method: request.method, + url: request.url, + headers: { + host: request.headers.host, + 'user-agent': request.headers['user-agent'], + }, + remoteAddress: request.ip, + }), + + // Customize response serialization + res: (response) => ({ + statusCode: response.statusCode, + }), + + // Custom serializer for users + user: (user) => ({ + id: user.id, + email: user.email, + // Exclude sensitive fields + }), + }, + }, +}); + +// Use custom serializer +request.log.info({ user: request.user }, 'User action'); +``` + +## Redacting Sensitive Data + +Prevent logging sensitive information: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + logger: { + level: 'info', + redact: { + paths: [ + 'req.headers.authorization', + 'req.headers.cookie', + 'body.password', + 'body.creditCard', + '*.password', + '*.secret', + '*.token', + ], + censor: '[REDACTED]', + }, + }, +}); +``` + +## Child Loggers + +Create child loggers with additional context: + +```typescript +app.addHook('onRequest', async (request) => { + // Add user context to all logs for this request + if (request.user) { + request.log = request.log.child({ + userId: request.user.id, + userRole: request.user.role, + }); + } +}); + +// Service-level child logger +const userService = { + log: app.log.child({ service: 'UserService' }), + + async create(data) { + this.log.info({ email: data.email }, 'Creating user'); + // ... 
+ }, +}; +``` + +## Request Logging Configuration + +Customize automatic request logging: + +```typescript +const app = Fastify({ + logger: true, + disableRequestLogging: true, // Disable default request/response logs +}); + +// Custom request logging +app.addHook('onRequest', async (request) => { + request.log.info({ + method: request.method, + url: request.url, + query: request.query, + }, 'Request received'); +}); + +app.addHook('onResponse', async (request, reply) => { + request.log.info({ + statusCode: reply.statusCode, + responseTime: reply.elapsedTime, + }, 'Request completed'); +}); +``` + +## Logging Errors + +Properly log errors with stack traces: + +```typescript +app.setErrorHandler((error, request, reply) => { + // Log error with full details + request.log.error({ + err: error, // Pino serializes error objects properly + url: request.url, + method: request.method, + body: request.body, + query: request.query, + }, 'Request error'); + + reply.code(error.statusCode || 500).send({ + error: error.message, + }); +}); + +// In handlers +app.get('/data', async (request) => { + try { + return await fetchData(); + } catch (error) { + request.log.error({ err: error }, 'Failed to fetch data'); + throw error; + } +}); +``` + +## Log Destinations + +Configure where logs are sent: + +```typescript +import { createWriteStream } from 'node:fs'; + +// File output +const app = Fastify({ + logger: { + level: 'info', + stream: createWriteStream('./app.log'), + }, +}); + +// Multiple destinations with pino.multistream +import pino from 'pino'; + +const streams = [ + { stream: process.stdout }, + { stream: createWriteStream('./app.log') }, + { level: 'error', stream: createWriteStream('./error.log') }, +]; + +const app = Fastify({ + logger: pino({ level: 'info' }, pino.multistream(streams)), +}); +``` + +## Log Rotation + +Use pino-roll for log rotation: + +```bash +node app.js | pino-roll --frequency daily --extension .log +``` + +Or configure programmatically: + 
+```typescript +import { createStream } from 'rotating-file-stream'; + +const stream = createStream('app.log', { + size: '10M', // Rotate every 10MB + interval: '1d', // Rotate daily + compress: 'gzip', + path: './logs', +}); + +const app = Fastify({ + logger: { + level: 'info', + stream, + }, +}); +``` + +## Log Aggregation + +Format logs for aggregation services: + +```typescript +// For ELK Stack, Datadog, etc. - use default JSON format +const app = Fastify({ + logger: { + level: 'info', + // Default JSON output works with most log aggregators + }, +}); + +// Add service metadata +const app = Fastify({ + logger: { + level: 'info', + base: { + service: 'user-api', + version: process.env.APP_VERSION, + environment: process.env.NODE_ENV, + }, + }, +}); +``` + +## Request ID Tracking + +Use request IDs for distributed tracing: + +```typescript +const app = Fastify({ + logger: true, + requestIdHeader: 'x-request-id', // Use incoming header + genReqId: (request) => { + // Generate ID if not provided + return request.headers['x-request-id'] || crypto.randomUUID(); + }, +}); + +// Forward request ID to downstream services +app.addHook('onRequest', async (request) => { + request.requestId = request.id; +}); + +// Include in outgoing requests +const response = await fetch('http://other-service/api', { + headers: { + 'x-request-id': request.id, + }, +}); +``` + +## Performance Considerations + +Pino is fast, but consider: + +```typescript +// Avoid string concatenation in log calls +// BAD +request.log.info('User ' + user.id + ' did ' + action); + +// GOOD +request.log.info({ userId: user.id, action }, 'User action'); + +// Use appropriate log levels +// Don't log at info level in hot paths +if (app.log.isLevelEnabled('debug')) { + request.log.debug({ details: expensiveToCompute() }, 'Debug info'); +} +``` diff --git a/.agents/skills/fastify-best-practices/rules/performance.md b/.agents/skills/fastify-best-practices/rules/performance.md new file mode 100644 index 
0000000..7f59b7d --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/performance.md @@ -0,0 +1,425 @@ +--- +name: performance +description: Performance optimization for Fastify applications +metadata: + tags: performance, optimization, speed, benchmarking +--- + +# Performance Optimization + +## Fastify is Fast by Default + +Fastify is designed for performance. Key optimizations are built-in: + +- Fast JSON serialization with `fast-json-stringify` +- Efficient routing with `find-my-way` +- Schema-based validation with `ajv` (compiled validators) +- Low overhead request/response handling + +## Use @fastify/under-pressure for Load Shedding + +Protect your application from overload with `@fastify/under-pressure`: + +```typescript +import underPressure from '@fastify/under-pressure'; + +app.register(underPressure, { + maxEventLoopDelay: 1000, // Max event loop delay in ms + maxHeapUsedBytes: 1000000000, // Max heap used (~1GB) + maxRssBytes: 1500000000, // Max RSS (~1.5GB) + maxEventLoopUtilization: 0.98, // Max event loop utilization + pressureHandler: (request, reply, type, value) => { + reply.code(503).send({ + error: 'Service Unavailable', + message: `Server under pressure: ${type}`, + }); + }, +}); + +// Health check that respects pressure +app.get('/health', async (request, reply) => { + return { status: 'ok' }; +}); +``` + +## Always Define Response Schemas + +Response schemas enable fast-json-stringify, which is significantly faster than JSON.stringify: + +```typescript +// FAST - uses fast-json-stringify +app.get('/users', { + schema: { + response: { + 200: { + type: 'array', + items: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + email: { type: 'string' }, + }, + }, + }, + }, + }, +}, async () => { + return db.users.findAll(); +}); + +// SLOW - uses JSON.stringify +app.get('/users-slow', async () => { + return db.users.findAll(); +}); +``` + +## Avoid Dynamic Schema Compilation + +Add schemas at 
startup, not at request time: + +```typescript +// GOOD - schemas compiled at startup +app.addSchema({ $id: 'user', ... }); + +app.get('/users', { + schema: { response: { 200: { $ref: 'user#' } } }, +}, handler); + +// BAD - schema compiled per request +app.get('/users', async (request, reply) => { + const schema = getSchemaForUser(request.user); + // This is slow! +}); +``` + +## Use Logger Wisely + +Pino is fast, but excessive logging has overhead: + +```typescript +import Fastify from 'fastify'; + +// Set log level via environment variable +const app = Fastify({ + logger: { + level: process.env.LOG_LEVEL || 'info', + }, +}); + +// Avoid logging large objects +app.get('/data', async (request) => { + // BAD - logs entire payload + request.log.info({ data: largeObject }, 'Processing'); + + // GOOD - log only what's needed + request.log.info({ id: largeObject.id }, 'Processing'); + + return largeObject; +}); +``` + +## Connection Pooling + +Use connection pools for databases: + +```typescript +import postgres from 'postgres'; + +// Create pool at startup +const sql = postgres(process.env.DATABASE_URL, { + max: 20, // Maximum pool size + idle_timeout: 20, + connect_timeout: 10, +}); + +app.decorate('db', sql); + +// Connections are reused +app.get('/users', async () => { + return app.db`SELECT * FROM users LIMIT 100`; +}); +``` + +## Avoid Blocking the Event Loop + +Use `piscina` for CPU-intensive operations. 
It provides a robust worker thread pool: + +```typescript +import Piscina from 'piscina'; +import { join } from 'node:path'; + +const piscina = new Piscina({ + filename: join(import.meta.dirname, 'workers', 'compute.js'), +}); + +app.post('/compute', async (request) => { + const result = await piscina.run(request.body); + return result; +}); +``` + +```typescript +// workers/compute.js +export default function compute(data) { + // CPU-intensive work here + return processedResult; +} +``` + +## Stream Large Responses + +Stream large payloads instead of buffering: + +```typescript +import { createReadStream } from 'node:fs'; +import { pipeline } from 'node:stream/promises'; + +// GOOD - stream file +app.get('/large-file', async (request, reply) => { + const stream = createReadStream('./large-file.json'); + reply.type('application/json'); + return reply.send(stream); +}); + +// BAD - load entire file into memory +app.get('/large-file-bad', async () => { + const content = await fs.readFile('./large-file.json', 'utf-8'); + return JSON.parse(content); +}); + +// Stream database results +app.get('/export', async (request, reply) => { + reply.type('application/json'); + + const cursor = db.users.findCursor(); + reply.raw.write('['); + + let first = true; + for await (const user of cursor) { + if (!first) reply.raw.write(','); + reply.raw.write(JSON.stringify(user)); + first = false; + } + + reply.raw.write(']'); + reply.raw.end(); +}); +``` + +## Caching Strategies + +Implement caching for expensive operations: + +```typescript +import { LRUCache } from 'lru-cache'; + +const cache = new LRUCache({ + max: 1000, + ttl: 60000, // 1 minute +}); + +app.get('/expensive/:id', async (request) => { + const { id } = request.params; + const cacheKey = `expensive:${id}`; + + const cached = cache.get(cacheKey); + if (cached) { + return cached; + } + + const result = await expensiveOperation(id); + cache.set(cacheKey, result); + + return result; +}); + +// Cache control headers 
+app.get('/static-data', async (request, reply) => { + reply.header('Cache-Control', 'public, max-age=3600'); + return { data: 'static' }; +}); +``` + +## Request Coalescing with async-cache-dedupe + +Use `async-cache-dedupe` for deduplicating concurrent identical requests and caching: + +```typescript +import { createCache } from 'async-cache-dedupe'; + +const cache = createCache({ + ttl: 60, // seconds + stale: 5, // serve stale while revalidating + storage: { type: 'memory' }, +}); + +cache.define('fetchData', async (id: string) => { + return db.findById(id); +}); + +app.get('/data/:id', async (request) => { + const { id } = request.params; + // Automatically deduplicates concurrent requests for the same id + // and caches the result + return cache.fetchData(id); +}); +``` + +For distributed caching, use Redis storage: + +```typescript +import { createCache } from 'async-cache-dedupe'; +import Redis from 'ioredis'; + +const redis = new Redis(process.env.REDIS_URL); + +const cache = createCache({ + ttl: 60, + storage: { type: 'redis', options: { client: redis } }, +}); +``` + +## Payload Limits + +Set appropriate payload limits: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + bodyLimit: 1048576, // 1MB default +}); + +// Per-route limit for file uploads +app.post('/upload', { + bodyLimit: 10485760, // 10MB for this route +}, uploadHandler); +``` + +## Compression + +Use compression for responses: + +```typescript +import fastifyCompress from '@fastify/compress'; + +app.register(fastifyCompress, { + global: true, + threshold: 1024, // Only compress responses > 1KB + encodings: ['gzip', 'deflate'], +}); + +// Disable for specific route +app.get('/already-compressed', { + compress: false, +}, handler); +``` + +## Connection Timeouts + +Configure appropriate timeouts: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + connectionTimeout: 30000, // 30 seconds + keepAliveTimeout: 5000, // 5 seconds +}); + +// 
Per-route timeout +app.get('/long-operation', { + config: { + timeout: 60000, // 60 seconds + }, +}, async (request) => { + return longOperation(); +}); +``` + +## Disable Unnecessary Features + +Disable features you don't need: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + disableRequestLogging: true, // If you don't need request logs + trustProxy: false, // If not behind proxy + caseSensitive: true, // Enable for slight performance gain + ignoreDuplicateSlashes: false, +}); +``` + +## Benchmarking + +Use autocannon for load testing: + +```bash +# Install +npm install -g autocannon + +# Basic benchmark +autocannon http://localhost:3000/api/users + +# With options +autocannon -c 100 -d 30 -p 10 http://localhost:3000/api/users +# -c: connections +# -d: duration in seconds +# -p: pipelining factor +``` + +```typescript +// Programmatic benchmarking +import autocannon from 'autocannon'; + +const result = await autocannon({ + url: 'http://localhost:3000/api/users', + connections: 100, + duration: 30, + pipelining: 10, +}); + +console.log(autocannon.printResult(result)); +``` + +## Profiling + +Use `@platformatic/flame` for flame graph profiling: + +```bash +npx @platformatic/flame app.js +``` + +This generates an interactive flame graph to identify performance bottlenecks. 
+ +## Memory Management + +Monitor and optimize memory usage: + +```typescript +// Add health endpoint with memory info +app.get('/health', async () => { + const memory = process.memoryUsage(); + return { + status: 'ok', + memory: { + heapUsed: Math.round(memory.heapUsed / 1024 / 1024) + 'MB', + heapTotal: Math.round(memory.heapTotal / 1024 / 1024) + 'MB', + rss: Math.round(memory.rss / 1024 / 1024) + 'MB', + }, + }; +}); + +// Avoid memory leaks in closures +app.addHook('onRequest', async (request) => { + // BAD - holding reference to large object + const largeData = await loadLargeData(); + request.getData = () => largeData; + + // GOOD - load on demand + request.getData = () => loadLargeData(); +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/plugins.md b/.agents/skills/fastify-best-practices/rules/plugins.md new file mode 100644 index 0000000..f76a474 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/plugins.md @@ -0,0 +1,320 @@ +--- +name: plugins +description: Plugin development and encapsulation in Fastify +metadata: + tags: plugins, encapsulation, modules, architecture +--- + +# Plugin Development and Encapsulation + +## Understanding Encapsulation + +Fastify's plugin system provides automatic encapsulation. 
Each plugin creates its own context, isolating decorators, hooks, and plugins registered within it: + +```typescript +import Fastify from 'fastify'; +import fp from 'fastify-plugin'; + +const app = Fastify(); + +// This plugin is encapsulated - its decorators are NOT available to siblings +app.register(async function childPlugin(fastify) { + fastify.decorate('privateUtil', () => 'only available here'); + + // This decorator is only available within this plugin and its children + fastify.get('/child', async function (request, reply) { + return this.privateUtil(); + }); +}); + +// This route CANNOT access privateUtil - it's in a different context +app.get('/parent', async function (request, reply) { + // this.privateUtil is undefined here + return { status: 'ok' }; +}); +``` + +## Breaking Encapsulation with fastify-plugin + +Use `fastify-plugin` when you need to share decorators, hooks, or plugins with the parent context: + +```typescript +import fp from 'fastify-plugin'; + +// This plugin's decorators will be available to the parent and siblings +export default fp(async function databasePlugin(fastify, options) { + const db = await createConnection(options.connectionString); + + fastify.decorate('db', db); + + fastify.addHook('onClose', async () => { + await db.close(); + }); +}, { + name: 'database-plugin', + dependencies: [], // List plugin dependencies +}); +``` + +## Plugin Registration Order + +Plugins are registered in order, but loading is asynchronous. 
Use `after()` for sequential dependencies: + +```typescript +import Fastify from 'fastify'; +import databasePlugin from './plugins/database.js'; +import authPlugin from './plugins/auth.js'; +import routesPlugin from './routes/index.js'; + +const app = Fastify(); + +// Database must be ready before auth +app.register(databasePlugin); + +// Auth depends on database +app.register(authPlugin); + +// Routes depend on both +app.register(routesPlugin); + +// Or use after() for explicit sequencing +app.register(databasePlugin).after(() => { + app.register(authPlugin).after(() => { + app.register(routesPlugin); + }); +}); + +await app.ready(); +``` + +## Plugin Options + +Always validate and document plugin options: + +```typescript +import fp from 'fastify-plugin'; + +interface CachePluginOptions { + ttl: number; + maxSize?: number; + prefix?: string; +} + +export default fp(async function cachePlugin(fastify, options) { + const { ttl, maxSize = 1000, prefix = 'cache:' } = options; + + if (typeof ttl !== 'number' || ttl <= 0) { + throw new Error('Cache plugin requires a positive ttl option'); + } + + const cache = new Map(); + + fastify.decorate('cache', { + get(key: string): unknown | undefined { + const item = cache.get(prefix + key); + if (!item) return undefined; + if (Date.now() > item.expires) { + cache.delete(prefix + key); + return undefined; + } + return item.value; + }, + set(key: string, value: unknown): void { + if (cache.size >= maxSize) { + const firstKey = cache.keys().next().value; + cache.delete(firstKey); + } + cache.set(prefix + key, { value, expires: Date.now() + ttl }); + }, + }); +}, { + name: 'cache-plugin', +}); +``` + +## Plugin Factory Pattern + +Create configurable plugins using factory functions: + +```typescript +import fp from 'fastify-plugin'; + +interface RateLimitOptions { + max: number; + timeWindow: number; +} + +function createRateLimiter(defaults: Partial<RateLimitOptions> = {}) { + return fp(async function rateLimitPlugin(fastify, options) { + const
config = { ...defaults, ...options }; + + // Implementation + fastify.decorate('rateLimit', config); + }, { + name: 'rate-limiter', + }); +} + +// Usage +app.register(createRateLimiter({ max: 100 }), { timeWindow: 60000 }); +``` + +## Plugin Dependencies + +Declare dependencies to ensure proper load order: + +```typescript +import fp from 'fastify-plugin'; + +export default fp(async function authPlugin(fastify) { + // This plugin requires 'database-plugin' to be loaded first + if (!fastify.hasDecorator('db')) { + throw new Error('Auth plugin requires database plugin'); + } + + fastify.decorate('authenticate', async (request) => { + const user = await fastify.db.users.findByToken(request.headers.authorization); + return user; + }); +}, { + name: 'auth-plugin', + dependencies: ['database-plugin'], +}); +``` + +## Scoped Plugins for Route Groups + +Use encapsulation to scope plugins to specific routes: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Public routes - no auth required +app.register(async function publicRoutes(fastify) { + fastify.get('/health', async () => ({ status: 'ok' })); + fastify.get('/docs', async () => ({ version: '1.0.0' })); +}); + +// Protected routes - auth required +app.register(async function protectedRoutes(fastify) { + // Auth hook only applies to routes in this plugin + fastify.addHook('onRequest', async (request, reply) => { + const token = request.headers.authorization; + if (!token) { + reply.code(401).send({ error: 'Unauthorized' }); + return; + } + request.user = await verifyToken(token); + }); + + fastify.get('/profile', async (request) => { + return { user: request.user }; + }); + + fastify.get('/settings', async (request) => { + return { settings: await getSettings(request.user.id) }; + }); +}); +``` + +## Prefix Routes with Register + +Use the `prefix` option to namespace routes: + +```typescript +app.register(import('./routes/users.js'), { prefix: '/api/v1/users' }); 
+app.register(import('./routes/posts.js'), { prefix: '/api/v1/posts' }); + +// In routes/users.js +export default async function userRoutes(fastify) { + // Becomes /api/v1/users + fastify.get('/', async () => { + return { users: [] }; + }); + + // Becomes /api/v1/users/:id + fastify.get('/:id', async (request) => { + return { user: { id: request.params.id } }; + }); +} +``` + +## Plugin Metadata + +Add metadata for documentation and tooling: + +```typescript +import fp from 'fastify-plugin'; + +async function metricsPlugin(fastify) { + // Implementation +} + +export default fp(metricsPlugin, { + name: 'metrics-plugin', + fastify: '5.x', // Fastify version compatibility + dependencies: ['pino-plugin'], + decorators: { + fastify: ['db'], // Required decorators + request: [], + reply: [], + }, +}); +``` + +## Autoload Plugins + +Use `@fastify/autoload` for automatic plugin loading: + +```typescript +import Fastify from 'fastify'; +import autoload from '@fastify/autoload'; +import { fileURLToPath } from 'node:url'; +import { dirname, join } from 'node:path'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +const app = Fastify(); + +// Load all plugins from the plugins directory +app.register(autoload, { + dir: join(__dirname, 'plugins'), + options: { prefix: '/api' }, +}); + +// Load all routes from the routes directory +app.register(autoload, { + dir: join(__dirname, 'routes'), + options: { prefix: '/api' }, +}); +``` + +## Testing Plugins in Isolation + +Test plugins independently: + +```typescript +import { describe, it, before, after } from 'node:test'; +import Fastify from 'fastify'; +import myPlugin from './my-plugin.js'; + +describe('MyPlugin', () => { + let app; + + before(async () => { + app = Fastify(); + app.register(myPlugin, { option: 'value' }); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + it('should decorate fastify instance', (t) => { + t.assert.ok(app.hasDecorator('myDecorator')); + }); +}); +``` 
diff --git a/.agents/skills/fastify-best-practices/rules/routes.md b/.agents/skills/fastify-best-practices/rules/routes.md new file mode 100644 index 0000000..2924560 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/routes.md @@ -0,0 +1,467 @@ +--- +name: routes +description: Route organization and handlers in Fastify +metadata: + tags: routes, handlers, http, rest, api +--- + +# Route Organization and Handlers + +## Basic Route Definition + +Define routes with the shorthand methods or the full route method: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Shorthand methods +app.get('/users', async (request, reply) => { + return { users: [] }; +}); + +app.post('/users', async (request, reply) => { + return { created: true }; +}); + +// Full route method with all options +app.route({ + method: 'GET', + url: '/users/:id', + schema: { + params: { + type: 'object', + properties: { + id: { type: 'string' }, + }, + required: ['id'], + }, + }, + handler: async (request, reply) => { + return { id: request.params.id }; + }, +}); +``` + +## Route Parameters + +Access URL parameters through `request.params`: + +```typescript +// Single parameter +app.get('/users/:id', async (request) => { + const { id } = request.params as { id: string }; + return { userId: id }; +}); + +// Multiple parameters +app.get('/users/:userId/posts/:postId', async (request) => { + const { userId, postId } = request.params as { userId: string; postId: string }; + return { userId, postId }; +}); + +// Wildcard parameter (captures everything after) +app.get('/files/*', async (request) => { + const path = (request.params as { '*': string })['*']; + return { filePath: path }; +}); + +// Regex parameters (Fastify uses find-my-way) +app.get('/orders/:id(\\d+)', async (request) => { + // Only matches numeric IDs + const { id } = request.params as { id: string }; + return { orderId: parseInt(id, 10) }; +}); +``` + +## Query String Parameters + +Access query 
parameters through `request.query`: + +```typescript +app.get('/search', { + schema: { + querystring: { + type: 'object', + properties: { + q: { type: 'string' }, + page: { type: 'integer', default: 1 }, + limit: { type: 'integer', default: 10, maximum: 100 }, + }, + required: ['q'], + }, + }, + handler: async (request) => { + const { q, page, limit } = request.query as { + q: string; + page: number; + limit: number; + }; + return { query: q, page, limit }; + }, +}); +``` + +## Request Body + +Access the request body through `request.body`: + +```typescript +app.post('/users', { + schema: { + body: { + type: 'object', + properties: { + name: { type: 'string', minLength: 1 }, + email: { type: 'string', format: 'email' }, + age: { type: 'integer', minimum: 0 }, + }, + required: ['name', 'email'], + }, + }, + handler: async (request, reply) => { + const user = request.body as { name: string; email: string; age?: number }; + // Create user... + reply.code(201); + return { user }; + }, +}); +``` + +## Headers + +Access request headers through `request.headers`: + +```typescript +app.get('/protected', { + schema: { + headers: { + type: 'object', + properties: { + authorization: { type: 'string' }, + }, + required: ['authorization'], + }, + }, + handler: async (request) => { + const token = request.headers.authorization; + return { authenticated: true }; + }, +}); +``` + +## Reply Methods + +Use reply methods to control the response: + +```typescript +app.get('/examples', async (request, reply) => { + // Set status code + reply.code(201); + + // Set headers + reply.header('X-Custom-Header', 'value'); + reply.headers({ 'X-Another': 'value', 'X-Third': 'value' }); + + // Set content type + reply.type('application/json'); + + // Redirect + // reply.redirect('/other-url'); + // reply.redirect(301, '/permanent-redirect'); + + // Return response (automatic serialization) + return { status: 'ok' }; +}); + +// Explicit send (useful in non-async handlers) +app.get('/explicit', 
(request, reply) => { + reply.send({ status: 'ok' }); +}); + +// Stream response +app.get('/stream', async (request, reply) => { + const stream = fs.createReadStream('./large-file.txt'); + reply.type('text/plain'); + return reply.send(stream); +}); +``` + +## Route Organization by Feature + +Organize routes by feature/domain in separate files: + +``` +src/ + routes/ + users/ + index.ts # Route definitions + handlers.ts # Handler functions + schemas.ts # JSON schemas + posts/ + index.ts + handlers.ts + schemas.ts +``` + +```typescript +// routes/users/schemas.ts +export const userSchema = { + type: 'object', + properties: { + id: { type: 'string', format: 'uuid' }, + name: { type: 'string' }, + email: { type: 'string', format: 'email' }, + }, +}; + +export const createUserSchema = { + body: { + type: 'object', + properties: { + name: { type: 'string', minLength: 1 }, + email: { type: 'string', format: 'email' }, + }, + required: ['name', 'email'], + }, + response: { + 201: userSchema, + }, +}; + +// routes/users/handlers.ts +import type { FastifyRequest, FastifyReply } from 'fastify'; + +export async function createUser( + request: FastifyRequest<{ Body: { name: string; email: string } }>, + reply: FastifyReply, +) { + const { name, email } = request.body; + const user = await request.server.db.users.create({ name, email }); + reply.code(201); + return user; +} + +export async function getUsers(request: FastifyRequest) { + return request.server.db.users.findAll(); +} + +// routes/users/index.ts +import type { FastifyInstance } from 'fastify'; +import { createUser, getUsers } from './handlers.js'; +import { createUserSchema } from './schemas.js'; + +export default async function userRoutes(fastify: FastifyInstance) { + fastify.get('/', getUsers); + fastify.post('/', { schema: createUserSchema }, createUser); +} +``` + +## Route Constraints + +Add constraints to routes for versioning or host-based routing: + +```typescript +// Version constraint +app.get('/users', { + 
constraints: { version: '1.0.0' }, + handler: async () => ({ version: '1.0.0', users: [] }), +}); + +app.get('/users', { + constraints: { version: '2.0.0' }, + handler: async () => ({ version: '2.0.0', data: { users: [] } }), +}); + +// Client sends: Accept-Version: 1.0.0 + +// Host constraint +app.get('/', { + constraints: { host: 'api.example.com' }, + handler: async () => ({ api: true }), +}); + +app.get('/', { + constraints: { host: 'www.example.com' }, + handler: async () => ({ web: true }), +}); +``` + +## Route Prefixing + +Use prefixes to namespace routes: + +```typescript +// Using register +app.register(async function (fastify) { + fastify.get('/list', async () => ({ users: [] })); + fastify.get('/:id', async (request) => ({ id: request.params.id })); +}, { prefix: '/users' }); + +// Results in: +// GET /users/list +// GET /users/:id +``` + +## Multiple Methods + +Handle multiple HTTP methods with one handler: + +```typescript +app.route({ + method: ['GET', 'HEAD'], + url: '/resource', + handler: async (request) => { + return { data: 'resource' }; + }, +}); +``` + +## 404 Handler + +Customize the not found handler: + +```typescript +app.setNotFoundHandler({ + preValidation: async (request, reply) => { + // Optional pre-validation hook + }, + preHandler: async (request, reply) => { + // Optional pre-handler hook + }, +}, async (request, reply) => { + reply.code(404); + return { + error: 'Not Found', + message: `Route ${request.method} ${request.url} not found`, + statusCode: 404, + }; +}); +``` + +## Method Not Allowed + +Handle method not allowed responses: + +```typescript +// Fastify doesn't have built-in 405 handling +// Implement with a custom not found handler that checks allowed methods +app.setNotFoundHandler(async (request, reply) => { + // Check if the URL exists with a different method + const route = app.hasRoute({ + url: request.url, + method: 'GET', // Check other methods + }); + + if (route) { + reply.code(405); + return { error: 'Method Not 
Allowed' }; + } + + reply.code(404); + return { error: 'Not Found' }; +}); +``` + +## Route-Level Configuration + +Apply configuration to specific routes: + +```typescript +app.get('/slow-operation', { + config: { + rateLimit: { max: 10, timeWindow: '1 minute' }, + }, + handler: async (request) => { + return { result: await slowOperation() }; + }, +}); + +// Access config in hooks +app.addHook('onRequest', async (request, reply) => { + const config = request.routeOptions.config; + if (config.rateLimit) { + // Apply rate limiting + } +}); +``` + +## Async Route Registration + +Register routes from async sources: + +```typescript +app.register(async function (fastify) { + const routeConfigs = await loadRoutesFromDatabase(); + + for (const config of routeConfigs) { + fastify.route({ + method: config.method, + url: config.path, + handler: createDynamicHandler(config), + }); + } +}); +``` + +## Auto-loading Routes with @fastify/autoload + +Use `@fastify/autoload` to automatically load routes from a directory structure: + +```typescript +import Fastify from 'fastify'; +import autoload from '@fastify/autoload'; +import { join } from 'node:path'; + +const app = Fastify({ logger: true }); + +// Auto-load plugins +app.register(autoload, { + dir: join(import.meta.dirname, 'plugins'), + options: { prefix: '' }, +}); + +// Auto-load routes +app.register(autoload, { + dir: join(import.meta.dirname, 'routes'), + options: { prefix: '/api' }, +}); + +await app.listen({ port: 3000 }); +``` + +Directory structure: + +``` +src/ + plugins/ + database.ts # Loaded automatically + auth.ts # Loaded automatically + routes/ + users/ + index.ts # GET/POST /api/users + _id/ + index.ts # GET/PUT/DELETE /api/users/:id + posts/ + index.ts # GET/POST /api/posts +``` + +Route file example: + +```typescript +// routes/users/index.ts +import type { FastifyPluginAsync } from 'fastify'; + +const users: FastifyPluginAsync = async (fastify) => { + fastify.get('/', async () => { + return 
fastify.repositories.users.findAll(); + }); + + fastify.post('/', async (request) => { + return fastify.repositories.users.create(request.body); + }); +}; + +export default users; +``` diff --git a/.agents/skills/fastify-best-practices/rules/schemas.md b/.agents/skills/fastify-best-practices/rules/schemas.md new file mode 100644 index 0000000..404f3f9 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/schemas.md @@ -0,0 +1,585 @@ +--- +name: schemas +description: JSON Schema validation in Fastify with TypeBox +metadata: + tags: validation, json-schema, schemas, ajv, typebox +--- + +# JSON Schema Validation + +## Use TypeBox for Type-Safe Schemas + +**Prefer TypeBox for defining schemas.** It provides TypeScript types automatically and compiles to JSON Schema: + +```typescript +import Fastify from 'fastify'; +import { Type, type Static } from '@sinclair/typebox'; + +const app = Fastify(); + +// Define schema with TypeBox - get TypeScript types for free +const CreateUserBody = Type.Object({ + name: Type.String({ minLength: 1, maxLength: 100 }), + email: Type.String({ format: 'email' }), + age: Type.Optional(Type.Integer({ minimum: 0, maximum: 150 })), +}); + +const UserResponse = Type.Object({ + id: Type.String({ format: 'uuid' }), + name: Type.String(), + email: Type.String(), + createdAt: Type.String({ format: 'date-time' }), +}); + +// TypeScript types are derived automatically +type CreateUserBodyType = Static<typeof CreateUserBody>; +type UserResponseType = Static<typeof UserResponse>; + +app.post<{ + Body: CreateUserBodyType; + Reply: UserResponseType; +}>('/users', { + schema: { + body: CreateUserBody, + response: { + 201: UserResponse, + }, + }, +}, async (request, reply) => { + // request.body is fully typed as CreateUserBodyType + const user = await createUser(request.body); + reply.code(201); + return user; +}); +``` + +## TypeBox Common Patterns + +```typescript +import { Type, type Static } from '@sinclair/typebox'; + +// Enums +const Status = Type.Union([ + Type.Literal('active'),
Type.Literal('inactive'), + Type.Literal('pending'), +]); + +// Arrays +const Tags = Type.Array(Type.String(), { minItems: 1, maxItems: 10 }); + +// Nested objects +const Address = Type.Object({ + street: Type.String(), + city: Type.String(), + country: Type.String(), + zip: Type.Optional(Type.String()), +}); + +// References (reusable schemas) +const User = Type.Object({ + id: Type.String({ format: 'uuid' }), + name: Type.String(), + address: Address, + tags: Tags, + status: Status, +}); + +// Nullable +const NullableString = Type.Union([Type.String(), Type.Null()]); + +// Record/Map +const Metadata = Type.Record(Type.String(), Type.Unknown()); +``` + +## Register TypeBox Schemas Globally + +```typescript +import { Type, type Static } from '@sinclair/typebox'; + +// Define shared schemas +const ErrorResponse = Type.Object({ + error: Type.String(), + message: Type.String(), + statusCode: Type.Integer(), +}); + +const PaginationQuery = Type.Object({ + page: Type.Integer({ minimum: 1, default: 1 }), + limit: Type.Integer({ minimum: 1, maximum: 100, default: 20 }), +}); + +// Register globally - TypeBox schemas are plain JSON Schema objects, so attach an $id directly +app.addSchema({ ...ErrorResponse, $id: 'ErrorResponse' }); +app.addSchema({ ...PaginationQuery, $id: 'PaginationQuery' }); + +// Reference in routes +app.get('/items', { + schema: { + querystring: { $ref: 'PaginationQuery#' }, + response: { + 400: { $ref: 'ErrorResponse#' }, + }, + }, +}, handler); +``` + +## Plain JSON Schema (Alternative) + +You can also use plain JSON Schema directly: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +const createUserSchema = { + body: { + type: 'object', + properties: { + name: { type: 'string', minLength: 1, maxLength: 100 }, + email: { type: 'string', format: 'email' }, + age: { type: 'integer', minimum: 0, maximum: 150 }, + }, + required: ['name', 'email'], + additionalProperties: false, + }, + response: { + 201: { + type: 'object', + properties: { + id: { type: 'string', format:
'uuid' }, + name: { type: 'string' }, + email: { type: 'string' }, + createdAt: { type: 'string', format: 'date-time' }, + }, + }, + }, +}; + +app.post('/users', { schema: createUserSchema }, async (request, reply) => { + const user = await createUser(request.body); + reply.code(201); + return user; +}); +``` + +## Request Validation Parts + +Validate different parts of the request: + +```typescript +const fullRequestSchema = { + // URL parameters + params: { + type: 'object', + properties: { + id: { type: 'string', format: 'uuid' }, + }, + required: ['id'], + }, + + // Query string + querystring: { + type: 'object', + properties: { + include: { type: 'string', enum: ['posts', 'comments', 'all'] }, + limit: { type: 'integer', minimum: 1, maximum: 100, default: 10 }, + }, + }, + + // Request headers + headers: { + type: 'object', + properties: { + 'x-api-key': { type: 'string', minLength: 32 }, + }, + required: ['x-api-key'], + }, + + // Request body + body: { + type: 'object', + properties: { + data: { type: 'object' }, + }, + required: ['data'], + }, +}; + +app.put('/resources/:id', { schema: fullRequestSchema }, handler); +``` + +## Shared Schemas with $id + +Define reusable schemas with `$id` and reference them with `$ref`: + +```typescript +// Add shared schemas to Fastify +app.addSchema({ + $id: 'user', + type: 'object', + properties: { + id: { type: 'string', format: 'uuid' }, + name: { type: 'string' }, + email: { type: 'string', format: 'email' }, + createdAt: { type: 'string', format: 'date-time' }, + }, + required: ['id', 'name', 'email'], +}); + +app.addSchema({ + $id: 'userCreate', + type: 'object', + properties: { + name: { type: 'string', minLength: 1 }, + email: { type: 'string', format: 'email' }, + }, + required: ['name', 'email'], + additionalProperties: false, +}); + +app.addSchema({ + $id: 'error', + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + }, +}); + +// 
Reference shared schemas +app.post('/users', { + schema: { + body: { $ref: 'userCreate#' }, + response: { + 201: { $ref: 'user#' }, + 400: { $ref: 'error#' }, + }, + }, +}, handler); + +app.get('/users/:id', { + schema: { + params: { + type: 'object', + properties: { id: { type: 'string', format: 'uuid' } }, + required: ['id'], + }, + response: { + 200: { $ref: 'user#' }, + 404: { $ref: 'error#' }, + }, + }, +}, handler); +``` + +## Array Schemas + +Define schemas for array responses: + +```typescript +app.addSchema({ + $id: 'userList', + type: 'object', + properties: { + users: { + type: 'array', + items: { $ref: 'user#' }, + }, + total: { type: 'integer' }, + page: { type: 'integer' }, + pageSize: { type: 'integer' }, + }, +}); + +app.get('/users', { + schema: { + querystring: { + type: 'object', + properties: { + page: { type: 'integer', minimum: 1, default: 1 }, + pageSize: { type: 'integer', minimum: 1, maximum: 100, default: 20 }, + }, + }, + response: { + 200: { $ref: 'userList#' }, + }, + }, +}, handler); +``` + +## Custom Formats + +Add custom validation formats: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + ajv: { + customOptions: { + formats: { + 'iso-country': /^[A-Z]{2}$/, + 'phone': /^\+?[1-9]\d{1,14}$/, + }, + }, + }, +}); + +// Or add formats dynamically +app.addSchema({ + $id: 'address', + type: 'object', + properties: { + street: { type: 'string' }, + country: { type: 'string', format: 'iso-country' }, + phone: { type: 'string', format: 'phone' }, + }, +}); +``` + +## Custom Keywords + +Add custom validation keywords: + +```typescript +import Fastify from 'fastify'; +import Ajv from 'ajv'; + +const app = Fastify({ + ajv: { + customOptions: { + keywords: [ + { + keyword: 'isEven', + type: 'number', + validate: (schema: boolean, data: number) => { + if (schema) { + return data % 2 === 0; + } + return true; + }, + errors: false, + }, + ], + }, + }, +}); + +// Use custom keyword +app.post('/numbers', { + schema: { + body: 
{ + type: 'object', + properties: { + value: { type: 'integer', isEven: true }, + }, + }, + }, +}, handler); +``` + +## Coercion + +Fastify coerces types by default for query strings and params: + +```typescript +// Query string "?page=5&active=true" becomes: +// { page: 5, active: true } (number and boolean, not strings) + +app.get('/items', { + schema: { + querystring: { + type: 'object', + properties: { + page: { type: 'integer' }, // "5" -> 5 + active: { type: 'boolean' }, // "true" -> true + tags: { + type: 'array', + items: { type: 'string' }, // "a,b,c" -> ["a", "b", "c"] + }, + }, + }, + }, +}, handler); +``` + +## Validation Error Handling + +Customize validation error responses: + +```typescript +app.setErrorHandler((error, request, reply) => { + if (error.validation) { + reply.code(400).send({ + error: 'Validation Error', + message: 'Request validation failed', + details: error.validation.map((err) => ({ + field: err.instancePath || err.params?.missingProperty, + message: err.message, + keyword: err.keyword, + })), + }); + return; + } + + // Handle other errors + reply.code(error.statusCode || 500).send({ + error: error.name, + message: error.message, + }); +}); +``` + +## Schema Compiler Options + +Configure the Ajv schema compiler: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + ajv: { + customOptions: { + removeAdditional: 'all', // Remove extra properties + useDefaults: true, // Apply default values + coerceTypes: true, // Coerce types + allErrors: true, // Report all errors, not just first + }, + plugins: [ + require('ajv-formats'), // Add format validators + ], + }, +}); +``` + +## Nullable Fields + +Handle nullable fields properly: + +```typescript +app.addSchema({ + $id: 'profile', + type: 'object', + properties: { + name: { type: 'string' }, + bio: { type: ['string', 'null'] }, // Can be string or null + avatar: { + oneOf: [ + { type: 'string', format: 'uri' }, + { type: 'null' }, + ], + }, + }, +}); +``` + +## 
Conditional Validation + +Use if/then/else for conditional validation: + +```typescript +app.addSchema({ + $id: 'payment', + type: 'object', + properties: { + method: { type: 'string', enum: ['card', 'bank'] }, + cardNumber: { type: 'string' }, + bankAccount: { type: 'string' }, + }, + required: ['method'], + if: { + properties: { method: { const: 'card' } }, + }, + then: { + required: ['cardNumber'], + }, + else: { + required: ['bankAccount'], + }, +}); +``` + +## Schema Organization + +Organize schemas in a dedicated file: + +```typescript +// schemas/index.ts +export const schemas = [ + { + $id: 'user', + type: 'object', + properties: { + id: { type: 'string', format: 'uuid' }, + name: { type: 'string' }, + email: { type: 'string', format: 'email' }, + }, + }, + { + $id: 'error', + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + }, + }, +]; + +// app.ts +import { schemas } from './schemas/index.js'; + +for (const schema of schemas) { + app.addSchema(schema); +} +``` + +## OpenAPI/Swagger Integration + +Schemas work directly with @fastify/swagger: + +```typescript +import fastifySwagger from '@fastify/swagger'; +import fastifySwaggerUi from '@fastify/swagger-ui'; + +app.register(fastifySwagger, { + openapi: { + info: { + title: 'My API', + version: '1.0.0', + }, + }, +}); + +app.register(fastifySwaggerUi, { + routePrefix: '/docs', +}); + +// Schemas are automatically converted to OpenAPI definitions +``` + +## Performance Considerations + +Response schemas enable fast-json-stringify for serialization: + +```typescript +// With response schema - uses fast-json-stringify (faster) +app.get('/users', { + schema: { + response: { + 200: { + type: 'array', + items: { $ref: 'user#' }, + }, + }, + }, +}, handler); + +// Without response schema - uses JSON.stringify (slower) +app.get('/users-slow', handler); +``` + +Always define response schemas for production APIs to benefit from optimized 
serialization. diff --git a/.agents/skills/fastify-best-practices/rules/serialization.md b/.agents/skills/fastify-best-practices/rules/serialization.md new file mode 100644 index 0000000..88029b3 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/serialization.md @@ -0,0 +1,475 @@ +--- +name: serialization +description: Response serialization in Fastify with TypeBox +metadata: + tags: serialization, response, json, fast-json-stringify, typebox +--- + +# Response Serialization + +## Use TypeBox for Type-Safe Response Schemas + +Define response schemas with TypeBox for automatic TypeScript types and fast serialization: + +```typescript +import Fastify from 'fastify'; +import { Type, type Static } from '@sinclair/typebox'; + +const app = Fastify(); + +// Define response schema with TypeBox +const UserResponse = Type.Object({ + id: Type.String(), + name: Type.String(), + email: Type.String(), +}); + +const UsersResponse = Type.Array(UserResponse); + +type UserResponseType = Static<typeof UserResponse>; + +// With TypeBox schema - uses fast-json-stringify (faster) + TypeScript types +app.get<{ Reply: Static<typeof UsersResponse> }>('/users', { + schema: { + response: { + 200: UsersResponse, + }, + }, +}, async () => { + return db.users.findAll(); +}); + +// Without schema - uses JSON.stringify (slower), no type safety +app.get('/users-slow', async () => { + return db.users.findAll(); +}); +``` + +## Fast JSON Stringify + +Fastify uses `fast-json-stringify` when response schemas are defined. This provides: + +1. **Performance**: 2-3x faster serialization than JSON.stringify +2. **Security**: Only defined properties are serialized (strips sensitive data) +3. **Type coercion**: Ensures output matches the schema +4. **TypeScript**: Full type inference with TypeBox + +## Response Schema Benefits + +1. **Performance**: 2-3x faster serialization +2. **Security**: Only defined properties are included +3. **Documentation**: OpenAPI/Swagger integration +4. 
**Type coercion**: Ensures correct output types + +```typescript +app.get('/user/:id', { + schema: { + response: { + 200: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + // password is NOT in schema, so it's stripped + }, + }, + }, + }, +}, async (request) => { + const user = await db.users.findById(request.params.id); + // Even if user has password field, it won't be serialized + return user; +}); +``` + +## Multiple Status Code Schemas + +Define schemas for different response codes: + +```typescript +app.get('/users/:id', { + schema: { + response: { + 200: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + email: { type: 'string' }, + }, + }, + 404: { + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + }, + }, + }, + }, +}, async (request, reply) => { + const user = await db.users.findById(request.params.id); + + if (!user) { + reply.code(404); + return { statusCode: 404, error: 'Not Found', message: 'User not found' }; + } + + return user; +}); +``` + +## Default Response Schema + +Use 'default' for common error responses: + +```typescript +app.get('/resource', { + schema: { + response: { + 200: { $ref: 'resource#' }, + '4xx': { + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + }, + }, + '5xx': { + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + }, + }, + }, + }, +}, handler); +``` + +## Custom Serializers + +Create custom serialization functions: + +```typescript +// Per-route serializer +app.get('/custom', { + schema: { + response: { + 200: { + type: 'object', + properties: { + value: { type: 'string' }, + }, + }, + }, + }, + serializerCompiler: ({ schema }) => { + return (data) => { + // Custom serialization logic + return JSON.stringify({ + value: 
String(data.value).toUpperCase(), + serializedAt: new Date().toISOString(), + }); + }; + }, +}, async () => { + return { value: 'hello' }; +}); +``` + +## Shared Serializers + +Use the global serializer compiler: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + serializerCompiler: ({ schema, method, url, httpStatus }) => { + // Custom compilation logic + const stringify = fastJson(schema); + return (data) => stringify(data); + }, +}); +``` + +## Serialization with Type Coercion + +fast-json-stringify coerces types: + +```typescript +app.get('/data', { + schema: { + response: { + 200: { + type: 'object', + properties: { + count: { type: 'integer' }, // '5' -> 5 + active: { type: 'boolean' }, // 'true' -> true + tags: { + type: 'array', + items: { type: 'string' }, // [1, 2] -> ['1', '2'] + }, + }, + }, + }, + }, +}, async () => { + return { + count: '5', // Coerced to integer + active: 'true', // Coerced to boolean + tags: [1, 2, 3], // Coerced to strings + }; +}); +``` + +## Nullable Fields + +Handle nullable fields properly: + +```typescript +app.get('/profile', { + schema: { + response: { + 200: { + type: 'object', + properties: { + name: { type: 'string' }, + bio: { type: ['string', 'null'] }, + avatar: { + oneOf: [ + { type: 'string', format: 'uri' }, + { type: 'null' }, + ], + }, + }, + }, + }, + }, +}, async () => { + return { + name: 'John', + bio: null, + avatar: null, + }; +}); +``` + +## Additional Properties + +Control extra properties in response: + +```typescript +// Strip additional properties (default) +app.get('/strict', { + schema: { + response: { + 200: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + }, + additionalProperties: false, + }, + }, + }, +}, async () => { + return { id: '1', name: 'John', secret: 'hidden' }; + // Output: { "id": "1", "name": "John" } +}); + +// Allow additional properties +app.get('/flexible', { + schema: { + response: { + 200: { + type: 'object', + 
properties: { + id: { type: 'string' }, + }, + additionalProperties: true, + }, + }, + }, +}, async () => { + return { id: '1', extra: 'included' }; + // Output: { "id": "1", "extra": "included" } +}); +``` + +## Nested Objects + +Serialize nested structures: + +```typescript +app.addSchema({ + $id: 'address', + type: 'object', + properties: { + street: { type: 'string' }, + city: { type: 'string' }, + country: { type: 'string' }, + }, +}); + +app.get('/user', { + schema: { + response: { + 200: { + type: 'object', + properties: { + name: { type: 'string' }, + address: { $ref: 'address#' }, + contacts: { + type: 'array', + items: { + type: 'object', + properties: { + type: { type: 'string' }, + value: { type: 'string' }, + }, + }, + }, + }, + }, + }, + }, +}, async () => { + return { + name: 'John', + address: { street: '123 Main', city: 'Boston', country: 'USA' }, + contacts: [ + { type: 'email', value: 'john@example.com' }, + { type: 'phone', value: '+1234567890' }, + ], + }; +}); +``` + +## Date Serialization + +Handle dates consistently: + +```typescript +app.get('/events', { + schema: { + response: { + 200: { + type: 'array', + items: { + type: 'object', + properties: { + name: { type: 'string' }, + date: { type: 'string', format: 'date-time' }, + }, + }, + }, + }, + }, +}, async () => { + const events = await db.events.findAll(); + + // Convert Date objects to ISO strings + return events.map((e) => ({ + ...e, + date: e.date.toISOString(), + })); +}); +``` + +## BigInt Serialization + +Handle BigInt values: + +```typescript +// BigInt is not JSON serializable by default +app.get('/large-number', { + schema: { + response: { + 200: { + type: 'object', + properties: { + id: { type: 'string' }, // Serialize as string + count: { type: 'integer' }, + }, + }, + }, + }, +}, async () => { + const bigValue = 9007199254740993n; + + return { + id: bigValue.toString(), // Convert to string + count: Number(bigValue), // Or number if safe + }; +}); +``` + +## Stream Responses 
+ +Stream responses bypass serialization: + +```typescript +import { createReadStream } from 'node:fs'; + +app.get('/file', async (request, reply) => { + const stream = createReadStream('./data.json'); + reply.type('application/json'); + return reply.send(stream); +}); + +// Streaming JSON array +app.get('/stream', async (request, reply) => { + reply.type('application/json'); + + const cursor = db.users.findCursor(); + + reply.raw.write('['); + let first = true; + + for await (const user of cursor) { + if (!first) reply.raw.write(','); + reply.raw.write(JSON.stringify(user)); + first = false; + } + + reply.raw.write(']'); + reply.raw.end(); +}); +``` + +## Pre-Serialization Hook + +Modify data before serialization: + +```typescript +app.addHook('preSerialization', async (request, reply, payload) => { + // Add metadata to responses + if (payload && typeof payload === 'object' && !Array.isArray(payload)) { + return { + ...payload, + _links: { + self: request.url, + }, + }; + } + return payload; +}); +``` + +## Disable Serialization + +Skip serialization for specific routes: + +```typescript +app.get('/raw', async (request, reply) => { + const data = JSON.stringify({ raw: true }); + reply.type('application/json'); + reply.serializer((payload) => payload); // Pass through + return data; +}); +``` diff --git a/.agents/skills/fastify-best-practices/rules/testing.md b/.agents/skills/fastify-best-practices/rules/testing.md new file mode 100644 index 0000000..beed39c --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/testing.md @@ -0,0 +1,536 @@ +--- +name: testing +description: Testing Fastify applications with inject() +metadata: + tags: testing, inject, node-test, integration, unit +--- + +# Testing Fastify Applications + +## Using inject() for Request Testing + +Fastify's `inject()` method simulates HTTP requests without network overhead: + +```typescript +import { describe, it, before, after } from 'node:test'; +import Fastify from 'fastify'; +import { 
buildApp } from './app.js'; + +describe('User API', () => { + let app; + + before(async () => { + app = await buildApp(); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + it('should return users list', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users', + }); + + t.assert.equal(response.statusCode, 200); + t.assert.equal(response.headers['content-type'], 'application/json; charset=utf-8'); + + const body = response.json(); + t.assert.ok(Array.isArray(body.users)); + }); + + it('should create a user', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { + name: 'John Doe', + email: 'john@example.com', + }, + }); + + t.assert.equal(response.statusCode, 201); + + const body = response.json(); + t.assert.equal(body.name, 'John Doe'); + t.assert.ok(body.id); + }); +}); +``` + +## Testing with Headers and Authentication + +Test authenticated endpoints: + +```typescript +describe('Protected Routes', () => { + let app; + let authToken; + + before(async () => { + app = await buildApp(); + await app.ready(); + + // Get auth token + const loginResponse = await app.inject({ + method: 'POST', + url: '/auth/login', + payload: { + email: 'test@example.com', + password: 'password123', + }, + }); + + authToken = loginResponse.json().token; + }); + + after(async () => { + await app.close(); + }); + + it('should reject unauthenticated requests', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/profile', + }); + + t.assert.equal(response.statusCode, 401); + }); + + it('should return profile for authenticated user', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/profile', + headers: { + authorization: `Bearer ${authToken}`, + }, + }); + + t.assert.equal(response.statusCode, 200); + t.assert.equal(response.json().email, 'test@example.com'); + }); +}); +``` + +## Testing Query Parameters + +Test routes with query 
strings: + +```typescript +it('should filter users by status', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users', + query: { + status: 'active', + page: '1', + limit: '10', + }, + }); + + t.assert.equal(response.statusCode, 200); + const body = response.json(); + t.assert.ok(body.users.every((u) => u.status === 'active')); +}); + +// Or use URL with query string +it('should search users', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users?q=john&sort=name', + }); + + t.assert.equal(response.statusCode, 200); +}); +``` + +## Testing URL Parameters + +Test routes with path parameters: + +```typescript +it('should return user by id', async (t) => { + const userId = 'user-123'; + + const response = await app.inject({ + method: 'GET', + url: `/users/${userId}`, + }); + + t.assert.equal(response.statusCode, 200); + t.assert.equal(response.json().id, userId); +}); + +it('should return 404 for non-existent user', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users/non-existent', + }); + + t.assert.equal(response.statusCode, 404); +}); +``` + +## Testing Validation Errors + +Test schema validation: + +```typescript +describe('Validation', () => { + it('should reject invalid email', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { + name: 'John', + email: 'not-an-email', + }, + }); + + t.assert.equal(response.statusCode, 400); + const body = response.json(); + t.assert.ok(body.message.includes('email')); + }); + + it('should reject missing required fields', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { + name: 'John', + // missing email + }, + }); + + t.assert.equal(response.statusCode, 400); + }); + + it('should coerce query parameters', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/items?limit=10&active=true', + }); + + 
t.assert.equal(response.statusCode, 200); + // limit is coerced to number, active to boolean + }); +}); +``` + +## Testing File Uploads + +Test multipart form data: + +```typescript +import { createReadStream } from 'node:fs'; +import FormData from 'form-data'; + +it('should upload file', async (t) => { + const form = new FormData(); + form.append('file', createReadStream('./test/fixtures/test.pdf')); + form.append('name', 'test-document'); + + const response = await app.inject({ + method: 'POST', + url: '/upload', + payload: form, + headers: form.getHeaders(), + }); + + t.assert.equal(response.statusCode, 200); + t.assert.ok(response.json().fileId); +}); +``` + +## Testing Streams + +Test streaming responses: + +```typescript +it('should stream large file', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/files/large-file', + }); + + t.assert.equal(response.statusCode, 200); + t.assert.ok(response.rawPayload.length > 0); +}); +``` + +## Mocking Dependencies + +Mock external services and databases: + +```typescript +import { describe, it, before, after, mock } from 'node:test'; + +describe('User Service', () => { + let app; + + before(async () => { + // Create app with mocked dependencies + const mockDb = { + users: { + findAll: mock.fn(async () => [ + { id: '1', name: 'User 1' }, + { id: '2', name: 'User 2' }, + ]), + findById: mock.fn(async (id) => { + if (id === '1') return { id: '1', name: 'User 1' }; + return null; + }), + create: mock.fn(async (data) => ({ id: 'new-id', ...data })), + }, + }; + + app = Fastify(); + app.decorate('db', mockDb); + app.register(import('./routes/users.js')); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + it('should call findAll', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users', + }); + + t.assert.equal(response.statusCode, 200); + t.assert.equal(app.db.users.findAll.mock.calls.length, 1); + }); +}); +``` + +## Testing Plugins in 
Isolation + +Test plugins independently: + +```typescript +import { describe, it, before, after } from 'node:test'; +import Fastify from 'fastify'; +import cachePlugin from './plugins/cache.js'; + +describe('Cache Plugin', () => { + let app; + + before(async () => { + app = Fastify(); + app.register(cachePlugin, { ttl: 1000 }); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + it('should decorate fastify with cache', (t) => { + t.assert.ok(app.hasDecorator('cache')); + t.assert.equal(typeof app.cache.get, 'function'); + t.assert.equal(typeof app.cache.set, 'function'); + }); + + it('should cache and retrieve values', (t) => { + app.cache.set('key', 'value'); + t.assert.equal(app.cache.get('key'), 'value'); + }); +}); +``` + +## Testing Hooks + +Test hook behavior: + +```typescript +describe('Hooks', () => { + it('should add request id header', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/health', + }); + + t.assert.ok(response.headers['x-request-id']); + }); + + it('should log request timing', async (t) => { + const logs = []; + const app = Fastify({ + logger: { + level: 'info', + stream: { + write: (msg) => logs.push(JSON.parse(msg)), + }, + }, + }); + + app.register(import('./app.js')); + await app.ready(); + + await app.inject({ method: 'GET', url: '/health' }); + + const responseLog = logs.find((l) => l.msg?.includes('completed')); + t.assert.ok(responseLog); + t.assert.ok(responseLog.responseTime); + + await app.close(); + }); +}); +``` + +## Test Factory Pattern + +Create a reusable test app builder: + +```typescript +// test/helper.ts +import Fastify from 'fastify'; +import type { FastifyInstance } from 'fastify'; + +interface TestContext { + app: FastifyInstance; + inject: FastifyInstance['inject']; +} + +export async function buildTestApp(options = {}): Promise<TestContext> { + const app = Fastify({ + logger: false, // Disable logging in tests + ...options, + }); + + // Register plugins + 
app.register(import('../src/plugins/database.js'), { + connectionString: process.env.TEST_DATABASE_URL, + }); + app.register(import('../src/routes/index.js')); + + await app.ready(); + + return { + app, + inject: app.inject.bind(app), + }; +} + +// Usage in tests +describe('API Tests', () => { + let ctx: TestContext; + + before(async () => { + ctx = await buildTestApp(); + }); + + after(async () => { + await ctx.app.close(); + }); + + it('should work', async (t) => { + const response = await ctx.inject({ + method: 'GET', + url: '/health', + }); + t.assert.equal(response.statusCode, 200); + }); +}); +``` + +## Database Testing with Transactions + +Use transactions for test isolation: + +```typescript +describe('Database Integration', () => { + let app; + let transaction; + + before(async () => { + app = await buildApp(); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + beforeEach(async () => { + transaction = await app.db.beginTransaction(); + app.db.setTransaction(transaction); + }); + + afterEach(async () => { + await transaction.rollback(); + }); + + it('should create user', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { name: 'Test', email: 'test@example.com' }, + }); + + t.assert.equal(response.statusCode, 201); + // Transaction is rolled back after test + }); +}); +``` + +## Parallel Test Execution + +Structure tests for parallel execution: + +```typescript +// Tests run in parallel by default with node:test +// Use separate app instances or proper isolation + +import { describe, it } from 'node:test'; + +describe('User API', async () => { + // Each test suite gets its own app instance + const app = await buildTestApp(); + + it('test 1', async (t) => { + // ... + }); + + it('test 2', async (t) => { + // ... 
+ }); + + // Cleanup after all tests in this suite + after(() => app.close()); +}); + +describe('Post API', async () => { + const app = await buildTestApp(); + + it('test 1', async (t) => { + // ... + }); + + after(() => app.close()); +}); +``` + +## Running Tests + +```bash +# Run all tests +node --test + +# Run with TypeScript +node --test src/**/*.test.ts + +# Run specific file +node --test src/routes/users.test.ts + +# With coverage +node --test --experimental-test-coverage + +# Watch mode +node --test --watch +``` diff --git a/.agents/skills/fastify-best-practices/rules/typescript.md b/.agents/skills/fastify-best-practices/rules/typescript.md new file mode 100644 index 0000000..b948478 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/typescript.md @@ -0,0 +1,458 @@ +--- +name: typescript +description: TypeScript integration with Fastify +metadata: + tags: typescript, types, generics, type-safety +--- + +# TypeScript Integration + +## Type Stripping with Node.js + +Use Node.js built-in type stripping (Node.js 22.6+): + +```bash +# Run TypeScript directly +node --experimental-strip-types app.ts + +# In Node.js 23+ +node app.ts +``` + +```json +// package.json +{ + "type": "module", + "scripts": { + "start": "node app.ts", + "dev": "node --watch app.ts" + } +} +``` + +```typescript +// tsconfig.json for type stripping +{ + "compilerOptions": { + "target": "ESNext", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "verbatimModuleSyntax": true, + "erasableSyntaxOnly": true, + "noEmit": true, + "strict": true + } +} +``` + +## Basic Type Safety + +Type your Fastify application: + +```typescript +import Fastify, { type FastifyInstance, type FastifyRequest, type FastifyReply } from 'fastify'; + +const app: FastifyInstance = Fastify({ logger: true }); + +app.get('/health', async (request: FastifyRequest, reply: FastifyReply) => { + return { status: 'ok' }; +}); + +await app.listen({ port: 3000 }); +``` + +## Typing Route Handlers + +Use 
generics to type request parts: + +```typescript +import type { FastifyRequest, FastifyReply } from 'fastify'; + +interface CreateUserBody { + name: string; + email: string; +} + +interface UserParams { + id: string; +} + +interface UserQuery { + include?: string; +} + +// Type the request with generics +app.post<{ + Body: CreateUserBody; +}>('/users', async (request, reply) => { + const { name, email } = request.body; // Fully typed + return { name, email }; +}); + +app.get<{ + Params: UserParams; + Querystring: UserQuery; +}>('/users/:id', async (request) => { + const { id } = request.params; // string + const { include } = request.query; // string | undefined + return { id, include }; +}); + +// Full route options typing +app.route<{ + Params: UserParams; + Querystring: UserQuery; + Body: CreateUserBody; + Reply: { user: { id: string; name: string } }; +}>({ + method: 'PUT', + url: '/users/:id', + handler: async (request, reply) => { + return { user: { id: request.params.id, name: request.body.name } }; + }, +}); +``` + +## Type Providers + +Use @fastify/type-provider-typebox for runtime + compile-time safety: + +```typescript +import Fastify from 'fastify'; +import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; +import { Type } from '@sinclair/typebox'; + +const app = Fastify().withTypeProvider<TypeBoxTypeProvider>(); + +const UserSchema = Type.Object({ + id: Type.String(), + name: Type.String(), + email: Type.String({ format: 'email' }), +}); + +const CreateUserSchema = Type.Object({ + name: Type.String({ minLength: 1 }), + email: Type.String({ format: 'email' }), +}); + +app.post('/users', { + schema: { + body: CreateUserSchema, + response: { + 201: UserSchema, + }, + }, +}, async (request, reply) => { + // request.body is typed as { name: string; email: string } + const { name, email } = request.body; + + reply.code(201); + return { id: 'generated', name, email }; +}); +``` + +## Typing Decorators + +Extend Fastify types with declaration merging: + +```typescript
+import Fastify from 'fastify'; + +// Declare types for decorators +declare module 'fastify' { + interface FastifyInstance { + config: { + port: number; + host: string; + }; + db: Database; + } + + interface FastifyRequest { + user?: { + id: string; + email: string; + role: string; + }; + startTime: number; + } + + interface FastifyReply { + sendSuccess: (data: unknown) => void; + } +} + +const app = Fastify(); + +// Add decorators +app.decorate('config', { port: 3000, host: 'localhost' }); +app.decorate('db', new Database()); + +app.decorateRequest('user', null); +app.decorateRequest('startTime', 0); + +app.decorateReply('sendSuccess', function (data: unknown) { + this.send({ success: true, data }); +}); + +// Now fully typed +app.get('/profile', async (request, reply) => { + const user = request.user; // { id: string; email: string; role: string } | undefined + const config = app.config; // { port: number; host: string } + + reply.sendSuccess({ user }); +}); +``` + +## Typing Plugins + +Type plugin options and exports: + +```typescript +import fp from 'fastify-plugin'; +import type { FastifyPluginAsync } from 'fastify'; + +interface DatabasePluginOptions { + connectionString: string; + poolSize?: number; +} + +declare module 'fastify' { + interface FastifyInstance { + db: { + query: (sql: string, params?: unknown[]) => Promise<unknown[]>; + close: () => Promise<void>; + }; + } +} + +const databasePlugin: FastifyPluginAsync<DatabasePluginOptions> = async ( + fastify, + options, +) => { + const { connectionString, poolSize = 10 } = options; + + const db = await createConnection(connectionString, poolSize); + + fastify.decorate('db', { + query: (sql: string, params?: unknown[]) => db.query(sql, params), + close: () => db.end(), + }); + + fastify.addHook('onClose', async () => { + await db.end(); + }); +}; + +export default fp(databasePlugin, { + name: 'database', +}); +``` + +## Typing Hooks + +Type hook functions: + +```typescript +import type { + FastifyRequest, + FastifyReply, + onRequestHookHandler, + 
preHandlerHookHandler, +} from 'fastify'; + +const authHook: preHandlerHookHandler = async ( + request: FastifyRequest, + reply: FastifyReply, +) => { + const token = request.headers.authorization; + if (!token) { + reply.code(401).send({ error: 'Unauthorized' }); + return; + } + request.user = await verifyToken(token); +}; + +const timingHook: onRequestHookHandler = async (request) => { + request.startTime = Date.now(); +}; + +app.addHook('onRequest', timingHook); +app.addHook('preHandler', authHook); +``` + +## Typing Schema Objects + +Create reusable typed schemas: + +```typescript +import type { JSONSchema7 } from 'json-schema'; + +// Define schema with const assertion for type inference +const userSchema = { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + email: { type: 'string', format: 'email' }, + }, + required: ['id', 'name', 'email'], +} as const satisfies JSONSchema7; + +// Infer TypeScript type from schema +type User = { + id: string; + name: string; + email: string; +}; + +app.get<{ Reply: User }>('/users/:id', { + schema: { + response: { + 200: userSchema, + }, + }, +}, async (request) => { + return { id: '1', name: 'John', email: 'john@example.com' }; +}); +``` + +## Shared Types + +Organize types in dedicated files: + +```typescript +// types/index.ts +export interface User { + id: string; + name: string; + email: string; + role: 'admin' | 'user'; +} + +export interface CreateUserInput { + name: string; + email: string; +} + +export interface PaginationQuery { + page?: number; + limit?: number; + sort?: string; +} + +// routes/users.ts +import type { FastifyInstance } from 'fastify'; +import type { User, CreateUserInput, PaginationQuery } from '../types/index.js'; + +export default async function userRoutes(fastify: FastifyInstance) { + fastify.get<{ + Querystring: PaginationQuery; + Reply: { users: User[]; total: number }; + }>('/', async (request) => { + const { page = 1, limit = 10 } = request.query; + // 
... + }); + + fastify.post<{ + Body: CreateUserInput; + Reply: User; + }>('/', async (request, reply) => { + reply.code(201); + // ... + }); +} +``` + +## Type-Safe Route Registration + +Create typed route factories: + +```typescript +import type { FastifyInstance, RouteOptions } from 'fastify'; + +function createCrudRoutes<T>( + fastify: FastifyInstance, + options: { + prefix: string; + schema: { + item: object; + create: object; + update: object; + }; + handlers: { + list: () => Promise<T[]>; + get: (id: string) => Promise<T | null>; + create: (data: unknown) => Promise<T>; + update: (id: string, data: unknown) => Promise<T>; + delete: (id: string) => Promise<void>; + }; + }, +) { + const { prefix, schema, handlers } = options; + + fastify.get(`${prefix}`, { + schema: { response: { 200: { type: 'array', items: schema.item } } }, + }, async () => handlers.list()); + + fastify.get(`${prefix}/:id`, { + schema: { response: { 200: schema.item } }, + }, async (request) => { + const item = await handlers.get((request.params as { id: string }).id); + if (!item) throw { statusCode: 404, message: 'Not found' }; + return item; + }); + + // ... more routes +} +``` + +## Avoiding Type Gymnastics + +Keep types simple and practical: + +```typescript +// GOOD - simple, readable types +interface UserRequest { + Params: { id: string }; + Body: { name: string }; +} + +app.put<UserRequest>('/users/:id', handler); + +// AVOID - overly complex generic types +type DeepPartial<T> = T extends object ? { + [P in keyof T]?: DeepPartial<T[P]>; +} : T; + +// AVOID - excessive type inference +type InferSchemaType<T> = T extends { properties: infer P } + ? 
{ [K in keyof P]: InferPropertyType } + : never; +``` + +## Type Checking Without Compilation + +Use TypeScript for type checking only: + +```bash +# Type check without emitting +npx tsc --noEmit + +# Watch mode +npx tsc --noEmit --watch + +# In CI +npm run typecheck +``` + +```json +// package.json +{ + "scripts": { + "start": "node app.ts", + "typecheck": "tsc --noEmit", + "test": "npm run typecheck && node --test" + } +} +``` diff --git a/.agents/skills/fastify-best-practices/rules/websockets.md b/.agents/skills/fastify-best-practices/rules/websockets.md new file mode 100644 index 0000000..3481570 --- /dev/null +++ b/.agents/skills/fastify-best-practices/rules/websockets.md @@ -0,0 +1,421 @@ +--- +name: websockets +description: WebSocket support in Fastify +metadata: + tags: websockets, realtime, ws, socket +--- + +# WebSocket Support + +## Using @fastify/websocket + +Add WebSocket support to Fastify: + +```typescript +import Fastify from 'fastify'; +import websocket from '@fastify/websocket'; + +const app = Fastify(); + +app.register(websocket); + +app.get('/ws', { websocket: true }, (socket, request) => { + socket.on('message', (message) => { + const data = message.toString(); + console.log('Received:', data); + + // Echo back + socket.send(`Echo: ${data}`); + }); + + socket.on('close', () => { + console.log('Client disconnected'); + }); + + socket.on('error', (error) => { + console.error('WebSocket error:', error); + }); +}); + +await app.listen({ port: 3000 }); +``` + +## WebSocket with Hooks + +Use Fastify hooks with WebSocket routes: + +```typescript +app.register(async function wsRoutes(fastify) { + // This hook runs before WebSocket upgrade + fastify.addHook('preValidation', async (request, reply) => { + const token = request.headers.authorization; + if (!token) { + reply.code(401).send({ error: 'Unauthorized' }); + return; + } + request.user = await verifyToken(token); + }); + + fastify.get('/ws', { websocket: true }, (socket, request) => { + 
console.log('Connected user:', request.user.id); + + socket.on('message', (message) => { + // Handle authenticated messages + }); + }); +}); +``` + +## Connection Options + +Configure WebSocket server options: + +```typescript +app.register(websocket, { + options: { + maxPayload: 1048576, // 1MB max message size + clientTracking: true, + perMessageDeflate: { + zlibDeflateOptions: { + chunkSize: 1024, + memLevel: 7, + level: 3, + }, + zlibInflateOptions: { + chunkSize: 10 * 1024, + }, + }, + }, +}); +``` + +## Broadcast to All Clients + +Broadcast messages to connected clients: + +```typescript +const clients = new Set(); + +app.get('/ws', { websocket: true }, (socket, request) => { + clients.add(socket); + + socket.on('close', () => { + clients.delete(socket); + }); + + socket.on('message', (message) => { + // Broadcast to all other clients + for (const client of clients) { + if (client !== socket && client.readyState === WebSocket.OPEN) { + client.send(message); + } + } + }); +}); + +// Broadcast from HTTP route +app.post('/broadcast', async (request) => { + const { message } = request.body; + + for (const client of clients) { + if (client.readyState === WebSocket.OPEN) { + client.send(JSON.stringify({ type: 'broadcast', message })); + } + } + + return { sent: clients.size }; +}); +``` + +## Rooms/Channels Pattern + +Organize connections into rooms: + +```typescript +const rooms = new Map>(); + +function joinRoom(socket: WebSocket, roomId: string) { + if (!rooms.has(roomId)) { + rooms.set(roomId, new Set()); + } + rooms.get(roomId)!.add(socket); +} + +function leaveRoom(socket: WebSocket, roomId: string) { + rooms.get(roomId)?.delete(socket); + if (rooms.get(roomId)?.size === 0) { + rooms.delete(roomId); + } +} + +function broadcastToRoom(roomId: string, message: string, exclude?: WebSocket) { + const room = rooms.get(roomId); + if (!room) return; + + for (const client of room) { + if (client !== exclude && client.readyState === WebSocket.OPEN) { + 
client.send(message); + } + } +} + +app.get('/ws/:roomId', { websocket: true }, (socket, request) => { + const { roomId } = request.params as { roomId: string }; + + joinRoom(socket, roomId); + + socket.on('message', (message) => { + broadcastToRoom(roomId, message.toString(), socket); + }); + + socket.on('close', () => { + leaveRoom(socket, roomId); + }); +}); +``` + +## Structured Message Protocol + +Use JSON for structured messages: + +```typescript +interface WSMessage { + type: string; + payload?: unknown; + id?: string; +} + +app.get('/ws', { websocket: true }, (socket, request) => { + function send(message: WSMessage) { + socket.send(JSON.stringify(message)); + } + + socket.on('message', (raw) => { + let message: WSMessage; + + try { + message = JSON.parse(raw.toString()); + } catch { + send({ type: 'error', payload: 'Invalid JSON' }); + return; + } + + switch (message.type) { + case 'ping': + send({ type: 'pong', id: message.id }); + break; + + case 'subscribe': + handleSubscribe(socket, message.payload); + send({ type: 'subscribed', payload: message.payload, id: message.id }); + break; + + case 'message': + handleMessage(socket, message.payload); + break; + + default: + send({ type: 'error', payload: 'Unknown message type' }); + } + }); +}); +``` + +## Heartbeat/Ping-Pong + +Keep connections alive: + +```typescript +const HEARTBEAT_INTERVAL = 30000; +const clients = new Map(); + +app.get('/ws', { websocket: true }, (socket, request) => { + clients.set(socket, { isAlive: true }); + + socket.on('pong', () => { + const client = clients.get(socket); + if (client) client.isAlive = true; + }); + + socket.on('close', () => { + clients.delete(socket); + }); +}); + +// Heartbeat interval +setInterval(() => { + for (const [socket, state] of clients) { + if (!state.isAlive) { + socket.terminate(); + clients.delete(socket); + continue; + } + + state.isAlive = false; + socket.ping(); + } +}, HEARTBEAT_INTERVAL); +``` + +## Authentication + +Authenticate WebSocket 
connections: + +```typescript +app.get('/ws', { + websocket: true, + preValidation: async (request, reply) => { + // Authenticate via query parameter or header + const token = request.query.token || request.headers.authorization?.replace('Bearer ', ''); + + if (!token) { + reply.code(401).send({ error: 'Token required' }); + return; + } + + try { + request.user = await verifyToken(token); + } catch { + reply.code(401).send({ error: 'Invalid token' }); + } + }, +}, (socket, request) => { + console.log('Authenticated user:', request.user); + + socket.on('message', (message) => { + // Handle authenticated messages + }); +}); +``` + +## Error Handling + +Handle WebSocket errors properly: + +```typescript +app.get('/ws', { websocket: true }, (socket, request) => { + socket.on('error', (error) => { + request.log.error({ err: error }, 'WebSocket error'); + }); + + socket.on('message', async (raw) => { + try { + const message = JSON.parse(raw.toString()); + const result = await processMessage(message); + socket.send(JSON.stringify({ success: true, result })); + } catch (error) { + request.log.error({ err: error }, 'Message processing error'); + socket.send(JSON.stringify({ + success: false, + error: error.message, + })); + } + }); +}); +``` + +## Rate Limiting WebSocket Messages + +Limit message frequency: + +```typescript +const rateLimits = new Map(); + +function checkRateLimit(socket: WebSocket, limit: number, window: number): boolean { + const now = Date.now(); + let state = rateLimits.get(socket); + + if (!state || now > state.resetAt) { + state = { count: 0, resetAt: now + window }; + rateLimits.set(socket, state); + } + + state.count++; + + if (state.count > limit) { + return false; + } + + return true; +} + +app.get('/ws', { websocket: true }, (socket, request) => { + socket.on('message', (message) => { + if (!checkRateLimit(socket, 100, 60000)) { + socket.send(JSON.stringify({ error: 'Rate limit exceeded' })); + return; + } + + // Process message + }); + + 
socket.on('close', () => { + rateLimits.delete(socket); + }); +}); +``` + +## Graceful Shutdown + +Close WebSocket connections on shutdown: + +```typescript +import closeWithGrace from 'close-with-grace'; + +const connections = new Set(); + +app.get('/ws', { websocket: true }, (socket, request) => { + connections.add(socket); + + socket.on('close', () => { + connections.delete(socket); + }); +}); + +closeWithGrace({ delay: 5000 }, async ({ signal }) => { + // Notify clients + for (const socket of connections) { + if (socket.readyState === WebSocket.OPEN) { + socket.send(JSON.stringify({ type: 'shutdown', message: 'Server is shutting down' })); + socket.close(1001, 'Server shutdown'); + } + } + + await app.close(); +}); +``` + +## Full-Duplex Stream Pattern + +Use WebSocket for streaming data: + +```typescript +app.get('/ws/stream', { websocket: true }, async (socket, request) => { + const stream = createDataStream(); + + stream.on('data', (data) => { + if (socket.readyState === WebSocket.OPEN) { + socket.send(JSON.stringify({ type: 'data', payload: data })); + } + }); + + stream.on('end', () => { + socket.send(JSON.stringify({ type: 'end' })); + socket.close(); + }); + + socket.on('message', (message) => { + const { type, payload } = JSON.parse(message.toString()); + + if (type === 'pause') { + stream.pause(); + } else if (type === 'resume') { + stream.resume(); + } + }); + + socket.on('close', () => { + stream.destroy(); + }); +}); +``` diff --git a/.agents/skills/fastify-best-practices/tile.json b/.agents/skills/fastify-best-practices/tile.json new file mode 100644 index 0000000..56a442a --- /dev/null +++ b/.agents/skills/fastify-best-practices/tile.json @@ -0,0 +1,11 @@ +{ + "name": "mcollina/fastify-best-practices", + "version": "0.1.0", + "private": false, + "summary": "Guides development of Fastify Node.js backend servers and REST APIs using TypeScript or JavaScript. 
Use when building, configuring, or debugging a Fastify application — including defining routes, implementing plugins, setting up JSON Schema validation, handling errors, optimising performance, managing authentication, configuring CORS and security headers, integrating databases, working with WebSockets, and deploying to production. Covers the full Fastify request lifecycle (hooks, serialization, logging with Pino) and TypeScript integration via strip types. Trigger terms: Fastify, Node.js server, REST API, API routes, backend framework, fastify.config, server.ts, app.ts.", + "skills": { + "fastify-best-practices": { + "path": "SKILL.md" + } + } +} diff --git a/.agents/skills/fastify-typescript/SKILL.md b/.agents/skills/fastify-typescript/SKILL.md new file mode 100644 index 0000000..10e673e --- /dev/null +++ b/.agents/skills/fastify-typescript/SKILL.md @@ -0,0 +1,244 @@ +--- +name: fastify-typescript +description: Guidelines for building high-performance APIs with Fastify and TypeScript, covering validation, Prisma integration, and testing best practices +--- + +# Fastify TypeScript Development + +You are an expert in Fastify and TypeScript development with deep knowledge of building high-performance, type-safe APIs. 
+ +## TypeScript General Guidelines + +### Basic Principles + +- Use English for all code and documentation +- Always declare types for variables and functions (parameters and return values) +- Avoid using `any` type - create necessary types instead +- Use JSDoc to document public classes and methods +- Write concise, maintainable, and technically accurate code +- Use functional and declarative programming patterns; avoid classes +- Prefer iteration and modularization to adhere to DRY principles + +### Nomenclature + +- Use PascalCase for types and interfaces +- Use camelCase for variables, functions, and methods +- Use kebab-case for file and directory names +- Use UPPERCASE for environment variables +- Use descriptive variable names with auxiliary verbs: `isLoading`, `hasError`, `canDelete` +- Start each function with a verb + +### Functions + +- Write short functions with a single purpose +- Use arrow functions for simple operations +- Use async/await consistently throughout the codebase +- Use the RO-RO pattern (Receive an Object, Return an Object) for multiple parameters + +### Types and Interfaces + +- Prefer interfaces over types for object shapes +- Avoid enums; use maps or const objects instead +- Use Zod for runtime validation with inferred types +- Use `readonly` for immutable properties +- Use `import type` for type-only imports + +## Fastify-Specific Guidelines + +### Project Structure + +``` +src/ + routes/ + {resource}/ + index.ts + handlers.ts + schemas.ts + plugins/ + auth.ts + database.ts + cors.ts + services/ + {domain}Service.ts + repositories/ + {entity}Repository.ts + types/ + index.ts + utils/ + config/ + app.ts + server.ts +``` + +### Route Organization + +- Organize routes by resource/domain +- Use route plugins for modular registration +- Define schemas alongside route handlers +- Use route prefixes for API versioning + +```typescript +import { FastifyPluginAsync } from 'fastify'; + +const usersRoutes: FastifyPluginAsync = async (fastify) 
=> { + fastify.get('/', { schema: listUsersSchema }, listUsersHandler); + fastify.get('/:id', { schema: getUserSchema }, getUserHandler); + fastify.post('/', { schema: createUserSchema }, createUserHandler); + fastify.put('/:id', { schema: updateUserSchema }, updateUserHandler); + fastify.delete('/:id', { schema: deleteUserSchema }, deleteUserHandler); +}; + +export default usersRoutes; +``` + +### Schema Validation with JSON Schema / Ajv + +- Define JSON schemas for all request/response validation +- Use @sinclair/typebox for type-safe schema definitions +- Leverage Fastify's built-in Ajv integration + +```typescript +import { Type, Static } from '@sinclair/typebox'; + +const UserSchema = Type.Object({ + id: Type.String({ format: 'uuid' }), + name: Type.String({ minLength: 1 }), + email: Type.String({ format: 'email' }), + createdAt: Type.String({ format: 'date-time' }), +}); + +type User = Static; + +const createUserSchema = { + body: Type.Object({ + name: Type.String({ minLength: 1 }), + email: Type.String({ format: 'email' }), + }), + response: { + 201: UserSchema, + 400: ErrorSchema, + }, +}; +``` + +### Plugins and Decorators + +- Use plugins for shared functionality +- Decorate Fastify instance with services and utilities +- Register plugins with proper encapsulation + +```typescript +import fp from 'fastify-plugin'; + +const databasePlugin = fp(async (fastify) => { + const prisma = new PrismaClient(); + + await prisma.$connect(); + + fastify.decorate('prisma', prisma); + + fastify.addHook('onClose', async () => { + await prisma.$disconnect(); + }); +}); + +export default databasePlugin; +``` + +### Prisma Integration + +- Use Prisma as the ORM for database operations +- Create repository classes for data access +- Use transactions for complex operations + +```typescript +class UserRepository { + constructor(private prisma: PrismaClient) {} + + async findById(id: string): Promise { + return this.prisma.user.findUnique({ where: { id } }); + } + + async 
create(data: CreateUserInput): Promise { + return this.prisma.user.create({ data }); + } +} +``` + +### Error Handling + +- Use Fastify's built-in error handling +- Create custom error classes for domain errors +- Return consistent error responses + +```typescript +import { FastifyError } from 'fastify'; + +class NotFoundError extends Error implements FastifyError { + code = 'NOT_FOUND'; + statusCode = 404; + + constructor(resource: string, id: string) { + super(`${resource} with id ${id} not found`); + this.name = 'NotFoundError'; + } +} + +// Global error handler +fastify.setErrorHandler((error, request, reply) => { + const statusCode = error.statusCode || 500; + + reply.status(statusCode).send({ + error: error.name, + message: error.message, + statusCode, + }); +}); +``` + +### Testing with Jest + +- Write unit tests for services and handlers +- Use integration tests for routes +- Mock external dependencies + +```typescript +import { build } from '../app'; + +describe('Users API', () => { + let app: FastifyInstance; + + beforeAll(async () => { + app = await build(); + }); + + afterAll(async () => { + await app.close(); + }); + + it('should list users', async () => { + const response = await app.inject({ + method: 'GET', + url: '/api/users', + }); + + expect(response.statusCode).toBe(200); + expect(JSON.parse(response.payload)).toBeInstanceOf(Array); + }); +}); +``` + +### Performance + +- Fastify is one of the fastest Node.js frameworks +- Use schema validation for automatic serialization optimization +- Enable logging only when needed in production +- Use connection pooling for database connections + +### Security + +- Use @fastify/helmet for security headers +- Implement rate limiting with @fastify/rate-limit +- Use @fastify/cors for CORS configuration +- Validate all inputs with JSON Schema +- Use JWT for authentication with @fastify/jwt diff --git a/.bg-shell/manifest.json b/.bg-shell/manifest.json new file mode 100644 index 0000000..0637a08 --- /dev/null 
+++ b/.bg-shell/manifest.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..bfe07c9 --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,20 @@ +{ + "permissions": { + "allow": [ + "Bash(MSYS_NO_PATHCONV=1 docker build -t tubearr-test:latest .)", + "Bash(MSYS_NO_PATHCONV=1 docker compose up -d)", + "Bash(MSYS_NO_PATHCONV=1 docker inspect --format='{{.State.Health.Status}}' tubearr 2>&1)", + "Bash(MSYS_NO_PATHCONV=1 docker logs tubearr)", + "Bash(MSYS_NO_PATHCONV=1 docker run --rm --entrypoint sh tubearr:latest -c \"ls /app/dist/config/ 2>&1\")", + "Bash(MSYS_NO_PATHCONV=1 docker run --rm --entrypoint sh tubearr:latest -c \"cat /app/package.json | head -10\")", + "Bash(MSYS_NO_PATHCONV=1 docker compose -f \"W:/programming/Projects/Tubearr/.gsd/worktrees/M001/docker-compose.yml\" down 2>&1)", + "Bash(MSYS_NO_PATHCONV=1 docker compose build --no-cache)", + "Bash(MSYS_NO_PATHCONV=1 docker compose build)", + "Bash(MSYS_NO_PATHCONV=1 docker inspect --format='{{.State.Health.Status}}' tubearr)", + "Bash(MSYS_NO_PATHCONV=1 docker inspect --format='{{json .State.Health}}' tubearr 2>&1)", + "Bash(curl:*)", + "Bash(python -c \"import sys,json; d=json.load\\(sys.stdin\\); print\\(f''''Channels: {len\\(d\\)}''''\\); [print\\(f'''' - {c[name]} \\({c[platform]}\\) monitoring={c.get\\(monitoringEnabled,?\\)} mode={c.get\\(monitoringMode,?\\)}''''\\) for c in d]\")", + "Bash(python -c \":*)" + ] + } +} diff --git a/.claude/skills/drizzle-migrations/SKILL.md b/.claude/skills/drizzle-migrations/SKILL.md new file mode 100644 index 0000000..be806d0 --- /dev/null +++ b/.claude/skills/drizzle-migrations/SKILL.md @@ -0,0 +1,518 @@ +--- +name: drizzle-migrations +description: "Migration-first database development workflow using Drizzle ORM for TypeScript/J..." 
+version: 1.0.0 +tags: [] +progressive_disclosure: + entry_point: + summary: "Migration-first database development workflow using Drizzle ORM for TypeScript/J..." + when_to_use: "When working with drizzle-migrations or related functionality." + quick_start: "1. Review the core concepts below. 2. Apply patterns to your use case. 3. Follow best practices for implementation." +--- +# Drizzle ORM Database Migrations (TypeScript) + +Migration-first database development workflow using Drizzle ORM for TypeScript/JavaScript projects. + +## When to Use This Skill + +Use this skill when: +- Working with Drizzle ORM in TypeScript/JavaScript projects +- Need to create or modify database schema +- Want migration-first development workflow +- Setting up new database tables or columns +- Need to ensure schema consistency across environments + +## Core Principle: Migration-First Development + +**Critical Rule**: Schema changes ALWAYS start with migrations, never code-first. + +### Why Migration-First? +- ✅ SQL migrations are the single source of truth +- ✅ Prevents schema drift between environments +- ✅ Enables rollback and versioning +- ✅ Forces explicit schema design decisions +- ✅ TypeScript types generated from migrations +- ✅ CI/CD can validate schema changes + +### Anti-Pattern (Code-First) +❌ **WRONG**: Writing TypeScript schema first +```typescript +// DON'T DO THIS FIRST +export const users = pgTable('users', { + id: uuid('id').primaryKey(), + email: text('email').notNull(), +}); +``` + +### Correct Pattern (Migration-First) +✅ **CORRECT**: Write SQL migration first +```sql +-- drizzle/0001_add_users_table.sql +CREATE TABLE users ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + email TEXT NOT NULL UNIQUE, + created_at TIMESTAMP DEFAULT NOW() +); +``` + +## Complete Migration Workflow + +### Step 1: Design Schema in SQL Migration + +Create descriptive SQL migration file: + +```sql +-- drizzle/0001_create_school_calendars.sql +CREATE TABLE school_calendars ( + id UUID 
PRIMARY KEY DEFAULT gen_random_uuid(), + school_id UUID NOT NULL REFERENCES schools(id) ON DELETE CASCADE, + start_date DATE NOT NULL, + end_date DATE NOT NULL, + academic_year TEXT NOT NULL, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +-- Add indexes for query performance +CREATE INDEX idx_school_calendars_school_id ON school_calendars(school_id); +CREATE INDEX idx_school_calendars_academic_year ON school_calendars(academic_year); + +-- Add constraints +ALTER TABLE school_calendars + ADD CONSTRAINT check_date_range + CHECK (end_date > start_date); +``` + +**Naming Convention**: +- Use sequential numbers: `0001_`, `0002_`, etc. +- Descriptive names: `create_school_calendars`, `add_user_roles` +- Format: `XXXX_descriptive_name.sql` + +### Step 2: Generate TypeScript Definitions + +Drizzle Kit generates TypeScript types from SQL: + +```bash +# Generate TypeScript schema and snapshots +pnpm drizzle-kit generate + +# Or using npm +npm run db:generate +``` + +**What This Creates**: +1. TypeScript schema files (if using `drizzle-kit push`) +2. Snapshot files in `drizzle/meta/XXXX_snapshot.json` +3. 
Migration metadata + +### Step 3: Create Schema Snapshot + +Snapshots enable schema drift detection: + +```json +// drizzle/meta/0001_snapshot.json (auto-generated) +{ + "version": "5", + "dialect": "postgresql", + "tables": { + "school_calendars": { + "name": "school_calendars", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "school_id": { + "name": "school_id", + "type": "uuid", + "notNull": true + } + } + } + } +} +``` + +**Snapshots in Version Control**: +- ✅ Commit snapshots to git +- ✅ Enables drift detection in CI +- ✅ Documents schema history + +### Step 4: Implement TypeScript Schema + +Now write TypeScript schema that mirrors SQL migration: + +```typescript +// src/lib/db/schema/school/calendar.ts +import { pgTable, uuid, date, text, timestamp } from 'drizzle-orm/pg-core'; +import { schools } from './school'; + +export const schoolCalendars = pgTable('school_calendars', { + id: uuid('id').primaryKey().defaultRandom(), + schoolId: uuid('school_id') + .notNull() + .references(() => schools.id, { onDelete: 'cascade' }), + startDate: date('start_date').notNull(), + endDate: date('end_date').notNull(), + academicYear: text('academic_year').notNull(), + createdAt: timestamp('created_at').defaultNow(), + updatedAt: timestamp('updated_at').defaultNow(), +}); + +// Type inference +export type SchoolCalendar = typeof schoolCalendars.$inferSelect; +export type NewSchoolCalendar = typeof schoolCalendars.$inferInsert; +``` + +**Key Points**: +- Column names match SQL exactly: `school_id` → `'school_id'` +- TypeScript property names use camelCase: `schoolId` +- Constraints and indexes defined in SQL, not TypeScript +- Foreign keys reference other tables + +### Step 5: Organize Schemas by Domain + +Structure schemas for maintainability: + +``` +src/lib/db/schema/ +├── index.ts # Export all schemas +├── school/ +│ ├── index.ts +│ ├── district.ts +│ ├── holiday.ts +│ ├── school.ts 
+│ └── calendar.ts +├── providers.ts +├── cart.ts +└── users.ts +``` + +**index.ts** (export all): +```typescript +// src/lib/db/schema/index.ts +export * from './school'; +export * from './providers'; +export * from './cart'; +export * from './users'; +``` + +**school/index.ts**: +```typescript +// src/lib/db/schema/school/index.ts +export * from './district'; +export * from './holiday'; +export * from './school'; +export * from './calendar'; +``` + +### Step 6: Add Quality Check to CI + +Validate schema consistency in CI/CD: + +```yaml +# .github/workflows/quality.yml +name: Quality Checks + +on: + pull_request: + branches: [main, develop] + push: + branches: [main] + +jobs: + quality: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'pnpm' + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Check database schema drift + run: pnpm drizzle-kit check + + - name: Verify migrations (dry-run) + run: pnpm drizzle-kit push --dry-run + env: + DATABASE_URL: ${{ secrets.STAGING_DATABASE_URL }} + + - name: Run type checking + run: pnpm tsc --noEmit + + - name: Lint code + run: pnpm lint +``` + +**CI Checks Explained**: +- `drizzle-kit check`: Validates snapshots match schema +- `drizzle-kit push --dry-run`: Tests migration without applying +- Type checking: Ensures TypeScript compiles +- Linting: Enforces code style + +### Step 7: Test on Staging + +Before production, test migration on staging: + +```bash +# 1. Run migration on staging +STAGING_DATABASE_URL="..." pnpm drizzle-kit push + +# 2. Verify schema +pnpm drizzle-kit check + +# 3. Test affected API routes +curl https://staging.example.com/api/schools/calendars + +# 4. Check for data integrity issues +# Run queries to verify data looks correct + +# 5. 
Monitor logs for errors +# Check application logs for migration-related errors +``` + +**Staging Checklist**: +- [ ] Migration runs without errors +- [ ] Schema drift check passes +- [ ] API routes using new schema work correctly +- [ ] No data integrity issues +- [ ] Application logs show no errors +- [ ] Query performance acceptable + +## Common Migration Patterns + +### Adding a Column + +```sql +-- drizzle/0005_add_user_phone.sql +ALTER TABLE users +ADD COLUMN phone TEXT; + +-- Add index if querying by phone +CREATE INDEX idx_users_phone ON users(phone); +``` + +TypeScript: +```typescript +export const users = pgTable('users', { + id: uuid('id').primaryKey(), + email: text('email').notNull(), + phone: text('phone'), // New column +}); +``` + +### Creating a Junction Table + +```sql +-- drizzle/0006_create_provider_specialties.sql +CREATE TABLE provider_specialties ( + provider_id UUID NOT NULL REFERENCES providers(id) ON DELETE CASCADE, + specialty_id UUID NOT NULL REFERENCES specialties(id) ON DELETE CASCADE, + PRIMARY KEY (provider_id, specialty_id) +); + +CREATE INDEX idx_provider_specialties_provider ON provider_specialties(provider_id); +CREATE INDEX idx_provider_specialties_specialty ON provider_specialties(specialty_id); +``` + +TypeScript: +```typescript +export const providerSpecialties = pgTable('provider_specialties', { + providerId: uuid('provider_id') + .notNull() + .references(() => providers.id, { onDelete: 'cascade' }), + specialtyId: uuid('specialty_id') + .notNull() + .references(() => specialties.id, { onDelete: 'cascade' }), +}, (table) => ({ + pk: primaryKey(table.providerId, table.specialtyId), +})); +``` + +### Modifying Column Type + +```sql +-- drizzle/0007_change_price_to_decimal.sql +ALTER TABLE services +ALTER COLUMN price TYPE DECIMAL(10, 2); +``` + +TypeScript: +```typescript +import { decimal } from 'drizzle-orm/pg-core'; + +export const services = pgTable('services', { + id: uuid('id').primaryKey(), + name: 
text('name').notNull(), + price: decimal('price', { precision: 10, scale: 2 }).notNull(), +}); +``` + +### Adding Constraints + +```sql +-- drizzle/0008_add_email_constraint.sql +ALTER TABLE users +ADD CONSTRAINT users_email_unique UNIQUE (email); + +ALTER TABLE users +ADD CONSTRAINT users_email_format CHECK (email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}$'); +``` + +## Configuration + +### drizzle.config.ts + +```typescript +import type { Config } from 'drizzle-kit'; + +export default { + schema: './src/lib/db/schema/index.ts', + out: './drizzle', + driver: 'pg', + dbCredentials: { + connectionString: process.env.DATABASE_URL!, + }, +} satisfies Config; +``` + +### package.json Scripts + +```json +{ + "scripts": { + "db:generate": "drizzle-kit generate:pg", + "db:push": "drizzle-kit push:pg", + "db:studio": "drizzle-kit studio", + "db:check": "drizzle-kit check:pg", + "db:up": "drizzle-kit up:pg" + } +} +``` + +## Migration Testing Workflow + +### Local Testing + +```bash +# 1. Create migration +echo "CREATE TABLE test (...)" > drizzle/0009_test.sql + +# 2. Generate TypeScript +pnpm db:generate + +# 3. Push to local database +pnpm db:push + +# 4. Verify schema +pnpm db:check + +# 5. Test in application +pnpm dev +# Manually test affected features + +# 6. 
Run tests +pnpm test +``` + +### Rollback Strategy + +```sql +-- drizzle/0010_add_feature.sql (up migration) +CREATE TABLE new_feature (...); + +-- drizzle/0010_add_feature_down.sql (down migration) +DROP TABLE new_feature; +``` + +Apply rollback: +```bash +# Manually run down migration +psql $DATABASE_URL -f drizzle/0010_add_feature_down.sql +``` + +## Best Practices + +### Do's +- ✅ Write SQL migrations first +- ✅ Use descriptive migration names +- ✅ Add indexes for foreign keys +- ✅ Include constraints in migrations +- ✅ Test migrations on staging before production +- ✅ Commit snapshots to version control +- ✅ Organize schemas by domain +- ✅ Use `drizzle-kit check` in CI + +### Don'ts +- ❌ Never write TypeScript schema before SQL migration +- ❌ Don't skip staging testing +- ❌ Don't modify old migrations (create new ones) +- ❌ Don't forget to add indexes +- ❌ Don't use `drizzle-kit push` in production (use proper migrations) +- ❌ Don't commit generated files without snapshots + +## Troubleshooting + +### Schema Drift Detected +**Error**: `Schema drift detected` + +**Solution**: +```bash +# Check what changed +pnpm drizzle-kit check + +# Regenerate snapshots +pnpm drizzle-kit generate + +# Review changes and commit +git add drizzle/meta/ +git commit -m "Update schema snapshots" +``` + +### Migration Fails on Staging +**Error**: Migration fails with data constraint violation + +**Solution**: +1. Rollback migration +2. Create data migration script +3. Run data migration first +4. 
Then run schema migration + +```sql +-- First: Migrate data +UPDATE users SET status = 'active' WHERE status IS NULL; + +-- Then: Add constraint +ALTER TABLE users +ALTER COLUMN status SET NOT NULL; +``` + +### TypeScript Types Out of Sync +**Error**: TypeScript types don't match database + +**Solution**: +```bash +# Regenerate everything +pnpm db:generate +pnpm tsc --noEmit + +# If still broken, check schema files +# Ensure column names match SQL exactly +``` + +## Related Skills + +- `universal-data-database-migration` - Universal migration patterns +- `toolchains-typescript-data-drizzle` - Drizzle ORM usage patterns +- `toolchains-typescript-core` - TypeScript best practices +- `universal-debugging-verification-before-completion` - Verification workflows diff --git a/.claude/skills/drizzle-orm/SKILL.md b/.claude/skills/drizzle-orm/SKILL.md new file mode 100644 index 0000000..d01fa29 --- /dev/null +++ b/.claude/skills/drizzle-orm/SKILL.md @@ -0,0 +1,396 @@ +--- +name: drizzle-orm +description: "Type-safe SQL ORM for TypeScript with zero runtime overhead" +progressive_disclosure: + entry_point: + summary: "Type-safe SQL ORM for TypeScript with zero runtime overhead" + when_to_use: "When working with drizzle-orm or related functionality." + quick_start: "1. Review the core concepts below. 2. Apply patterns to your use case. 3. Follow best practices for implementation." + references: + - advanced-schemas.md + - performance.md + - query-patterns.md + - vs-prisma.md +--- +# Drizzle ORM + +Modern TypeScript-first ORM with zero dependencies, compile-time type safety, and SQL-like syntax. Optimized for edge runtimes and serverless environments. 
+ +## Quick Start + +### Installation + +```bash +# Core ORM +npm install drizzle-orm + +# Database driver (choose one) +npm install pg # PostgreSQL +npm install mysql2 # MySQL +npm install better-sqlite3 # SQLite + +# Drizzle Kit (migrations) +npm install -D drizzle-kit +``` + +### Basic Setup + +```typescript +// db/schema.ts +import { pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: text('email').notNull().unique(), + name: text('name').notNull(), + createdAt: timestamp('created_at').defaultNow(), +}); + +// db/client.ts +import { drizzle } from 'drizzle-orm/node-postgres'; +import { Pool } from 'pg'; +import * as schema from './schema'; + +const pool = new Pool({ connectionString: process.env.DATABASE_URL }); +export const db = drizzle(pool, { schema }); +``` + +### First Query + +```typescript +import { db } from './db/client'; +import { users } from './db/schema'; +import { eq } from 'drizzle-orm'; + +// Insert +const newUser = await db.insert(users).values({ + email: 'user@example.com', + name: 'John Doe', +}).returning(); + +// Select +const allUsers = await db.select().from(users); + +// Where +const user = await db.select().from(users).where(eq(users.id, 1)); + +// Update +await db.update(users).set({ name: 'Jane Doe' }).where(eq(users.id, 1)); + +// Delete +await db.delete(users).where(eq(users.id, 1)); +``` + +## Schema Definition + +### Column Types Reference + +| PostgreSQL | MySQL | SQLite | TypeScript | +|------------|-------|--------|------------| +| `serial()` | `serial()` | `integer()` | `number` | +| `text()` | `text()` | `text()` | `string` | +| `integer()` | `int()` | `integer()` | `number` | +| `boolean()` | `boolean()` | `integer()` | `boolean` | +| `timestamp()` | `datetime()` | `integer()` | `Date` | +| `json()` | `json()` | `text()` | `unknown` | +| `uuid()` | `varchar(36)` | `text()` | `string` | + +### Common Schema Patterns + 
+```typescript +import { pgTable, serial, text, varchar, integer, boolean, timestamp, json, unique } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: varchar('email', { length: 255 }).notNull().unique(), + passwordHash: varchar('password_hash', { length: 255 }).notNull(), + role: text('role', { enum: ['admin', 'user', 'guest'] }).default('user'), + metadata: json('metadata').$type<{ theme: string; locale: string }>(), + isActive: boolean('is_active').default(true), + createdAt: timestamp('created_at').defaultNow().notNull(), + updatedAt: timestamp('updated_at').defaultNow().notNull(), +}, (table) => ({ + emailIdx: unique('email_unique_idx').on(table.email), +})); + +// Infer TypeScript types +type User = typeof users.$inferSelect; +type NewUser = typeof users.$inferInsert; +``` + +## Relations + +### One-to-Many + +```typescript +import { pgTable, serial, text, integer } from 'drizzle-orm/pg-core'; +import { relations } from 'drizzle-orm'; + +export const authors = pgTable('authors', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: text('title').notNull(), + authorId: integer('author_id').notNull().references(() => authors.id), +}); + +export const authorsRelations = relations(authors, ({ many }) => ({ + posts: many(posts), +})); + +export const postsRelations = relations(posts, ({ one }) => ({ + author: one(authors, { + fields: [posts.authorId], + references: [authors.id], + }), +})); + +// Query with relations +const authorsWithPosts = await db.query.authors.findMany({ + with: { posts: true }, +}); +``` + +### Many-to-Many + +```typescript +export const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +export const groups = pgTable('groups', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +export const usersToGroups = 
pgTable('users_to_groups', { + userId: integer('user_id').notNull().references(() => users.id), + groupId: integer('group_id').notNull().references(() => groups.id), +}, (table) => ({ + pk: primaryKey({ columns: [table.userId, table.groupId] }), +})); + +export const usersRelations = relations(users, ({ many }) => ({ + groups: many(usersToGroups), +})); + +export const groupsRelations = relations(groups, ({ many }) => ({ + users: many(usersToGroups), +})); + +export const usersToGroupsRelations = relations(usersToGroups, ({ one }) => ({ + user: one(users, { fields: [usersToGroups.userId], references: [users.id] }), + group: one(groups, { fields: [usersToGroups.groupId], references: [groups.id] }), +})); +``` + +## Queries + +### Filtering + +```typescript +import { eq, ne, gt, gte, lt, lte, like, ilike, inArray, isNull, isNotNull, and, or, between } from 'drizzle-orm'; + +// Equality +await db.select().from(users).where(eq(users.email, 'user@example.com')); + +// Comparison +await db.select().from(users).where(gt(users.id, 10)); + +// Pattern matching +await db.select().from(users).where(like(users.name, '%John%')); + +// Multiple conditions +await db.select().from(users).where( + and( + eq(users.role, 'admin'), + gt(users.createdAt, new Date('2024-01-01')) + ) +); + +// IN clause +await db.select().from(users).where(inArray(users.id, [1, 2, 3])); + +// NULL checks +await db.select().from(users).where(isNull(users.deletedAt)); +``` + +### Joins + +```typescript +import { eq } from 'drizzle-orm'; + +// Inner join +const result = await db + .select({ + user: users, + post: posts, + }) + .from(users) + .innerJoin(posts, eq(users.id, posts.authorId)); + +// Left join +const result = await db + .select({ + user: users, + post: posts, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.authorId)); + +// Multiple joins with aggregation +import { count, sql } from 'drizzle-orm'; + +const result = await db + .select({ + authorName: authors.name, + postCount: 
count(posts.id), + }) + .from(authors) + .leftJoin(posts, eq(authors.id, posts.authorId)) + .groupBy(authors.id); +``` + +### Pagination & Sorting + +```typescript +import { desc, asc } from 'drizzle-orm'; + +// Order by +await db.select().from(users).orderBy(desc(users.createdAt)); + +// Limit & offset +await db.select().from(users).limit(10).offset(20); + +// Pagination helper +function paginate(page: number, pageSize: number = 10) { + return db.select().from(users) + .limit(pageSize) + .offset(page * pageSize); +} +``` + +## Transactions + +```typescript +// Auto-rollback on error +await db.transaction(async (tx) => { + await tx.insert(users).values({ email: 'user@example.com', name: 'John' }); + await tx.insert(posts).values({ title: 'First Post', authorId: 1 }); + // If any query fails, entire transaction rolls back +}); + +// Manual control +const tx = db.transaction(async (tx) => { + const user = await tx.insert(users).values({ ... }).returning(); + + if (!user) { + tx.rollback(); + return; + } + + await tx.insert(posts).values({ authorId: user.id }); +}); +``` + +## Migrations + +### Drizzle Kit Configuration + +```typescript +// drizzle.config.ts +import type { Config } from 'drizzle-kit'; + +export default { + schema: './db/schema.ts', + out: './drizzle', + dialect: 'postgresql', + dbCredentials: { + url: process.env.DATABASE_URL!, + }, +} satisfies Config; +``` + +### Migration Workflow + +```bash +# Generate migration +npx drizzle-kit generate + +# View SQL +cat drizzle/0000_migration.sql + +# Apply migration +npx drizzle-kit migrate + +# Introspect existing database +npx drizzle-kit introspect + +# Drizzle Studio (database GUI) +npx drizzle-kit studio +``` + +### Example Migration + +```sql +-- drizzle/0000_initial.sql +CREATE TABLE IF NOT EXISTS "users" ( + "id" serial PRIMARY KEY NOT NULL, + "email" varchar(255) NOT NULL, + "name" text NOT NULL, + "created_at" timestamp DEFAULT now() NOT NULL, + CONSTRAINT "users_email_unique" UNIQUE("email") +); 
+``` + +## Navigation + +### Detailed References + +- **[🏗️ Advanced Schemas](./references/advanced-schemas.md)** - Custom types, composite keys, indexes, constraints, multi-tenant patterns. Load when designing complex database schemas. + +- **[🔍 Query Patterns](./references/query-patterns.md)** - Subqueries, CTEs, raw SQL, prepared statements, batch operations. Load when optimizing queries or handling complex filtering. + +- **[⚡ Performance](./references/performance.md)** - Connection pooling, query optimization, N+1 prevention, prepared statements, edge runtime integration. Load when scaling or optimizing database performance. + +- **[🔄 vs Prisma](./references/vs-prisma.md)** - Feature comparison, migration guide, when to choose Drizzle over Prisma. Load when evaluating ORMs or migrating from Prisma. + +## Red Flags + +**Stop and reconsider if:** +- Using `any` or `unknown` for JSON columns without type annotation +- Building raw SQL strings without using `sql` template (SQL injection risk) +- Not using transactions for multi-step data modifications +- Fetching all rows without pagination in production queries +- Missing indexes on foreign keys or frequently queried columns +- Using `select()` without specifying columns for large tables + +## Performance Benefits vs Prisma + +| Metric | Drizzle | Prisma | +|--------|---------|--------| +| **Bundle Size** | ~35KB | ~230KB | +| **Cold Start** | ~10ms | ~250ms | +| **Query Speed** | Baseline | ~2-3x slower | +| **Memory** | ~10MB | ~50MB | +| **Type Generation** | Runtime inference | Build-time generation | + +## Integration + +- **typescript-core**: Type-safe schema inference with `satisfies` +- **nextjs-core**: Server Actions, Route Handlers, Middleware integration +- **Database Migration**: Safe schema evolution patterns + +## Related Skills + +When using Drizzle, these skills enhance your workflow: +- **prisma**: Alternative ORM comparison: Drizzle vs Prisma trade-offs +- **typescript**: Advanced TypeScript 
patterns for type-safe queries +- **nextjs**: Drizzle with Next.js Server Actions and API routes +- **sqlalchemy**: SQLAlchemy patterns for Python developers learning Drizzle + +[Full documentation available in these skills if deployed in your bundle] diff --git a/.claude/skills/drizzle-orm/references/advanced-schemas.md b/.claude/skills/drizzle-orm/references/advanced-schemas.md new file mode 100644 index 0000000..909445c --- /dev/null +++ b/.claude/skills/drizzle-orm/references/advanced-schemas.md @@ -0,0 +1,380 @@ +# Advanced Schemas + +Deep dive into complex schema patterns, custom types, and database-specific features in Drizzle ORM. + +## Custom Column Types + +### Enums + +```typescript +import { pgEnum, pgTable, serial } from 'drizzle-orm/pg-core'; + +// PostgreSQL native enum +export const roleEnum = pgEnum('role', ['admin', 'user', 'guest']); + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + role: roleEnum('role').default('user'), +}); + +// MySQL/SQLite: Use text with constraints +import { mysqlTable, text } from 'drizzle-orm/mysql-core'; + +export const users = mysqlTable('users', { + role: text('role', { enum: ['admin', 'user', 'guest'] }).default('user'), +}); +``` + +### Custom JSON Types + +```typescript +import { pgTable, serial, json } from 'drizzle-orm/pg-core'; +import { z } from 'zod'; + +// Type-safe JSON with Zod +const MetadataSchema = z.object({ + theme: z.enum(['light', 'dark']), + locale: z.string(), + notifications: z.boolean(), +}); + +type Metadata = z.infer<typeof MetadataSchema>; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + metadata: json('metadata').$type<Metadata>(), +}); + +// Runtime validation +async function updateMetadata(userId: number, metadata: unknown) { + const validated = MetadataSchema.parse(metadata); + await db.update(users).set({ metadata: validated }).where(eq(users.id, userId)); +} +``` + +### Arrays + +```typescript +import { pgTable, serial, text } from 'drizzle-orm/pg-core'; + +export
const posts = pgTable('posts', { + id: serial('id').primaryKey(), + tags: text('tags').array(), +}); + +// Query array columns +import { arrayContains, arrayContained } from 'drizzle-orm'; + +await db.select().from(posts).where(arrayContains(posts.tags, ['typescript', 'drizzle'])); +``` + +## Indexes + +### Basic Indexes + +```typescript +import { pgTable, serial, text, varchar, index, uniqueIndex } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: varchar('email', { length: 255 }).notNull(), + name: text('name'), + city: text('city'), +}, (table) => ({ + emailIdx: uniqueIndex('email_idx').on(table.email), + nameIdx: index('name_idx').on(table.name), + cityNameIdx: index('city_name_idx').on(table.city, table.name), +})); +``` + +### Partial Indexes + +```typescript +import { sql } from 'drizzle-orm'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: varchar('email', { length: 255 }), + deletedAt: timestamp('deleted_at'), +}, (table) => ({ + activeEmailIdx: uniqueIndex('active_email_idx') + .on(table.email) + .where(sql`${table.deletedAt} IS NULL`), +})); +``` + +### Full-Text Search + +```typescript +import { pgTable, serial, text, index } from 'drizzle-orm/pg-core'; +import { sql } from 'drizzle-orm'; + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: text('title').notNull(), + content: text('content').notNull(), +}, (table) => ({ + searchIdx: index('search_idx').using( + 'gin', + sql`to_tsvector('english', ${table.title} || ' ' || ${table.content})` + ), +})); + +// Full-text search query +const results = await db.select().from(posts).where( + sql`to_tsvector('english', ${posts.title} || ' ' || ${posts.content}) @@ plainto_tsquery('english', 'typescript orm')` +); +``` + +## Composite Keys + +```typescript +import { pgTable, text, primaryKey } from 'drizzle-orm/pg-core'; + +export const userPreferences = pgTable('user_preferences', { 
+ userId: integer('user_id').notNull(), + key: text('key').notNull(), + value: text('value').notNull(), +}, (table) => ({ + pk: primaryKey({ columns: [table.userId, table.key] }), +})); +``` + +## Check Constraints + +```typescript +import { pgTable, serial, integer, check } from 'drizzle-orm/pg-core'; +import { sql } from 'drizzle-orm'; + +export const products = pgTable('products', { + id: serial('id').primaryKey(), + price: integer('price').notNull(), + discountPrice: integer('discount_price'), +}, (table) => ({ + priceCheck: check('price_check', sql`${table.price} > 0`), + discountCheck: check('discount_check', sql`${table.discountPrice} < ${table.price}`), +})); +``` + +## Generated Columns + +```typescript +import { pgTable, serial, text, integer } from 'drizzle-orm/pg-core'; +import { sql } from 'drizzle-orm'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + firstName: text('first_name').notNull(), + lastName: text('last_name').notNull(), + fullName: text('full_name').generatedAlwaysAs( + (): SQL => sql`${users.firstName} || ' ' || ${users.lastName}`, + { mode: 'stored' } + ), +}); +``` + +## Multi-Tenant Patterns + +### Row-Level Security (PostgreSQL) + +```typescript +import { pgTable, serial, text, uuid } from 'drizzle-orm/pg-core'; + +export const tenants = pgTable('tenants', { + id: uuid('id').defaultRandom().primaryKey(), + name: text('name').notNull(), +}); + +export const documents = pgTable('documents', { + id: serial('id').primaryKey(), + tenantId: uuid('tenant_id').notNull().references(() => tenants.id), + title: text('title').notNull(), + content: text('content'), +}); + +// Apply RLS policy (via migration SQL) +/* +ALTER TABLE documents ENABLE ROW LEVEL SECURITY; + +CREATE POLICY tenant_isolation ON documents + USING (tenant_id = current_setting('app.current_tenant_id')::uuid); +*/ + +// Set tenant context +await db.execute(sql`SET app.current_tenant_id = ${tenantId}`); +``` + +### Schema-Per-Tenant + +```typescript 
+import { drizzle } from 'drizzle-orm/node-postgres'; + +// Create schema-aware connection +function getTenantDb(tenantId: string) { + const schemaName = `tenant_${tenantId}`; + + return drizzle(pool, { + schema: { + ...schema, + }, + schemaPrefix: schemaName, + }); +} + +// Use tenant-specific DB +const tenantDb = getTenantDb('tenant123'); +await tenantDb.select().from(users); +``` + +## Database-Specific Features + +### PostgreSQL: JSONB Operations + +```typescript +import { pgTable, serial, jsonb } from 'drizzle-orm/pg-core'; +import { sql } from 'drizzle-orm'; + +export const settings = pgTable('settings', { + id: serial('id').primaryKey(), + config: jsonb('config').$type<Record<string, unknown>>(), +}); + +// JSONB operators +await db.select().from(settings).where( + sql`${settings.config}->>'theme' = 'dark'` +); + +// JSONB path query +await db.select().from(settings).where( + sql`${settings.config} @> '{"notifications": {"email": true}}'::jsonb` +); + +``` + +### MySQL: Spatial Types + +```typescript +import { mysqlTable, serial, geometry } from 'drizzle-orm/mysql-core'; +import { sql } from 'drizzle-orm'; + +export const locations = mysqlTable('locations', { + id: serial('id').primaryKey(), + point: geometry('point', { type: 'point', srid: 4326 }), +}); + +// Spatial query +await db.select().from(locations).where( + sql`ST_Distance_Sphere(${locations.point}, POINT(${lng}, ${lat})) < 1000` +); +``` + +### SQLite: FTS5 + +```typescript +import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + +export const documents = sqliteTable('documents', { + title: text('title'), + content: text('content'), +}); + +// Create FTS5 virtual table (via migration) +/* +CREATE VIRTUAL TABLE documents_fts USING fts5(title, content, content='documents'); +*/ +``` + +## Schema Versioning + +### Migration Strategy + +```typescript +// db/schema.ts +export const schemaVersion = pgTable('schema_version', { + version: serial('version').primaryKey(), + appliedAt: timestamp('applied_at').defaultNow(),
+}); + +// Track migrations +await db.insert(schemaVersion).values({ version: 1 }); + +// Check version +const [currentVersion] = await db.select().from(schemaVersion).orderBy(desc(schemaVersion.version)).limit(1); +``` + +## Type Inference Helpers + +```typescript +import { InferSelectModel, InferInsertModel } from 'drizzle-orm'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: text('email').notNull(), + name: text('name'), +}); + +// Generate types +export type User = InferSelectModel<typeof users>; +export type NewUser = InferInsertModel<typeof users>; + +// Partial updates +export type UserUpdate = Partial<NewUser>; + +// Nested relation types +export type UserWithPosts = User & { + posts: Post[]; +}; +``` + +## Best Practices + +### Schema Organization + +```typescript +// db/schema/users.ts +export const users = pgTable('users', { ... }); +export const userRelations = relations(users, { ... }); + +// db/schema/posts.ts +export const posts = pgTable('posts', { ... }); +export const postRelations = relations(posts, { ...
}); + +// db/schema/index.ts +export * from './users'; +export * from './posts'; + +// db/client.ts +import * as schema from './schema'; +export const db = drizzle(pool, { schema }); +``` + +### Naming Conventions + +```typescript +// ✅ Good: Consistent naming +export const users = pgTable('users', { + id: serial('id').primaryKey(), + firstName: text('first_name'), + createdAt: timestamp('created_at'), +}); + +// ❌ Bad: Inconsistent naming +export const Users = pgTable('user', { + ID: serial('userId').primaryKey(), + first_name: text('firstname'), +}); +``` + +### Default Values + +```typescript +import { sql } from 'drizzle-orm'; + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + slug: text('slug').notNull(), + views: integer('views').default(0), + createdAt: timestamp('created_at').defaultNow(), + updatedAt: timestamp('updated_at').default(sql`CURRENT_TIMESTAMP`), + uuid: uuid('uuid').defaultRandom(), +}); +``` diff --git a/.claude/skills/drizzle-orm/references/performance.md b/.claude/skills/drizzle-orm/references/performance.md new file mode 100644 index 0000000..e2c9f98 --- /dev/null +++ b/.claude/skills/drizzle-orm/references/performance.md @@ -0,0 +1,594 @@ +# Performance Optimization + +Connection pooling, query optimization, edge runtime integration, and performance best practices. 
+ +## Connection Pooling + +### PostgreSQL (node-postgres) + +```typescript +import { Pool } from 'pg'; +import { drizzle } from 'drizzle-orm/node-postgres'; + +const pool = new Pool({ + host: process.env.DB_HOST, + port: parseInt(process.env.DB_PORT || '5432'), + database: process.env.DB_NAME, + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + max: 20, // Maximum pool size + idleTimeoutMillis: 30000, // Close idle clients after 30s + connectionTimeoutMillis: 2000, // Timeout connection attempts +}); + +export const db = drizzle(pool); + +// Graceful shutdown +process.on('SIGTERM', async () => { + await pool.end(); +}); +``` + +### MySQL (mysql2) + +```typescript +import mysql from 'mysql2/promise'; +import { drizzle } from 'drizzle-orm/mysql2'; + +const poolConnection = mysql.createPool({ + host: process.env.DB_HOST, + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, + waitForConnections: true, + connectionLimit: 10, + maxIdle: 10, + idleTimeout: 60000, + queueLimit: 0, + enableKeepAlive: true, + keepAliveInitialDelay: 0, +}); + +export const db = drizzle(poolConnection); +``` + +### SQLite (better-sqlite3) + +```typescript +import Database from 'better-sqlite3'; +import { drizzle } from 'drizzle-orm/better-sqlite3'; + +const sqlite = new Database('sqlite.db', { + readonly: false, + fileMustExist: false, + timeout: 5000, + verbose: console.log, // Remove in production +}); + +// Performance pragmas +sqlite.pragma('journal_mode = WAL'); +sqlite.pragma('synchronous = normal'); +sqlite.pragma('cache_size = -64000'); // 64MB cache +sqlite.pragma('temp_store = memory'); + +export const db = drizzle(sqlite); + +process.on('exit', () => sqlite.close()); +``` + +## Query Optimization + +### Select Only Needed Columns + +```typescript +// ❌ Bad: Fetch all columns +const users = await db.select().from(users); + +// ✅ Good: Fetch only needed columns +const users = await db.select({ + id: users.id, + email: 
users.email, + name: users.name, +}).from(users); +``` + +### Use Indexes Effectively + +```typescript +import { pgTable, serial, text, varchar, index } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: varchar('email', { length: 255 }).notNull(), + city: text('city'), + status: text('status'), +}, (table) => ({ + // Index frequently queried columns + emailIdx: index('email_idx').on(table.email), + + // Composite index for common query patterns + cityStatusIdx: index('city_status_idx').on(table.city, table.status), +})); + +// Query uses index +const activeUsersInNYC = await db.select() + .from(users) + .where(and( + eq(users.city, 'NYC'), + eq(users.status, 'active') + )); +``` + +### Analyze Query Plans + +```typescript +import { sql } from 'drizzle-orm'; + +// PostgreSQL EXPLAIN +const plan = await db.execute( + sql`EXPLAIN ANALYZE SELECT * FROM ${users} WHERE ${users.email} = 'user@example.com'` +); + +console.log(plan.rows); + +// Check for: +// - "Seq Scan" (bad) vs "Index Scan" (good) +// - Actual time vs estimated time +// - Rows removed by filter +``` + +### Pagination Performance + +```typescript +// ❌ Bad: OFFSET on large datasets (gets slower as offset increases) +const page = await db.select() + .from(users) + .limit(20) + .offset(10000); // Scans 10,020 rows! 
+ +// ✅ Good: Cursor-based pagination (constant time) +const page = await db.select() + .from(users) + .where(gt(users.id, lastSeenId)) + .orderBy(asc(users.id)) + .limit(20); + +// ✅ Good: Seek method for timestamp-based pagination +const page = await db.select() + .from(posts) + .where(lt(posts.createdAt, lastSeenTimestamp)) + .orderBy(desc(posts.createdAt)) + .limit(20); +``` + +## Edge Runtime Integration + +### Cloudflare Workers (D1) + +```typescript +import { drizzle } from 'drizzle-orm/d1'; + +export default { + async fetch(request: Request, env: Env): Promise { + const db = drizzle(env.DB); + + const users = await db.select().from(users).limit(10); + + return Response.json(users); + }, +}; +``` + +### Vercel Edge (Neon) + +```typescript +import { neon } from '@neondatabase/serverless'; +import { drizzle } from 'drizzle-orm/neon-http'; + +export const runtime = 'edge'; + +export async function GET() { + const sql = neon(process.env.DATABASE_URL!); + const db = drizzle(sql); + + const users = await db.select().from(users); + + return Response.json(users); +} +``` + +### Supabase Edge Functions + +```typescript +import { createClient } from '@supabase/supabase-js'; +import { drizzle } from 'drizzle-orm/postgres-js'; +import postgres from 'postgres'; + +Deno.serve(async (req) => { + const client = postgres(Deno.env.get('DATABASE_URL')!); + const db = drizzle(client); + + const data = await db.select().from(users); + + return new Response(JSON.stringify(data), { + headers: { 'Content-Type': 'application/json' }, + }); +}); +``` + +## Caching Strategies + +### In-Memory Cache + +```typescript +import { LRUCache } from 'lru-cache'; + +const cache = new LRUCache({ + max: 500, + ttl: 1000 * 60 * 5, // 5 minutes +}); + +async function getCachedUser(id: number) { + const key = `user:${id}`; + const cached = cache.get(key); + + if (cached) return cached; + + const user = await db.select().from(users).where(eq(users.id, id)); + cache.set(key, user); + + return user; +} 
+``` + +### Redis Cache Layer + +```typescript +import { Redis } from 'ioredis'; + +const redis = new Redis(process.env.REDIS_URL); + +async function getCachedData( + key: string, + fetcher: () => Promise, + ttl: number = 300 +): Promise { + // Try cache first + const cached = await redis.get(key); + if (cached) return JSON.parse(cached); + + // Fetch from database + const data = await fetcher(); + + // Store in cache + await redis.setex(key, ttl, JSON.stringify(data)); + + return data; +} + +// Usage +const users = await getCachedData( + 'users:all', + () => db.select().from(users), + 600 +); +``` + +### Materialized Views (PostgreSQL) + +```typescript +// Create materialized view (via migration) +/* +CREATE MATERIALIZED VIEW user_stats AS +SELECT + u.id, + u.name, + COUNT(p.id) AS post_count, + COUNT(c.id) AS comment_count +FROM users u +LEFT JOIN posts p ON p.author_id = u.id +LEFT JOIN comments c ON c.user_id = u.id +GROUP BY u.id; + +CREATE UNIQUE INDEX ON user_stats (id); +*/ + +// Define schema +export const userStats = pgMaterializedView('user_stats').as((qb) => + qb.select({ + id: users.id, + name: users.name, + postCount: sql`COUNT(${posts.id})`, + commentCount: sql`COUNT(${comments.id})`, + }) + .from(users) + .leftJoin(posts, eq(posts.authorId, users.id)) + .leftJoin(comments, eq(comments.userId, users.id)) + .groupBy(users.id) +); + +// Refresh materialized view +await db.execute(sql`REFRESH MATERIALIZED VIEW CONCURRENTLY user_stats`); + +// Query materialized view (fast!) 
+const stats = await db.select().from(userStats); +``` + +## Batch Operations Optimization + +### Batch Insert with COPY (PostgreSQL) + +```typescript +import { copyFrom } from 'pg-copy-streams'; +import { pipeline } from 'stream/promises'; +import { Readable } from 'stream'; + +async function bulkInsert(data: any[]) { + const client = await pool.connect(); + + try { + const stream = client.query( + copyFrom(`COPY users (email, name) FROM STDIN WITH (FORMAT csv)`) + ); + + const input = Readable.from( + data.map(row => `${row.email},${row.name}\n`) + ); + + await pipeline(input, stream); + } finally { + client.release(); + } +} + +// 10x faster than batch INSERT for large datasets +``` + +### Chunk Processing + +```typescript +async function* chunked(array: T[], size: number) { + for (let i = 0; i < array.length; i += size) { + yield array.slice(i, i + size); + } +} + +async function bulkUpdate(updates: { id: number; name: string }[]) { + for await (const chunk of chunked(updates, 100)) { + await db.transaction(async (tx) => { + for (const update of chunk) { + await tx.update(users) + .set({ name: update.name }) + .where(eq(users.id, update.id)); + } + }); + } +} +``` + +## Connection Management + +### Serverless Optimization + +```typescript +// ❌ Bad: New connection per request +export async function handler() { + const pool = new Pool({ connectionString: process.env.DATABASE_URL }); + const db = drizzle(pool); + + const users = await db.select().from(users); + + await pool.end(); + return users; +} + +// ✅ Good: Reuse connection across warm starts +let cachedDb: ReturnType | null = null; + +export async function handler() { + if (!cachedDb) { + const pool = new Pool({ + connectionString: process.env.DATABASE_URL, + max: 1, // Serverless: single connection per instance + }); + cachedDb = drizzle(pool); + } + + const users = await cachedDb.select().from(users); + return users; +} +``` + +### HTTP-based Databases (Neon, Turso) + +```typescript +// No connection 
pooling needed - uses HTTP +import { neon } from '@neondatabase/serverless'; +import { drizzle } from 'drizzle-orm/neon-http'; + +const sql = neon(process.env.DATABASE_URL!); +const db = drizzle(sql); + +// Each query is a single HTTP request +const users = await db.select().from(users); +``` + +## Read Replicas + +```typescript +import { Pool } from 'pg'; +import { drizzle } from 'drizzle-orm/node-postgres'; + +// Primary (writes) +const primaryPool = new Pool({ connectionString: process.env.PRIMARY_DB_URL }); +const primaryDb = drizzle(primaryPool); + +// Replica (reads) +const replicaPool = new Pool({ connectionString: process.env.REPLICA_DB_URL }); +const replicaDb = drizzle(replicaPool); + +// Route queries appropriately +async function getUsers() { + return replicaDb.select().from(users); // Read from replica +} + +async function createUser(data: NewUser) { + return primaryDb.insert(users).values(data).returning(); // Write to primary +} +``` + +## Monitoring & Profiling + +### Query Logging + +```typescript +import { drizzle } from 'drizzle-orm/node-postgres'; + +const db = drizzle(pool, { + logger: { + logQuery(query: string, params: unknown[]) { + console.log('Query:', query); + console.log('Params:', params); + console.time('query'); + }, + }, +}); + +// Custom logger with metrics +class MetricsLogger { + private queries: Map = new Map(); + + logQuery(query: string) { + const start = Date.now(); + + return () => { + const duration = Date.now() - start; + const stats = this.queries.get(query) || { count: 0, totalTime: 0 }; + + this.queries.set(query, { + count: stats.count + 1, + totalTime: stats.totalTime + duration, + }); + + if (duration > 1000) { + console.warn(`Slow query (${duration}ms):`, query); + } + }; + } + + getStats() { + return Array.from(this.queries.entries()).map(([query, stats]) => ({ + query, + count: stats.count, + avgTime: stats.totalTime / stats.count, + })); + } +} +``` + +### Performance Monitoring + +```typescript +import { 
performance } from 'perf_hooks'; + +async function measureQuery<T>( + name: string, + query: Promise<T> +): Promise<T> { + const start = performance.now(); + + try { + const result = await query; + const duration = performance.now() - start; + + console.log(`[${name}] completed in ${duration.toFixed(2)}ms`); + + return result; + } catch (error) { + const duration = performance.now() - start; + console.error(`[${name}] failed after ${duration.toFixed(2)}ms`, error); + throw error; + } +} + +// Usage +const users = await measureQuery( + 'fetchUsers', + db.select().from(users).limit(100) +); +``` + +## Database-Specific Optimizations + +### PostgreSQL + +```typescript +// Connection optimization +const pool = new Pool({ + max: 20, + application_name: 'myapp', + statement_timeout: 30000, // 30s query timeout + query_timeout: 30000, + connectionTimeoutMillis: 5000, + idle_in_transaction_session_timeout: 10000, +}); + +// Session optimization +await db.execute(sql`SET work_mem = '256MB'`); +await db.execute(sql`SET maintenance_work_mem = '512MB'`); +await db.execute(sql`SET effective_cache_size = '4GB'`); +``` + +### MySQL + +```typescript +const pool = mysql.createPool({ + waitForConnections: true, + connectionLimit: 10, + queueLimit: 0, + enableKeepAlive: true, + keepAliveInitialDelay: 0, + dateStrings: false, + supportBigNumbers: true, + bigNumberStrings: false, + multipleStatements: false, // Security + timezone: 'Z', // UTC +}); +``` + +### SQLite + +```typescript +// WAL mode for concurrent reads +sqlite.pragma('journal_mode = WAL'); + +// Optimize for performance +sqlite.pragma('synchronous = NORMAL'); +sqlite.pragma('cache_size = -64000'); // 64MB +sqlite.pragma('temp_store = MEMORY'); +sqlite.pragma('mmap_size = 30000000000'); // 30GB mmap + +// Disable for bulk inserts +const stmt = sqlite.prepare('INSERT INTO users (email, name) VALUES (?, ?)'); + +const insertMany = sqlite.transaction((users) => { + for (const user of users) { + stmt.run(user.email, user.name); + }
+}); + +insertMany(users); // 100x faster than individual inserts +``` + +## Best Practices Summary + +1. **Always use connection pooling** in long-running processes +2. **Select only needed columns** to reduce network transfer +3. **Add indexes** on frequently queried columns and foreign keys +4. **Use cursor-based pagination** instead of OFFSET for large datasets +5. **Batch operations** when inserting/updating multiple records +6. **Cache expensive queries** with appropriate TTL +7. **Monitor slow queries** and optimize with EXPLAIN ANALYZE +8. **Use prepared statements** for frequently executed queries +9. **Implement read replicas** for high-traffic read operations +10. **Use HTTP-based databases** (Neon, Turso) for edge/serverless diff --git a/.claude/skills/drizzle-orm/references/query-patterns.md b/.claude/skills/drizzle-orm/references/query-patterns.md new file mode 100644 index 0000000..07a1ffd --- /dev/null +++ b/.claude/skills/drizzle-orm/references/query-patterns.md @@ -0,0 +1,577 @@ +# Query Patterns + +Advanced querying techniques, subqueries, CTEs, and raw SQL in Drizzle ORM. 
+ +## Subqueries + +### SELECT Subqueries + +```typescript +import { sql, eq } from 'drizzle-orm'; + +// Scalar subquery +const avgPrice = db.select({ value: avg(products.price) }).from(products); + +const expensiveProducts = await db + .select() + .from(products) + .where(gt(products.price, avgPrice)); + +// Correlated subquery +const authorsWithPostCount = await db + .select({ + author: authors, + postCount: sql`( + SELECT COUNT(*) + FROM ${posts} + WHERE ${posts.authorId} = ${authors.id} + )`, + }) + .from(authors); +``` + +### EXISTS Subqueries + +```typescript +// Find authors with posts +const authorsWithPosts = await db + .select() + .from(authors) + .where( + sql`EXISTS ( + SELECT 1 + FROM ${posts} + WHERE ${posts.authorId} = ${authors.id} + )` + ); + +// Find authors without posts +const authorsWithoutPosts = await db + .select() + .from(authors) + .where( + sql`NOT EXISTS ( + SELECT 1 + FROM ${posts} + WHERE ${posts.authorId} = ${authors.id} + )` + ); +``` + +### IN Subqueries + +```typescript +// Find users who commented +const usersWhoCommented = await db + .select() + .from(users) + .where( + sql`${users.id} IN ( + SELECT DISTINCT ${comments.userId} + FROM ${comments} + )` + ); +``` + +## Common Table Expressions (CTEs) + +### Basic CTE + +```typescript +import { sql } from 'drizzle-orm'; + +const topAuthors = db.$with('top_authors').as( + db.select({ + id: authors.id, + name: authors.name, + postCount: sql`COUNT(${posts.id})`.as('post_count'), + }) + .from(authors) + .leftJoin(posts, eq(authors.id, posts.authorId)) + .groupBy(authors.id) + .having(sql`COUNT(${posts.id}) > 10`) +); + +const result = await db + .with(topAuthors) + .select() + .from(topAuthors); +``` + +### Recursive CTE + +```typescript +// Organizational hierarchy +export const employees = pgTable('employees', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + managerId: integer('manager_id').references((): AnyPgColumn => employees.id), +}); + +const employeeHierarchy 
= db.$with('employee_hierarchy').as( + db.select({ + id: employees.id, + name: employees.name, + managerId: employees.managerId, + level: sql`1`.as('level'), + }) + .from(employees) + .where(isNull(employees.managerId)) + .unionAll( + db.select({ + id: employees.id, + name: employees.name, + managerId: employees.managerId, + level: sql`employee_hierarchy.level + 1`, + }) + .from(employees) + .innerJoin( + sql`employee_hierarchy`, + sql`${employees.managerId} = employee_hierarchy.id` + ) + ) +); + +const hierarchy = await db + .with(employeeHierarchy) + .select() + .from(employeeHierarchy); +``` + +### Multiple CTEs + +```typescript +const activeUsers = db.$with('active_users').as( + db.select().from(users).where(eq(users.isActive, true)) +); + +const recentPosts = db.$with('recent_posts').as( + db.select().from(posts).where(gt(posts.createdAt, sql`NOW() - INTERVAL '30 days'`)) +); + +const result = await db + .with(activeUsers, recentPosts) + .select({ + user: activeUsers, + post: recentPosts, + }) + .from(activeUsers) + .leftJoin(recentPosts, eq(activeUsers.id, recentPosts.authorId)); +``` + +## Raw SQL + +### Safe Raw Queries + +```typescript +import { sql } from 'drizzle-orm'; + +// Parameterized query (safe from SQL injection) +const userId = 123; +const user = await db.execute( + sql`SELECT * FROM ${users} WHERE ${users.id} = ${userId}` +); + +// Raw SQL with type safety +const result = await db.execute<{ count: number }>( + sql`SELECT COUNT(*) as count FROM ${users}` +); +``` + +### SQL Template Composition + +```typescript +// Reusable SQL fragments +function whereActive() { + return sql`${users.isActive} = true`; +} + +function whereRole(role: string) { + return sql`${users.role} = ${role}`; +} + +// Compose fragments +const admins = await db + .select() + .from(users) + .where(sql`${whereActive()} AND ${whereRole('admin')}`); +``` + +### Dynamic WHERE Clauses + +```typescript +import { and, SQL } from 'drizzle-orm'; + +interface Filters { + name?: string; 
+ role?: string; + isActive?: boolean; +} + +function buildFilters(filters: Filters): SQL | undefined { + const conditions: SQL[] = []; + + if (filters.name) { + conditions.push(like(users.name, `%${filters.name}%`)); + } + + if (filters.role) { + conditions.push(eq(users.role, filters.role)); + } + + if (filters.isActive !== undefined) { + conditions.push(eq(users.isActive, filters.isActive)); + } + + return conditions.length > 0 ? and(...conditions) : undefined; +} + +// Usage +const filters: Filters = { name: 'John', isActive: true }; +const users = await db + .select() + .from(users) + .where(buildFilters(filters)); +``` + +## Aggregations + +### Basic Aggregates + +```typescript +import { count, sum, avg, min, max, sql } from 'drizzle-orm'; + +// Count +const userCount = await db.select({ count: count() }).from(users); + +// Sum +const totalRevenue = await db.select({ total: sum(orders.amount) }).from(orders); + +// Average +const avgPrice = await db.select({ avg: avg(products.price) }).from(products); + +// Multiple aggregates +const stats = await db + .select({ + count: count(), + total: sum(orders.amount), + avg: avg(orders.amount), + min: min(orders.amount), + max: max(orders.amount), + }) + .from(orders); +``` + +### GROUP BY with HAVING + +```typescript +// Authors with more than 5 posts +const prolificAuthors = await db + .select({ + author: authors.name, + postCount: count(posts.id), + }) + .from(authors) + .leftJoin(posts, eq(authors.id, posts.authorId)) + .groupBy(authors.id) + .having(sql`COUNT(${posts.id}) > 5`); +``` + +### Window Functions + +```typescript +// Rank products by price within category +const rankedProducts = await db + .select({ + product: products, + priceRank: sql`RANK() OVER (PARTITION BY ${products.categoryId} ORDER BY ${products.price} DESC)`, + }) + .from(products); + +// Running total +const ordersWithRunningTotal = await db + .select({ + order: orders, + runningTotal: sql`SUM(${orders.amount}) OVER (ORDER BY 
${orders.createdAt})`, + }) + .from(orders); + +// Row number +const numberedUsers = await db + .select({ + user: users, + rowNum: sql`ROW_NUMBER() OVER (ORDER BY ${users.createdAt})`, + }) + .from(users); +``` + +## Prepared Statements + +### Reusable Queries + +```typescript +// Prepare once, execute many times +const getUserById = db + .select() + .from(users) + .where(eq(users.id, sql.placeholder('id'))) + .prepare('get_user_by_id'); + +// Execute with different parameters +const user1 = await getUserById.execute({ id: 1 }); +const user2 = await getUserById.execute({ id: 2 }); + +// Complex prepared statement +const searchUsers = db + .select() + .from(users) + .where( + and( + like(users.name, sql.placeholder('name')), + eq(users.role, sql.placeholder('role')) + ) + ) + .prepare('search_users'); + +const admins = await searchUsers.execute({ name: '%John%', role: 'admin' }); +``` + +## Batch Operations + +### Batch Insert + +```typescript +// Insert multiple rows +const newUsers = await db.insert(users).values([ + { email: 'user1@example.com', name: 'User 1' }, + { email: 'user2@example.com', name: 'User 2' }, + { email: 'user3@example.com', name: 'User 3' }, +]).returning(); + +// Batch with onConflictDoNothing +await db.insert(users).values(bulkUsers).onConflictDoNothing(); + +// Batch with onConflictDoUpdate (upsert) +await db.insert(users) + .values(bulkUsers) + .onConflictDoUpdate({ + target: users.email, + set: { name: sql`EXCLUDED.name` }, + }); +``` + +### Batch Update + +```typescript +// Update multiple specific rows +await db.transaction(async (tx) => { + for (const update of updates) { + await tx.update(users) + .set({ name: update.name }) + .where(eq(users.id, update.id)); + } +}); + +// Bulk update with CASE +await db.execute(sql` + UPDATE ${users} + SET ${users.role} = CASE ${users.id} + ${sql.join( + updates.map((u) => sql`WHEN ${u.id} THEN ${u.role}`), + sql.raw(' ') + )} + END + WHERE ${users.id} IN (${sql.join(updates.map((u) => u.id), 
sql.raw(', '))}) +`); +``` + +### Batch Delete + +```typescript +// Delete multiple IDs +await db.delete(users).where(inArray(users.id, [1, 2, 3, 4, 5])); + +// Conditional batch delete +await db.delete(posts).where( + and( + lt(posts.createdAt, new Date('2023-01-01')), + eq(posts.isDraft, true) + ) +); +``` + +## LATERAL Joins + +```typescript +// Get top 3 posts for each author +const authorsWithTopPosts = await db + .select({ + author: authors, + post: posts, + }) + .from(authors) + .leftJoin( + sql`LATERAL ( + SELECT * FROM ${posts} + WHERE ${posts.authorId} = ${authors.id} + ORDER BY ${posts.views} DESC + LIMIT 3 + ) AS ${posts}`, + sql`true` + ); +``` + +## UNION Queries + +```typescript +// Combine results from multiple queries +const allContent = await db + .select({ id: posts.id, title: posts.title, type: sql`'post'` }) + .from(posts) + .union( + db.select({ id: articles.id, title: articles.title, type: sql`'article'` }) + .from(articles) + ); + +// UNION ALL (includes duplicates) +const allItems = await db + .select({ id: products.id, name: products.name }) + .from(products) + .unionAll( + db.select({ id: services.id, name: services.name }).from(services) + ); +``` + +## Distinct Queries + +```typescript +// DISTINCT +const uniqueRoles = await db.selectDistinct({ role: users.role }).from(users); + +// DISTINCT ON (PostgreSQL) +const latestPostPerAuthor = await db + .selectDistinctOn([posts.authorId], { + post: posts, + }) + .from(posts) + .orderBy(posts.authorId, desc(posts.createdAt)); +``` + +## Locking Strategies + +```typescript +// FOR UPDATE (pessimistic locking) +await db.transaction(async (tx) => { + const user = await tx + .select() + .from(users) + .where(eq(users.id, userId)) + .for('update'); + + // Critical section - user row is locked + await tx.update(users) + .set({ balance: user.balance - amount }) + .where(eq(users.id, userId)); +}); + +// FOR SHARE (shared lock) +const user = await db + .select() + .from(users) + .where(eq(users.id, 
userId)) + .for('share'); + +// SKIP LOCKED +const availableTask = await db + .select() + .from(tasks) + .where(eq(tasks.status, 'pending')) + .limit(1) + .for('update', { skipLocked: true }); +``` + +## Query Builder Patterns + +### Type-Safe Query Builder + +```typescript +class UserQueryBuilder { + private query = db.select().from(users); + + whereRole(role: string) { + this.query = this.query.where(eq(users.role, role)); + return this; + } + + whereActive() { + this.query = this.query.where(eq(users.isActive, true)); + return this; + } + + orderByCreated() { + this.query = this.query.orderBy(desc(users.createdAt)); + return this; + } + + async execute() { + return await this.query; + } +} + +// Usage +const admins = await new UserQueryBuilder() + .whereRole('admin') + .whereActive() + .orderByCreated() + .execute(); +``` + +## Best Practices + +### Avoid N+1 Queries + +```typescript +// ❌ Bad: N+1 query +const authors = await db.select().from(authors); +for (const author of authors) { + author.posts = await db.select().from(posts).where(eq(posts.authorId, author.id)); +} + +// ✅ Good: Single query with join +const authorsWithPosts = await db.query.authors.findMany({ + with: { posts: true }, +}); + +// ✅ Good: Dataloader pattern +import DataLoader from 'dataloader'; + +const postLoader = new DataLoader(async (authorIds: number[]) => { + const posts = await db.select().from(posts).where(inArray(posts.authorId, authorIds)); + + const grouped = authorIds.map(id => + posts.filter(post => post.authorId === id) + ); + + return grouped; +}); +``` + +### Query Timeouts + +```typescript +// PostgreSQL statement timeout +await db.execute(sql`SET statement_timeout = '5s'`); + +// Per-query timeout +const withTimeout = async (promise: Promise, ms: number): Promise => { + const timeout = new Promise((_, reject) => + setTimeout(() => reject(new Error('Query timeout')), ms) + ); + return Promise.race([promise, timeout]); +}; + +const users = await withTimeout( + 
db.select().from(users), + 5000 +); +``` diff --git a/.claude/skills/drizzle-orm/references/vs-prisma.md b/.claude/skills/drizzle-orm/references/vs-prisma.md new file mode 100644 index 0000000..121efd2 --- /dev/null +++ b/.claude/skills/drizzle-orm/references/vs-prisma.md @@ -0,0 +1,503 @@ +# Drizzle vs Prisma Comparison + +Feature comparison, migration guide, and decision framework for choosing between Drizzle and Prisma. + +## Quick Comparison + +| Feature | Drizzle ORM | Prisma | +|---------|-------------|--------| +| **Type Safety** | ✅ Compile-time inference | ✅ Generated types | +| **Bundle Size** | **~35KB** | ~230KB | +| **Runtime** | **Zero dependencies** | Heavy runtime | +| **Cold Start** | **~10ms** | ~250ms | +| **Query Performance** | **Faster (native SQL)** | Slower (translation layer) | +| **Learning Curve** | Moderate (SQL knowledge helpful) | Easier (abstracted) | +| **Migrations** | SQL-based | Declarative schema | +| **Raw SQL** | **First-class support** | Limited support | +| **Edge Runtime** | **Fully compatible** | Limited support | +| **Ecosystem** | Growing | Mature | +| **Studio (GUI)** | ✅ Drizzle Studio | ✅ Prisma Studio | + +## When to Choose Drizzle + +### ✅ Choose Drizzle if you need: + +1. **Performance-critical applications** + - Microservices with tight latency requirements + - High-throughput APIs (>10K req/s) + - Serverless/edge functions with cold start concerns + +2. **Minimal bundle size** + - Client-side database (SQLite in browser) + - Edge runtime deployments + - Mobile applications with bundle constraints + +3. **SQL control** + - Complex queries with CTEs, window functions + - Raw SQL for specific database features + - Database-specific optimizations + +4. 
**Type inference over generation** + - No build step for type generation + - Immediate TypeScript feedback + - Schema changes reflected instantly + +### Example: Edge Function with Drizzle + +```typescript +import { neon } from '@neondatabase/serverless'; +import { drizzle } from 'drizzle-orm/neon-http'; + +export const runtime = 'edge'; + +export async function GET() { + const sql = neon(process.env.DATABASE_URL!); + const db = drizzle(sql); // ~35KB bundle, <10ms cold start + + const users = await db.select().from(users); + return Response.json(users); +} +``` + +## When to Choose Prisma + +### ✅ Choose Prisma if you need: + +1. **Rapid prototyping** + - Quick schema iterations + - Automatic migrations + - Less SQL knowledge required + +2. **Team with varied SQL experience** + - Abstracted query interface + - Declarative migrations + - Generated documentation + +3. **Mature ecosystem** + - Extensive community resources + - Third-party integrations (Nexus, tRPC) + - Enterprise support options + +4. 
**Rich developer experience** + - Prisma Studio (GUI) + - VS Code extension + - Comprehensive documentation + +### Example: Next.js App with Prisma + +```typescript +// schema.prisma +model User { + id Int @id @default(autoincrement()) + email String @unique + posts Post[] +} + +model Post { + id Int @id @default(autoincrement()) + title String + authorId Int + author User @relation(fields: [authorId], references: [id]) +} + +// app/api/users/route.ts +import { prisma } from '@/lib/prisma'; + +export async function GET() { + const users = await prisma.user.findMany({ + include: { posts: true }, + }); + return Response.json(users); +} +``` + +## Feature Comparison + +### Schema Definition + +**Drizzle** (TypeScript-first): +```typescript +import { pgTable, serial, text, integer } from 'drizzle-orm/pg-core'; +import { relations } from 'drizzle-orm'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + email: text('email').notNull().unique(), +}); + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: text('title').notNull(), + authorId: integer('author_id').notNull().references(() => users.id), +}); + +export const usersRelations = relations(users, ({ many }) => ({ + posts: many(posts), +})); +``` + +**Prisma** (Schema DSL): +```prisma +model User { + id Int @id @default(autoincrement()) + email String @unique + posts Post[] +} + +model Post { + id Int @id @default(autoincrement()) + title String + authorId Int + author User @relation(fields: [authorId], references: [id]) +} +``` + +### Querying + +**Drizzle** (SQL-like): +```typescript +import { eq, like, and, gt } from 'drizzle-orm'; + +// Simple query +const user = await db.select().from(users).where(eq(users.id, 1)); + +// Complex filtering +const results = await db.select() + .from(users) + .where( + and( + like(users.email, '%@example.com'), + gt(users.createdAt, new Date('2024-01-01')) + ) + ); + +// Joins +const usersWithPosts = await db + .select({ + 
user: users, + post: posts, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.authorId)); +``` + +**Prisma** (Fluent API): +```typescript +// Simple query +const user = await prisma.user.findUnique({ where: { id: 1 } }); + +// Complex filtering +const results = await prisma.user.findMany({ + where: { + email: { endsWith: '@example.com' }, + createdAt: { gt: new Date('2024-01-01') }, + }, +}); + +// Relations +const usersWithPosts = await prisma.user.findMany({ + include: { posts: true }, +}); +``` + +### Migrations + +**Drizzle** (SQL-based): +```bash +# Generate migration +npx drizzle-kit generate + +# Output: drizzle/0000_migration.sql +# CREATE TABLE "users" ( +# "id" serial PRIMARY KEY, +# "email" text NOT NULL UNIQUE +# ); + +# Apply migration +npx drizzle-kit migrate +``` + +**Prisma** (Declarative): +```bash +# Generate and apply migration +npx prisma migrate dev --name add_users + +# Prisma compares schema.prisma to database +# Generates SQL automatically +# Applies migration +``` + +### Type Generation + +**Drizzle** (Inferred): +```typescript +// Types are inferred at compile time +type User = typeof users.$inferSelect; +type NewUser = typeof users.$inferInsert; + +// Immediate feedback in IDE +const user: User = await db.select().from(users); +``` + +**Prisma** (Generated): +```typescript +// Types generated after schema change +// Run: npx prisma generate + +import { User, Post } from '@prisma/client'; + +const user: User = await prisma.user.findUnique({ where: { id: 1 } }); +``` + +### Raw SQL + +**Drizzle** (First-class): +```typescript +import { sql } from 'drizzle-orm'; + +// Tagged template with type safety +const result = await db.execute( + sql`SELECT * FROM ${users} WHERE ${users.email} = ${email}` +); + +// Mix ORM and raw SQL +const customQuery = await db + .select({ + user: users, + postCount: sql`COUNT(${posts.id})`, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.authorId)) + .groupBy(users.id); +``` + +**Prisma** 
(Limited): +```typescript +// Raw query (loses type safety) +const result = await prisma.$queryRaw` + SELECT * FROM users WHERE email = ${email} +`; + +// Typed raw query (manual type annotation) +const users = await prisma.$queryRaw` + SELECT * FROM users +`; +``` + +## Performance Benchmarks + +### Query Execution Time (1000 queries) + +| Operation | Drizzle | Prisma | Difference | +|-----------|---------|--------|------------| +| findUnique | 1.2s | 3.1s | **2.6x faster** | +| findMany (10 rows) | 1.5s | 3.8s | **2.5x faster** | +| findMany (100 rows) | 2.1s | 5.2s | **2.5x faster** | +| create | 1.8s | 4.1s | **2.3x faster** | +| update | 1.7s | 3.9s | **2.3x faster** | + +### Bundle Size Impact + +```bash +# Next.js production build + +# With Drizzle +├─ Client (First Load JS) +│ └─ pages/index.js: 85 KB (+35KB Drizzle) + +# With Prisma +├─ Client (First Load JS) +│ └─ pages/index.js: 280 KB (+230KB Prisma) +``` + +### Cold Start Times (AWS Lambda) + +| Database | Drizzle | Prisma | +|----------|---------|--------| +| PostgreSQL | ~50ms | ~300ms | +| MySQL | ~45ms | ~280ms | +| SQLite | ~10ms | ~150ms | + +## Migration from Prisma to Drizzle + +### Step 1: Install Drizzle + +```bash +npm install drizzle-orm +npm install -D drizzle-kit + +# Keep Prisma temporarily +# npm uninstall prisma @prisma/client +``` + +### Step 2: Introspect Existing Database + +```typescript +// drizzle.config.ts +import type { Config } from 'drizzle-kit'; + +export default { + schema: './db/schema.ts', + out: './drizzle', + dialect: 'postgresql', + dbCredentials: { + url: process.env.DATABASE_URL!, + }, +} satisfies Config; +``` + +```bash +# Generate Drizzle schema from existing database +npx drizzle-kit introspect +``` + +### Step 3: Convert Queries + +**Prisma**: +```typescript +// Before (Prisma) +const users = await prisma.user.findMany({ + where: { email: { contains: 'example.com' } }, + include: { posts: true }, + orderBy: { createdAt: 'desc' }, + take: 10, +}); +``` + 
+**Drizzle**: +```typescript +// After (Drizzle) +import { like, desc } from 'drizzle-orm'; + +const users = await db.query.users.findMany({ + where: like(users.email, '%example.com%'), + with: { posts: true }, + orderBy: [desc(users.createdAt)], + limit: 10, +}); + +// Or SQL-style +const users = await db + .select() + .from(users) + .where(like(users.email, '%example.com%')) + .orderBy(desc(users.createdAt)) + .limit(10); +``` + +### Step 4: Conversion Patterns + +```typescript +// Prisma → Drizzle mapping + +// findUnique +await prisma.user.findUnique({ where: { id: 1 } }); +await db.select().from(users).where(eq(users.id, 1)); + +// findMany with filters +await prisma.user.findMany({ where: { role: 'admin' } }); +await db.select().from(users).where(eq(users.role, 'admin')); + +// create +await prisma.user.create({ data: { email: 'user@example.com' } }); +await db.insert(users).values({ email: 'user@example.com' }).returning(); + +// update +await prisma.user.update({ where: { id: 1 }, data: { name: 'John' } }); +await db.update(users).set({ name: 'John' }).where(eq(users.id, 1)); + +// delete +await prisma.user.delete({ where: { id: 1 } }); +await db.delete(users).where(eq(users.id, 1)); + +// count +await prisma.user.count(); +await db.select({ count: count() }).from(users); + +// aggregate +await prisma.post.aggregate({ _avg: { views: true } }); +await db.select({ avg: avg(posts.views) }).from(posts); +``` + +### Step 5: Test & Remove Prisma + +```bash +# Run tests with Drizzle +npm test + +# Remove Prisma when confident +npm uninstall prisma @prisma/client +rm -rf prisma/ +``` + +## Decision Matrix + +| Requirement | Drizzle | Prisma | +|-------------|---------|--------| +| Need minimal bundle size | ✅ | ❌ | +| Edge runtime deployment | ✅ | ⚠️ | +| Team unfamiliar with SQL | ❌ | ✅ | +| Complex raw SQL queries | ✅ | ❌ | +| Rapid prototyping | ⚠️ | ✅ | +| Type-safe migrations | ✅ | ✅ | +| Performance critical | ✅ | ❌ | +| Mature ecosystem | ⚠️ | ✅ | +| 
First-class TypeScript | ✅ | ✅ | +| Zero dependencies | ✅ | ❌ | + +## Hybrid Approach + +You can use both in the same project: + +```typescript +// Use Drizzle for performance-critical paths +import { db as drizzleDb } from './lib/drizzle'; + +export async function GET() { + const users = await drizzleDb.select().from(users); + return Response.json(users); +} + +// Use Prisma for admin dashboards (less performance-critical) +import { prisma } from './lib/prisma'; + +export async function getStaticProps() { + const stats = await prisma.user.aggregate({ + _count: true, + _avg: { posts: true }, + }); + return { props: { stats } }; +} +``` + +## Community & Resources + +### Drizzle +- Docs: [orm.drizzle.team](https://orm.drizzle.team) +- Discord: [drizzle.team/discord](https://drizzle.team/discord) +- GitHub: [drizzle-team/drizzle-orm](https://github.com/drizzle-team/drizzle-orm) + +### Prisma +- Docs: [prisma.io/docs](https://prisma.io/docs) +- Discord: [pris.ly/discord](https://pris.ly/discord) +- GitHub: [prisma/prisma](https://github.com/prisma/prisma) + +## Final Recommendation + +**Choose Drizzle for:** +- Greenfield projects prioritizing performance +- Edge/serverless applications +- Teams comfortable with SQL +- Minimal bundle size requirements + +**Choose Prisma for:** +- Established teams with Prisma experience +- Rapid MVP development +- Teams new to databases +- Reliance on Prisma ecosystem (Nexus, etc.) + +**Consider migration when:** +- Performance becomes a bottleneck +- Bundle size impacts user experience +- Edge runtime deployment needed +- Team SQL proficiency increases diff --git a/.claude/skills/fastify-best-practices/SKILL.md b/.claude/skills/fastify-best-practices/SKILL.md new file mode 100644 index 0000000..439e684 --- /dev/null +++ b/.claude/skills/fastify-best-practices/SKILL.md @@ -0,0 +1,75 @@ +--- +name: fastify-best-practices +description: "Guides development of Fastify Node.js backend servers and REST APIs using TypeScript or JavaScript. 
Use when building, configuring, or debugging a Fastify application — including defining routes, implementing plugins, setting up JSON Schema validation, handling errors, optimising performance, managing authentication, configuring CORS and security headers, integrating databases, working with WebSockets, and deploying to production. Covers the full Fastify request lifecycle (hooks, serialization, logging with Pino) and TypeScript integration via strip types. Trigger terms: Fastify, Node.js server, REST API, API routes, backend framework, fastify.config, server.ts, app.ts." +metadata: + tags: fastify, nodejs, typescript, backend, api, server, http +--- + +## When to use + +Use this skill when you need to: +- Develop backend applications using Fastify +- Implement Fastify plugins and route handlers +- Get guidance on Fastify architecture and patterns +- Use TypeScript with Fastify (strip types) +- Implement testing with Fastify's inject method +- Configure validation, serialization, and error handling + +## Quick Start + +A minimal, runnable Fastify server to get started immediately: + +```ts +import Fastify from 'fastify' + +const app = Fastify({ logger: true }) + +app.get('/health', async (request, reply) => { + return { status: 'ok' } +}) + +const start = async () => { + await app.listen({ port: 3000, host: '0.0.0.0' }) +} +start() +``` + +## Recommended Reading Order for Common Scenarios + +- **New to Fastify?** Start with `plugins.md` → `routes.md` → `schemas.md` +- **Adding authentication:** `plugins.md` → `hooks.md` → `authentication.md` +- **Improving performance:** `schemas.md` → `serialization.md` → `performance.md` +- **Setting up testing:** `routes.md` → `testing.md` +- **Going to production:** `logging.md` → `configuration.md` → `deployment.md` + +## How to use + +Read individual rule files for detailed explanations and code examples: + +- [rules/plugins.md](rules/plugins.md) - Plugin development and encapsulation +- [rules/routes.md](rules/routes.md) - 
Route organization and handlers +- [rules/schemas.md](rules/schemas.md) - JSON Schema validation +- [rules/error-handling.md](rules/error-handling.md) - Error handling patterns +- [rules/hooks.md](rules/hooks.md) - Hooks and request lifecycle +- [rules/authentication.md](rules/authentication.md) - Authentication and authorization +- [rules/testing.md](rules/testing.md) - Testing with inject() +- [rules/performance.md](rules/performance.md) - Performance optimization +- [rules/logging.md](rules/logging.md) - Logging with Pino +- [rules/typescript.md](rules/typescript.md) - TypeScript integration +- [rules/decorators.md](rules/decorators.md) - Decorators and extensions +- [rules/content-type.md](rules/content-type.md) - Content type parsing +- [rules/serialization.md](rules/serialization.md) - Response serialization +- [rules/cors-security.md](rules/cors-security.md) - CORS and security headers +- [rules/websockets.md](rules/websockets.md) - WebSocket support +- [rules/database.md](rules/database.md) - Database integration patterns +- [rules/configuration.md](rules/configuration.md) - Application configuration +- [rules/deployment.md](rules/deployment.md) - Production deployment +- [rules/http-proxy.md](rules/http-proxy.md) - HTTP proxying and reply.from() + +## Core Principles + +- **Encapsulation**: Fastify's plugin system provides automatic encapsulation +- **Schema-first**: Define schemas for validation and serialization +- **Performance**: Fastify is optimized for speed; use its features correctly +- **Async/await**: All handlers and hooks support async functions +- **Minimal dependencies**: Prefer Fastify's built-in features and official plugins diff --git a/.claude/skills/fastify-best-practices/rules/authentication.md b/.claude/skills/fastify-best-practices/rules/authentication.md new file mode 100644 index 0000000..3a84b53 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/authentication.md @@ -0,0 +1,521 @@ +--- +name: authentication 
+description: Authentication and authorization patterns in Fastify +metadata: + tags: auth, jwt, session, oauth, security, authorization +--- + +# Authentication and Authorization + +## JWT Authentication with @fastify/jwt + +Use `@fastify/jwt` for JSON Web Token authentication: + +```typescript +import Fastify from 'fastify'; +import fastifyJwt from '@fastify/jwt'; + +const app = Fastify(); + +app.register(fastifyJwt, { + secret: process.env.JWT_SECRET, + sign: { + expiresIn: '1h', + }, +}); + +// Decorate request with authentication method +app.decorate('authenticate', async function (request, reply) { + try { + await request.jwtVerify(); + } catch (err) { + reply.code(401).send({ error: 'Unauthorized' }); + } +}); + +// Login route +app.post('/login', { + schema: { + body: { + type: 'object', + properties: { + email: { type: 'string', format: 'email' }, + password: { type: 'string' }, + }, + required: ['email', 'password'], + }, + }, +}, async (request, reply) => { + const { email, password } = request.body; + const user = await validateCredentials(email, password); + + if (!user) { + return reply.code(401).send({ error: 'Invalid credentials' }); + } + + const token = app.jwt.sign({ + id: user.id, + email: user.email, + role: user.role, + }); + + return { token }; +}); + +// Protected route +app.get('/profile', { + onRequest: [app.authenticate], +}, async (request) => { + return { user: request.user }; +}); +``` + +## Refresh Tokens + +Implement refresh token rotation: + +```typescript +import fastifyJwt from '@fastify/jwt'; +import { randomBytes } from 'node:crypto'; + +app.register(fastifyJwt, { + secret: process.env.JWT_SECRET, + sign: { + expiresIn: '15m', // Short-lived access tokens + }, +}); + +// Store refresh tokens (use Redis in production) +const refreshTokens = new Map(); + +app.post('/auth/login', async (request, reply) => { + const { email, password } = request.body; + const user = await validateCredentials(email, password); + + if (!user) { + 
return reply.code(401).send({ error: 'Invalid credentials' }); + } + + const accessToken = app.jwt.sign({ id: user.id, role: user.role }); + const refreshToken = randomBytes(32).toString('hex'); + + refreshTokens.set(refreshToken, { + userId: user.id, + expires: Date.now() + 7 * 24 * 60 * 60 * 1000, // 7 days + }); + + return { accessToken, refreshToken }; +}); + +app.post('/auth/refresh', async (request, reply) => { + const { refreshToken } = request.body; + const stored = refreshTokens.get(refreshToken); + + if (!stored || stored.expires < Date.now()) { + refreshTokens.delete(refreshToken); + return reply.code(401).send({ error: 'Invalid refresh token' }); + } + + // Delete old token (rotation) + refreshTokens.delete(refreshToken); + + const user = await db.users.findById(stored.userId); + const accessToken = app.jwt.sign({ id: user.id, role: user.role }); + const newRefreshToken = randomBytes(32).toString('hex'); + + refreshTokens.set(newRefreshToken, { + userId: user.id, + expires: Date.now() + 7 * 24 * 60 * 60 * 1000, + }); + + return { accessToken, refreshToken: newRefreshToken }; +}); + +app.post('/auth/logout', async (request, reply) => { + const { refreshToken } = request.body; + refreshTokens.delete(refreshToken); + return { success: true }; +}); +``` + +## Role-Based Access Control + +Implement RBAC with decorators: + +```typescript +type Role = 'admin' | 'user' | 'moderator'; + +// Create authorization decorator +app.decorate('authorize', function (...allowedRoles: Role[]) { + return async (request, reply) => { + await request.jwtVerify(); + + const userRole = request.user.role as Role; + if (!allowedRoles.includes(userRole)) { + return reply.code(403).send({ + error: 'Forbidden', + message: `Role '${userRole}' is not authorized for this resource`, + }); + } + }; +}); + +// Admin only route +app.get('/admin/users', { + onRequest: [app.authorize('admin')], +}, async (request) => { + return db.users.findAll(); +}); + +// Admin or moderator 
+app.delete('/posts/:id', { + onRequest: [app.authorize('admin', 'moderator')], +}, async (request) => { + await db.posts.delete(request.params.id); + return { deleted: true }; +}); +``` + +## Permission-Based Authorization + +Fine-grained permission checks: + +```typescript +interface Permission { + resource: string; + action: 'create' | 'read' | 'update' | 'delete'; +} + +const rolePermissions: Record = { + admin: [ + { resource: '*', action: 'create' }, + { resource: '*', action: 'read' }, + { resource: '*', action: 'update' }, + { resource: '*', action: 'delete' }, + ], + user: [ + { resource: 'posts', action: 'create' }, + { resource: 'posts', action: 'read' }, + { resource: 'comments', action: 'create' }, + { resource: 'comments', action: 'read' }, + ], +}; + +function hasPermission(role: string, resource: string, action: string): boolean { + const permissions = rolePermissions[role] || []; + return permissions.some( + (p) => + (p.resource === '*' || p.resource === resource) && + p.action === action + ); +} + +app.decorate('checkPermission', function (resource: string, action: string) { + return async (request, reply) => { + await request.jwtVerify(); + + if (!hasPermission(request.user.role, resource, action)) { + return reply.code(403).send({ + error: 'Forbidden', + message: `Not allowed to ${action} ${resource}`, + }); + } + }; +}); + +// Usage +app.post('/posts', { + onRequest: [app.checkPermission('posts', 'create')], +}, createPostHandler); + +app.delete('/posts/:id', { + onRequest: [app.checkPermission('posts', 'delete')], +}, deletePostHandler); +``` + +## API Key / Bearer Token Authentication + +Use `@fastify/bearer-auth` for API key and bearer token authentication: + +```typescript +import bearerAuth from '@fastify/bearer-auth'; + +const validKeys = new Set([process.env.API_KEY]); + +app.register(bearerAuth, { + keys: validKeys, + errorResponse: (err) => ({ + error: 'Unauthorized', + message: 'Invalid API key', + }), +}); + +// All routes are now 
protected +app.get('/api/data', async (request) => { + return { data: [] }; +}); +``` + +For database-backed API keys with custom validation: + +```typescript +import bearerAuth from '@fastify/bearer-auth'; + +app.register(bearerAuth, { + auth: async (key, request) => { + const apiKey = await db.apiKeys.findByKey(key); + + if (!apiKey || !apiKey.active) { + return false; + } + + // Track usage (fire and forget) + db.apiKeys.recordUsage(apiKey.id, { + ip: request.ip, + timestamp: new Date(), + }); + + request.apiKey = apiKey; + return true; + }, + errorResponse: (err) => ({ + error: 'Unauthorized', + message: 'Invalid API key', + }), +}); +``` + +## OAuth 2.0 Integration + +Integrate with OAuth providers using @fastify/oauth2: + +```typescript +import fastifyOauth2 from '@fastify/oauth2'; + +app.register(fastifyOauth2, { + name: 'googleOAuth2', + scope: ['profile', 'email'], + credentials: { + client: { + id: process.env.GOOGLE_CLIENT_ID, + secret: process.env.GOOGLE_CLIENT_SECRET, + }, + }, + startRedirectPath: '/auth/google', + callbackUri: 'http://localhost:3000/auth/google/callback', + discovery: { + issuer: 'https://accounts.google.com', + }, +}); + +app.get('/auth/google/callback', async (request, reply) => { + const { token } = await app.googleOAuth2.getAccessTokenFromAuthorizationCodeFlow(request); + + // Fetch user info from Google + const userInfo = await fetch('https://www.googleapis.com/oauth2/v2/userinfo', { + headers: { Authorization: `Bearer ${token.access_token}` }, + }).then((r) => r.json()); + + // Find or create user + let user = await db.users.findByEmail(userInfo.email); + if (!user) { + user = await db.users.create({ + email: userInfo.email, + name: userInfo.name, + provider: 'google', + providerId: userInfo.id, + }); + } + + // Generate JWT + const jwt = app.jwt.sign({ id: user.id, role: user.role }); + + // Redirect to frontend with token + return reply.redirect(`/auth/success?token=${jwt}`); +}); +``` + +## Session-Based Authentication + 
+Use @fastify/session for session management: + +```typescript +import fastifyCookie from '@fastify/cookie'; +import fastifySession from '@fastify/session'; +import RedisStore from 'connect-redis'; +import { createClient } from 'redis'; + +const redisClient = createClient({ url: process.env.REDIS_URL }); +await redisClient.connect(); + +app.register(fastifyCookie); +app.register(fastifySession, { + secret: process.env.SESSION_SECRET, + store: new RedisStore({ client: redisClient }), + cookie: { + secure: process.env.NODE_ENV === 'production', + httpOnly: true, + maxAge: 24 * 60 * 60 * 1000, // 1 day + }, +}); + +app.post('/login', async (request, reply) => { + const { email, password } = request.body; + const user = await validateCredentials(email, password); + + if (!user) { + return reply.code(401).send({ error: 'Invalid credentials' }); + } + + request.session.userId = user.id; + request.session.role = user.role; + + return { success: true }; +}); + +app.decorate('requireSession', async function (request, reply) { + if (!request.session.userId) { + return reply.code(401).send({ error: 'Not authenticated' }); + } +}); + +app.get('/profile', { + onRequest: [app.requireSession], +}, async (request) => { + const user = await db.users.findById(request.session.userId); + return { user }; +}); + +app.post('/logout', async (request, reply) => { + await request.session.destroy(); + return { success: true }; +}); +``` + +## Resource-Based Authorization + +Check ownership of resources: + +```typescript +app.decorate('checkOwnership', function (getResourceOwnerId: (request) => Promise<string | undefined>) { + return async (request, reply) => { + const ownerId = await getResourceOwnerId(request); + + if (ownerId !== request.user.id && request.user.role !== 'admin') { + return reply.code(403).send({ + error: 'Forbidden', + message: 'You do not own this resource', + }); + } + }; +}); + +// Check post ownership +app.put('/posts/:id', { + onRequest: [ + app.authenticate, + app.checkOwnership(async 
(request) => { + const post = await db.posts.findById(request.params.id); + return post?.authorId; + }), + ], +}, updatePostHandler); + +// Alternative: inline check +app.put('/posts/:id', { + onRequest: [app.authenticate], +}, async (request, reply) => { + const post = await db.posts.findById(request.params.id); + + if (!post) { + return reply.code(404).send({ error: 'Post not found' }); + } + + if (post.authorId !== request.user.id && request.user.role !== 'admin') { + return reply.code(403).send({ error: 'Forbidden' }); + } + + return db.posts.update(post.id, request.body); +}); +``` + +## Password Hashing + +Use secure password hashing with argon2: + +```typescript +import { hash, verify } from '@node-rs/argon2'; + +async function hashPassword(password: string): Promise<string> { + return hash(password, { + memoryCost: 65536, + timeCost: 3, + parallelism: 4, + }); +} + +async function verifyPassword(hash: string, password: string): Promise<boolean> { + return verify(hash, password); +} + +app.post('/register', async (request, reply) => { + const { email, password } = request.body; + + const hashedPassword = await hashPassword(password); + const user = await db.users.create({ + email, + password: hashedPassword, + }); + + reply.code(201); + return { id: user.id, email: user.email }; +}); + +app.post('/login', async (request, reply) => { + const { email, password } = request.body; + const user = await db.users.findByEmail(email); + + if (!user || !(await verifyPassword(user.password, password))) { + return reply.code(401).send({ error: 'Invalid credentials' }); + } + + const token = app.jwt.sign({ id: user.id, role: user.role }); + return { token }; +}); +``` + +## Rate Limiting for Auth Endpoints + +Protect auth endpoints from brute force. **IMPORTANT: For production security, you MUST configure rate limiting with a Redis backend.** In-memory rate limiting is not safe for distributed deployments and can be bypassed. 
+ +```typescript +import fastifyRateLimit from '@fastify/rate-limit'; +import Redis from 'ioredis'; + +const redis = new Redis(process.env.REDIS_URL); + +// Global rate limit with Redis backend +app.register(fastifyRateLimit, { + max: 100, + timeWindow: '1 minute', + redis, // REQUIRED for production - ensures rate limiting works across all instances +}); + +// Stricter limit for auth endpoints +app.register(async function authRoutes(fastify) { + await fastify.register(fastifyRateLimit, { + max: 5, + timeWindow: '1 minute', + redis, // REQUIRED for production + keyGenerator: (request) => { + // Rate limit by IP + email combination + const email = request.body?.email || ''; + return `${request.ip}:${email}`; + }, + }); + + fastify.post('/login', loginHandler); + fastify.post('/register', registerHandler); + fastify.post('/forgot-password', forgotPasswordHandler); +}, { prefix: '/auth' }); +``` diff --git a/.claude/skills/fastify-best-practices/rules/configuration.md b/.claude/skills/fastify-best-practices/rules/configuration.md new file mode 100644 index 0000000..ceba3e3 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/configuration.md @@ -0,0 +1,217 @@ +--- +name: configuration +description: Application configuration in Fastify using env-schema +metadata: + tags: configuration, environment, env, settings, env-schema +--- + +# Application Configuration + +## Use env-schema for Configuration + +**Always use `env-schema` for configuration validation.** It provides JSON Schema validation for environment variables with sensible defaults. 
+ +```typescript +import Fastify from 'fastify'; +import envSchema from 'env-schema'; +import { Type, type Static } from '@sinclair/typebox'; + +const schema = Type.Object({ + PORT: Type.Number({ default: 3000 }), + HOST: Type.String({ default: '0.0.0.0' }), + DATABASE_URL: Type.String(), + JWT_SECRET: Type.String({ minLength: 32 }), + LOG_LEVEL: Type.Union([ + Type.Literal('trace'), + Type.Literal('debug'), + Type.Literal('info'), + Type.Literal('warn'), + Type.Literal('error'), + Type.Literal('fatal'), + ], { default: 'info' }), +}); + +type Config = Static; + +const config = envSchema({ + schema, + dotenv: true, // Load from .env file +}); + +const app = Fastify({ + logger: { level: config.LOG_LEVEL }, +}); + +app.decorate('config', config); + +declare module 'fastify' { + interface FastifyInstance { + config: Config; + } +} + +await app.listen({ port: config.PORT, host: config.HOST }); +``` + +## Configuration as Plugin + +Encapsulate configuration in a plugin for reuse: + +```typescript +import fp from 'fastify-plugin'; +import envSchema from 'env-schema'; +import { Type, type Static } from '@sinclair/typebox'; + +const schema = Type.Object({ + PORT: Type.Number({ default: 3000 }), + HOST: Type.String({ default: '0.0.0.0' }), + DATABASE_URL: Type.String(), + JWT_SECRET: Type.String({ minLength: 32 }), + LOG_LEVEL: Type.String({ default: 'info' }), +}); + +type Config = Static; + +declare module 'fastify' { + interface FastifyInstance { + config: Config; + } +} + +export default fp(async function configPlugin(fastify) { + const config = envSchema({ + schema, + dotenv: true, + }); + + fastify.decorate('config', config); +}, { + name: 'config', +}); +``` + +## Secrets Management + +Handle secrets securely: + +```typescript +// Never log secrets +const app = Fastify({ + logger: { + level: config.LOG_LEVEL, + redact: ['req.headers.authorization', '*.password', '*.secret', '*.apiKey'], + }, +}); + +// For production, use secret managers (AWS Secrets Manager, Vault, 
etc.) +// Pass secrets through environment variables - never commit them +``` + +## Feature Flags + +Implement feature flags via environment variables: + +```typescript +import { Type, type Static } from '@sinclair/typebox'; + +const schema = Type.Object({ + // ... other config + FEATURE_NEW_DASHBOARD: Type.Boolean({ default: false }), + FEATURE_BETA_API: Type.Boolean({ default: false }), +}); + +type Config = Static; + +const config = envSchema({ schema, dotenv: true }); + +// Use in routes +app.get('/dashboard', async (request) => { + if (app.config.FEATURE_NEW_DASHBOARD) { + return { version: 'v2', data: await getNewDashboardData() }; + } + return { version: 'v1', data: await getOldDashboardData() }; +}); +``` + +## Anti-Patterns to Avoid + +### NEVER use configuration files + +```typescript +// ❌ NEVER DO THIS - configuration files are an antipattern +import config from './config/production.json'; + +// ❌ NEVER DO THIS - per-environment config files +const env = process.env.NODE_ENV || 'development'; +const config = await import(`./config/${env}.js`); +``` + +Configuration files lead to: +- Security risks (secrets in files) +- Deployment complexity +- Environment drift +- Difficult secret rotation + +### NEVER use per-environment configuration + +```typescript +// ❌ NEVER DO THIS +const configs = { + development: { logLevel: 'debug' }, + production: { logLevel: 'info' }, + test: { logLevel: 'silent' }, +}; +const config = configs[process.env.NODE_ENV]; +``` + +Instead, use a single configuration source (environment variables) with sensible defaults. The environment controls the values, not conditional code. 
+ +### Use specific environment variables, not NODE_ENV + +```typescript +// ❌ AVOID checking NODE_ENV +if (process.env.NODE_ENV === 'production') { + // do something +} + +// ✅ BETTER - use explicit feature flags or configuration +if (app.config.ENABLE_DETAILED_LOGGING) { + // do something +} +``` + +## Dynamic Configuration + +For configuration that needs to change without restart, fetch from an external service: + +```typescript +interface DynamicConfig { + rateLimit: number; + maintenanceMode: boolean; +} + +let dynamicConfig: DynamicConfig = { + rateLimit: 100, + maintenanceMode: false, +}; + +async function refreshConfig() { + try { + const newConfig = await fetchConfigFromService(); + dynamicConfig = newConfig; + app.log.info('Configuration refreshed'); + } catch (error) { + app.log.error({ err: error }, 'Failed to refresh configuration'); + } +} + +// Refresh periodically +setInterval(refreshConfig, 60000); + +// Use in hooks +app.addHook('onRequest', async (request, reply) => { + if (dynamicConfig.maintenanceMode && !request.url.startsWith('/health')) { + reply.code(503).send({ error: 'Service under maintenance' }); + } +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/content-type.md b/.claude/skills/fastify-best-practices/rules/content-type.md new file mode 100644 index 0000000..8c98f1e --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/content-type.md @@ -0,0 +1,387 @@ +--- +name: content-type +description: Content type parsing in Fastify +metadata: + tags: content-type, parsing, body, multipart, json +--- + +# Content Type Parsing + +## Default Content Type Parsers + +Fastify includes parsers for common content types: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Built-in parsers: +// - application/json +// - text/plain + +app.post('/json', async (request) => { + // request.body is parsed JSON object + return { received: request.body }; +}); + +app.post('/text', async (request) => { + // 
request.body is string for text/plain + return { text: request.body }; +}); +``` + +## Custom Content Type Parsers + +Add parsers for additional content types: + +```typescript +// Parse application/x-www-form-urlencoded +app.addContentTypeParser( + 'application/x-www-form-urlencoded', + { parseAs: 'string' }, + (request, body, done) => { + const parsed = new URLSearchParams(body); + done(null, Object.fromEntries(parsed)); + }, +); + +// Async parser +app.addContentTypeParser( + 'application/x-www-form-urlencoded', + { parseAs: 'string' }, + async (request, body) => { + const parsed = new URLSearchParams(body); + return Object.fromEntries(parsed); + }, +); +``` + +## XML Parsing + +Parse XML content: + +```typescript +import { XMLParser } from 'fast-xml-parser'; + +const xmlParser = new XMLParser({ + ignoreAttributes: false, + attributeNamePrefix: '@_', +}); + +app.addContentTypeParser( + 'application/xml', + { parseAs: 'string' }, + async (request, body) => { + return xmlParser.parse(body); + }, +); + +app.addContentTypeParser( + 'text/xml', + { parseAs: 'string' }, + async (request, body) => { + return xmlParser.parse(body); + }, +); + +app.post('/xml', async (request) => { + // request.body is parsed XML as JavaScript object + return { data: request.body }; +}); +``` + +## Multipart Form Data + +Use @fastify/multipart for file uploads. **Configure these critical options:** + +```typescript +import fastifyMultipart from '@fastify/multipart'; + +app.register(fastifyMultipart, { + // CRITICAL: Always set explicit limits + limits: { + fieldNameSize: 100, // Max field name size in bytes + fieldSize: 1024 * 1024, // Max field value size (1MB) + fields: 10, // Max number of non-file fields + fileSize: 10 * 1024 * 1024, // Max file size (10MB) + files: 5, // Max number of files + headerPairs: 2000, // Max number of header pairs + parts: 1000, // Max number of parts (fields + files) + }, + // IMPORTANT: Throw on limit exceeded (default is to truncate silently!) 
+ throwFileSizeLimit: true, + // Attach all fields to request.body for easier access + attachFieldsToBody: true, + // Only accept specific file types (security!) + // onFile: async (part) => { + // if (!['image/jpeg', 'image/png'].includes(part.mimetype)) { + // throw new Error('Invalid file type'); + // } + // }, +}); + +// Handle file upload +app.post('/upload', async (request, reply) => { + const data = await request.file(); + + if (!data) { + return reply.code(400).send({ error: 'No file uploaded' }); + } + + // data.file is a stream + const buffer = await data.toBuffer(); + + return { + filename: data.filename, + mimetype: data.mimetype, + size: buffer.length, + }; +}); + +// Handle multiple files +app.post('/upload-multiple', async (request) => { + const files = []; + + for await (const part of request.files()) { + const buffer = await part.toBuffer(); + files.push({ + filename: part.filename, + mimetype: part.mimetype, + size: buffer.length, + }); + } + + return { files }; +}); + +// Handle mixed form data +app.post('/form', async (request) => { + const parts = request.parts(); + const fields: Record<string, string> = {}; + const files: Array<{ name: string; size: number }> = []; + + for await (const part of parts) { + if (part.type === 'file') { + const buffer = await part.toBuffer(); + files.push({ name: part.filename, size: buffer.length }); + } else { + fields[part.fieldname] = part.value as string; + } + } + + return { fields, files }; +}); +``` + +## Stream Processing + +Process body as stream for large payloads: + +```typescript +import { pipeline } from 'node:stream/promises'; +import { createWriteStream } from 'node:fs'; + +// Add parser that returns stream +app.addContentTypeParser( + 'application/octet-stream', + async (request, payload) => { + return payload; // Return stream directly + }, +); + +app.post('/upload-stream', async (request, reply) => { + const destination = createWriteStream('./upload.bin'); + + await pipeline(request.body, destination); + 
return { success: true }; +}); +``` + +## Custom JSON Parser + +Replace the default JSON parser: + +```typescript +// Remove default parser +app.removeContentTypeParser('application/json'); + +// Add custom parser with error handling +app.addContentTypeParser( + 'application/json', + { parseAs: 'string' }, + async (request, body) => { + try { + return JSON.parse(body); + } catch (error) { + throw { + statusCode: 400, + code: 'INVALID_JSON', + message: 'Invalid JSON payload', + }; + } + }, +); +``` + +## Content Type with Parameters + +Handle content types with parameters: + +```typescript +// Match content type with any charset +app.addContentTypeParser( + 'application/json; charset=utf-8', + { parseAs: 'string' }, + async (request, body) => { + return JSON.parse(body); + }, +); + +// Use regex for flexible matching +app.addContentTypeParser( + /^application\/.*\+json$/, + { parseAs: 'string' }, + async (request, body) => { + return JSON.parse(body); + }, +); +``` + +## Catch-All Parser + +Handle unknown content types: + +```typescript +app.addContentTypeParser('*', async (request, payload) => { + const chunks: Buffer[] = []; + + for await (const chunk of payload) { + chunks.push(chunk); + } + + const buffer = Buffer.concat(chunks); + + // Try to determine content type + const contentType = request.headers['content-type']; + + if (contentType?.includes('json')) { + return JSON.parse(buffer.toString('utf-8')); + } + + if (contentType?.includes('text')) { + return buffer.toString('utf-8'); + } + + return buffer; +}); +``` + +## Body Limit Configuration + +Configure body size limits: + +```typescript +// Global limit +const app = Fastify({ + bodyLimit: 1048576, // 1MB +}); + +// Per-route limit +app.post('/large-upload', { + bodyLimit: 52428800, // 50MB for this route +}, async (request) => { + return { size: JSON.stringify(request.body).length }; +}); + +// Per content type limit +app.addContentTypeParser('application/json', { + parseAs: 'string', + bodyLimit: 
2097152, // 2MB for JSON +}, async (request, body) => { + return JSON.parse(body); +}); +``` + +## Protocol Buffers + +Parse protobuf content: + +```typescript +import protobuf from 'protobufjs'; + +const root = await protobuf.load('./schema.proto'); +const MessageType = root.lookupType('package.MessageType'); + +app.addContentTypeParser( + 'application/x-protobuf', + { parseAs: 'buffer' }, + async (request, body) => { + const message = MessageType.decode(body); + return MessageType.toObject(message); + }, +); +``` + +## Form Data with @fastify/formbody + +Simple form parsing: + +```typescript +import formbody from '@fastify/formbody'; + +app.register(formbody); + +app.post('/form', async (request) => { + // request.body is parsed form data + const { name, email } = request.body as { name: string; email: string }; + return { name, email }; +}); +``` + +## Content Negotiation + +Handle different request formats: + +```typescript +app.post('/data', async (request, reply) => { + const contentType = request.headers['content-type']; + + // Body is already parsed by the appropriate parser + const data = request.body; + + // Respond based on Accept header + const accept = request.headers.accept; + + if (accept?.includes('application/xml')) { + reply.type('application/xml'); + return `${JSON.stringify(data)}`; + } + + reply.type('application/json'); + return data; +}); +``` + +## Validation After Parsing + +Validate parsed content: + +```typescript +app.post('/users', { + schema: { + body: { + type: 'object', + properties: { + name: { type: 'string', minLength: 1 }, + email: { type: 'string', format: 'email' }, + }, + required: ['name', 'email'], + }, + }, +}, async (request) => { + // Body is parsed AND validated + return request.body; +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/cors-security.md b/.claude/skills/fastify-best-practices/rules/cors-security.md new file mode 100644 index 0000000..89833c4 --- /dev/null +++ 
b/.claude/skills/fastify-best-practices/rules/cors-security.md @@ -0,0 +1,445 @@ +--- +name: cors-security +description: CORS and security headers in Fastify +metadata: + tags: cors, security, headers, helmet, csrf +--- + +# CORS and Security + +## CORS with @fastify/cors + +Enable Cross-Origin Resource Sharing: + +```typescript +import Fastify from 'fastify'; +import cors from '@fastify/cors'; + +const app = Fastify(); + +// Simple CORS - allow all origins +app.register(cors); + +// Configured CORS +app.register(cors, { + origin: ['https://example.com', 'https://app.example.com'], + methods: ['GET', 'POST', 'PUT', 'DELETE'], + allowedHeaders: ['Content-Type', 'Authorization'], + exposedHeaders: ['X-Total-Count'], + credentials: true, + maxAge: 86400, // 24 hours +}); +``` + +## Dynamic CORS Origin + +Validate origins dynamically: + +```typescript +app.register(cors, { + origin: (origin, callback) => { + // Allow requests with no origin (mobile apps, curl, etc.) + if (!origin) { + return callback(null, true); + } + + // Check against allowed origins + const allowedOrigins = [ + 'https://example.com', + 'https://app.example.com', + /\.example\.com$/, + ]; + + const isAllowed = allowedOrigins.some((allowed) => { + if (allowed instanceof RegExp) { + return allowed.test(origin); + } + return allowed === origin; + }); + + if (isAllowed) { + callback(null, true); + } else { + callback(new Error('Not allowed by CORS'), false); + } + }, + credentials: true, +}); +``` + +## Per-Route CORS + +Configure CORS for specific routes: + +```typescript +app.register(cors, { + origin: true, // Reflect request origin + credentials: true, +}); + +// Or disable CORS for specific routes +app.route({ + method: 'GET', + url: '/internal', + config: { + cors: false, + }, + handler: async () => { + return { internal: true }; + }, +}); +``` + +## Security Headers with @fastify/helmet + +Add security headers: + +```typescript +import helmet from '@fastify/helmet'; + +app.register(helmet, { + 
contentSecurityPolicy: { + directives: { + defaultSrc: ["'self'"], + scriptSrc: ["'self'", "'unsafe-inline'"], + styleSrc: ["'self'", "'unsafe-inline'"], + imgSrc: ["'self'", 'data:', 'https:'], + connectSrc: ["'self'", 'https://api.example.com'], + }, + }, + crossOriginEmbedderPolicy: false, // Disable if embedding external resources +}); +``` + +## Configure Individual Headers + +Fine-tune security headers: + +```typescript +app.register(helmet, { + // Strict Transport Security + hsts: { + maxAge: 31536000, // 1 year + includeSubDomains: true, + preload: true, + }, + + // Content Security Policy + contentSecurityPolicy: { + useDefaults: true, + directives: { + 'script-src': ["'self'", 'https://trusted-cdn.com'], + }, + }, + + // X-Frame-Options + frameguard: { + action: 'deny', // or 'sameorigin' + }, + + // X-Content-Type-Options + noSniff: true, + + // X-XSS-Protection (legacy) + xssFilter: true, + + // Referrer-Policy + referrerPolicy: { + policy: 'strict-origin-when-cross-origin', + }, + + // X-Permitted-Cross-Domain-Policies + permittedCrossDomainPolicies: false, + + // X-DNS-Prefetch-Control + dnsPrefetchControl: { + allow: false, + }, +}); +``` + +## Rate Limiting + +Protect against abuse: + +```typescript +import rateLimit from '@fastify/rate-limit'; + +app.register(rateLimit, { + max: 100, + timeWindow: '1 minute', + errorResponseBuilder: (request, context) => ({ + statusCode: 429, + error: 'Too Many Requests', + message: `Rate limit exceeded. 
Retry in ${context.after}`, + retryAfter: context.after, + }), +}); + +// Per-route rate limit +app.get('/expensive', { + config: { + rateLimit: { + max: 10, + timeWindow: '1 minute', + }, + }, +}, handler); + +// Skip rate limit for certain routes +app.get('/health', { + config: { + rateLimit: false, + }, +}, () => ({ status: 'ok' })); +``` + +## Redis-Based Rate Limiting + +Use Redis for distributed rate limiting: + +```typescript +import rateLimit from '@fastify/rate-limit'; +import Redis from 'ioredis'; + +const redis = new Redis(process.env.REDIS_URL); + +app.register(rateLimit, { + max: 100, + timeWindow: '1 minute', + redis, + nameSpace: 'rate-limit:', + keyGenerator: (request) => { + // Rate limit by user ID if authenticated, otherwise by IP + return request.user?.id || request.ip; + }, +}); +``` + +## CSRF Protection + +Protect against Cross-Site Request Forgery: + +```typescript +import fastifyCsrf from '@fastify/csrf-protection'; +import fastifyCookie from '@fastify/cookie'; + +app.register(fastifyCookie); +app.register(fastifyCsrf, { + cookieOpts: { + signed: true, + httpOnly: true, + sameSite: 'strict', + }, +}); + +// Generate token +app.get('/csrf-token', async (request, reply) => { + const token = reply.generateCsrf(); + return { token }; +}); + +// Protected route +app.post('/transfer', { + preHandler: app.csrfProtection, +}, async (request) => { + // CSRF token validated + return { success: true }; +}); +``` + +## Custom Security Headers + +Add custom headers: + +```typescript +app.addHook('onSend', async (request, reply) => { + // Custom security headers + reply.header('X-Request-ID', request.id); + reply.header('X-Content-Type-Options', 'nosniff'); + reply.header('X-Frame-Options', 'DENY'); + reply.header('Permissions-Policy', 'geolocation=(), camera=()'); +}); + +// Per-route headers +app.get('/download', async (request, reply) => { + reply.header('Content-Disposition', 'attachment; filename="file.pdf"'); + reply.header('X-Download-Options', 
'noopen'); + return reply.send(fileStream); +}); +``` + +## Secure Cookies + +Configure secure cookies: + +```typescript +import cookie from '@fastify/cookie'; + +app.register(cookie, { + secret: process.env.COOKIE_SECRET, + parseOptions: { + httpOnly: true, + secure: process.env.NODE_ENV === 'production', + sameSite: 'strict', + path: '/', + maxAge: 3600, // 1 hour + }, +}); + +// Set secure cookie +app.post('/login', async (request, reply) => { + const token = await createSession(request.body); + + reply.setCookie('session', token, { + httpOnly: true, + secure: true, + sameSite: 'strict', + path: '/', + maxAge: 86400, + signed: true, + }); + + return { success: true }; +}); + +// Read signed cookie +app.get('/profile', async (request) => { + const session = request.cookies.session; + const unsigned = request.unsignCookie(session); + + if (!unsigned.valid) { + throw { statusCode: 401, message: 'Invalid session' }; + } + + return { sessionId: unsigned.value }; +}); +``` + +## Request Validation Security + +Validate and sanitize input: + +```typescript +// Schema-based validation protects against injection +app.post('/users', { + schema: { + body: { + type: 'object', + properties: { + email: { + type: 'string', + format: 'email', + maxLength: 254, + }, + name: { + type: 'string', + minLength: 1, + maxLength: 100, + pattern: '^[a-zA-Z\\s]+$', // Only letters and spaces + }, + }, + required: ['email', 'name'], + additionalProperties: false, + }, + }, +}, handler); +``` + +## IP Filtering + +Restrict access by IP: + +```typescript +const allowedIps = new Set([ + '192.168.1.0/24', + '10.0.0.0/8', +]); + +app.addHook('onRequest', async (request, reply) => { + if (request.url.startsWith('/admin')) { + const clientIp = request.ip; + + if (!isIpAllowed(clientIp, allowedIps)) { + reply.code(403).send({ error: 'Forbidden' }); + } + } +}); + +function isIpAllowed(ip: string, allowed: Set<string>): boolean { + // Implement IP/CIDR matching + for (const range of allowed) { + if 
(ipInRange(ip, range)) return true; + } + return false; +} +``` + +## Trust Proxy + +Configure for reverse proxy environments: + +```typescript +const app = Fastify({ + trustProxy: true, // Trust X-Forwarded-* headers +}); + +// Or specific proxy configuration +const app = Fastify({ + trustProxy: ['127.0.0.1', '10.0.0.0/8'], +}); + +// Now request.ip returns the real client IP +app.get('/ip', async (request) => { + return { + ip: request.ip, + ips: request.ips, // Array of all IPs in chain + }; +}); +``` + +## HTTPS Redirect + +Force HTTPS in production: + +```typescript +app.addHook('onRequest', async (request, reply) => { + if ( + process.env.NODE_ENV === 'production' && + request.headers['x-forwarded-proto'] !== 'https' + ) { + const httpsUrl = `https://${request.hostname}${request.url}`; + reply.redirect(301, httpsUrl); + } +}); +``` + +## Security Best Practices Summary + +```typescript +import Fastify from 'fastify'; +import cors from '@fastify/cors'; +import helmet from '@fastify/helmet'; +import rateLimit from '@fastify/rate-limit'; + +const app = Fastify({ + trustProxy: true, + bodyLimit: 1048576, // 1MB max body +}); + +// Security plugins +app.register(helmet); +app.register(cors, { + origin: process.env.ALLOWED_ORIGINS?.split(','), + credentials: true, +}); +app.register(rateLimit, { + max: 100, + timeWindow: '1 minute', +}); + +// Validate all input with schemas +// Never expose internal errors in production +// Use parameterized queries for database +// Keep dependencies updated +``` diff --git a/.claude/skills/fastify-best-practices/rules/database.md b/.claude/skills/fastify-best-practices/rules/database.md new file mode 100644 index 0000000..acf6048 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/database.md @@ -0,0 +1,320 @@ +--- +name: database +description: Database integration with Fastify using official adapters +metadata: + tags: database, postgres, mysql, mongodb, redis, sql +--- + +# Database Integration + +## Use Official 
Fastify Database Adapters + +Always use the official Fastify database plugins from the `@fastify` organization. They provide proper connection pooling, encapsulation, and integration with Fastify's lifecycle. + +## PostgreSQL with @fastify/postgres + +```typescript +import Fastify from 'fastify'; +import fastifyPostgres from '@fastify/postgres'; + +const app = Fastify({ logger: true }); + +app.register(fastifyPostgres, { + connectionString: process.env.DATABASE_URL, +}); + +// Use in routes +app.get('/users', async (request) => { + const client = await app.pg.connect(); + try { + const { rows } = await client.query('SELECT * FROM users'); + return rows; + } finally { + client.release(); + } +}); + +// Or use the pool directly for simple queries +app.get('/users/:id', async (request) => { + const { id } = request.params; + const { rows } = await app.pg.query( + 'SELECT * FROM users WHERE id = $1', + [id], + ); + return rows[0]; +}); + +// Transactions +app.post('/transfer', async (request) => { + const { fromId, toId, amount } = request.body; + const client = await app.pg.connect(); + + try { + await client.query('BEGIN'); + await client.query( + 'UPDATE accounts SET balance = balance - $1 WHERE id = $2', + [amount, fromId], + ); + await client.query( + 'UPDATE accounts SET balance = balance + $1 WHERE id = $2', + [amount, toId], + ); + await client.query('COMMIT'); + return { success: true }; + } catch (error) { + await client.query('ROLLBACK'); + throw error; + } finally { + client.release(); + } +}); +``` + +## MySQL with @fastify/mysql + +```typescript +import Fastify from 'fastify'; +import fastifyMysql from '@fastify/mysql'; + +const app = Fastify({ logger: true }); + +app.register(fastifyMysql, { + promise: true, + connectionString: process.env.MYSQL_URL, +}); + +app.get('/users', async (request) => { + const connection = await app.mysql.getConnection(); + try { + const [rows] = await connection.query('SELECT * FROM users'); + return rows; + } finally { + 
connection.release(); + } +}); +``` + +## MongoDB with @fastify/mongodb + +```typescript +import Fastify from 'fastify'; +import fastifyMongo from '@fastify/mongodb'; + +const app = Fastify({ logger: true }); + +app.register(fastifyMongo, { + url: process.env.MONGODB_URL, +}); + +app.get('/users', async (request) => { + const users = await app.mongo.db + .collection('users') + .find({}) + .toArray(); + return users; +}); + +app.get('/users/:id', async (request) => { + const { id } = request.params; + const user = await app.mongo.db + .collection('users') + .findOne({ _id: new app.mongo.ObjectId(id) }); + return user; +}); + +app.post('/users', async (request) => { + const result = await app.mongo.db + .collection('users') + .insertOne(request.body); + return { id: result.insertedId }; +}); +``` + +## Redis with @fastify/redis + +```typescript +import Fastify from 'fastify'; +import fastifyRedis from '@fastify/redis'; + +const app = Fastify({ logger: true }); + +app.register(fastifyRedis, { + url: process.env.REDIS_URL, +}); + +// Caching example +app.get('/data/:key', async (request) => { + const { key } = request.params; + + // Try cache first + const cached = await app.redis.get(`cache:${key}`); + if (cached) { + return JSON.parse(cached); + } + + // Fetch from database + const data = await fetchFromDatabase(key); + + // Cache for 5 minutes + await app.redis.setex(`cache:${key}`, 300, JSON.stringify(data)); + + return data; +}); +``` + +## Database as Plugin + +Encapsulate database access in a plugin: + +```typescript +// plugins/database.ts +import fp from 'fastify-plugin'; +import fastifyPostgres from '@fastify/postgres'; + +export default fp(async function databasePlugin(fastify) { + await fastify.register(fastifyPostgres, { + connectionString: fastify.config.DATABASE_URL, + }); + + // Add health check + fastify.decorate('checkDatabaseHealth', async () => { + try { + await fastify.pg.query('SELECT 1'); + return true; + } catch { + return false; + } + }); +}, { 
+ name: 'database', + dependencies: ['config'], +}); +``` + +## Repository Pattern + +Abstract database access with repositories: + +```typescript +// repositories/user.repository.ts +import type { FastifyInstance } from 'fastify'; + +export interface User { + id: string; + email: string; + name: string; +} + +export function createUserRepository(app: FastifyInstance) { + return { + async findById(id: string): Promise { + const { rows } = await app.pg.query( + 'SELECT * FROM users WHERE id = $1', + [id], + ); + return rows[0] || null; + }, + + async findByEmail(email: string): Promise { + const { rows } = await app.pg.query( + 'SELECT * FROM users WHERE email = $1', + [email], + ); + return rows[0] || null; + }, + + async create(data: Omit): Promise { + const { rows } = await app.pg.query( + 'INSERT INTO users (email, name) VALUES ($1, $2) RETURNING *', + [data.email, data.name], + ); + return rows[0]; + }, + + async update(id: string, data: Partial): Promise { + const fields = Object.keys(data); + const values = Object.values(data); + const setClause = fields + .map((f, i) => `${f} = $${i + 2}`) + .join(', '); + + const { rows } = await app.pg.query( + `UPDATE users SET ${setClause} WHERE id = $1 RETURNING *`, + [id, ...values], + ); + return rows[0] || null; + }, + + async delete(id: string): Promise { + const { rowCount } = await app.pg.query( + 'DELETE FROM users WHERE id = $1', + [id], + ); + return rowCount > 0; + }, + }; +} + +// Usage in plugin +import fp from 'fastify-plugin'; +import { createUserRepository } from './repositories/user.repository.js'; + +export default fp(async function repositoriesPlugin(fastify) { + fastify.decorate('repositories', { + users: createUserRepository(fastify), + }); +}, { + name: 'repositories', + dependencies: ['database'], +}); +``` + +## Testing with Database + +Use transactions for test isolation: + +```typescript +import { describe, it, beforeEach, afterEach } from 'node:test'; +import { build } from './app.js'; + 
+describe('User API', () => { + let app; + let client; + + beforeEach(async () => { + app = await build(); + client = await app.pg.connect(); + await client.query('BEGIN'); + }); + + afterEach(async () => { + await client.query('ROLLBACK'); + client.release(); + await app.close(); + }); + + it('should create a user', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { email: 'test@example.com', name: 'Test' }, + }); + + t.assert.equal(response.statusCode, 201); + }); +}); +``` + +## Connection Pool Configuration + +Configure connection pools appropriately: + +```typescript +app.register(fastifyPostgres, { + connectionString: process.env.DATABASE_URL, + // Pool configuration + max: 20, // Maximum pool size + idleTimeoutMillis: 30000, // Close idle clients after 30s + connectionTimeoutMillis: 5000, // Timeout for new connections +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/decorators.md b/.claude/skills/fastify-best-practices/rules/decorators.md new file mode 100644 index 0000000..a9a322a --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/decorators.md @@ -0,0 +1,416 @@ +--- +name: decorators +description: Decorators and request/reply extensions in Fastify +metadata: + tags: decorators, extensions, customization, utilities +--- + +# Decorators and Extensions + +## Understanding Decorators + +Decorators add custom properties and methods to Fastify instances, requests, and replies: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Decorate the Fastify instance +app.decorate('utility', { + formatDate: (date: Date) => date.toISOString(), + generateId: () => crypto.randomUUID(), +}); + +// Use in routes +app.get('/example', async function (request, reply) { + const id = this.utility.generateId(); + return { id, timestamp: this.utility.formatDate(new Date()) }; +}); +``` + +## Decorator Types + +Three types of decorators for different contexts: + 
+```typescript +// Instance decorator - available on fastify instance +app.decorate('config', { apiVersion: '1.0.0' }); +app.decorate('db', databaseConnection); +app.decorate('cache', cacheClient); + +// Request decorator - available on each request +app.decorateRequest('user', null); // Object property +app.decorateRequest('startTime', 0); // Primitive +app.decorateRequest('getData', function() { // Method + return this.body; +}); + +// Reply decorator - available on each reply +app.decorateReply('sendError', function(code: number, message: string) { + return this.code(code).send({ error: message }); +}); +app.decorateReply('success', function(data: unknown) { + return this.send({ success: true, data }); +}); +``` + +## TypeScript Declaration Merging + +Extend Fastify types for type safety: + +```typescript +// Declare custom properties +declare module 'fastify' { + interface FastifyInstance { + config: { + apiVersion: string; + environment: string; + }; + db: DatabaseClient; + cache: CacheClient; + } + + interface FastifyRequest { + user: { + id: string; + email: string; + roles: string[]; + } | null; + startTime: number; + requestId: string; + } + + interface FastifyReply { + sendError: (code: number, message: string) => void; + success: (data: unknown) => void; + } +} + +// Register decorators +app.decorate('config', { + apiVersion: '1.0.0', + environment: process.env.NODE_ENV, +}); + +app.decorateRequest('user', null); +app.decorateRequest('startTime', 0); + +app.decorateReply('sendError', function (code: number, message: string) { + this.code(code).send({ error: message }); +}); +``` + +## Decorator Initialization + +Initialize request/reply decorators in hooks: + +```typescript +// Decorators with primitive defaults are copied +app.decorateRequest('startTime', 0); + +// Initialize in hook +app.addHook('onRequest', async (request) => { + request.startTime = Date.now(); +}); + +// Object decorators need getter pattern for proper initialization 
+app.decorateRequest('context', null); + +app.addHook('onRequest', async (request) => { + request.context = { + traceId: request.headers['x-trace-id'] || crypto.randomUUID(), + clientIp: request.ip, + userAgent: request.headers['user-agent'], + }; +}); +``` + +## Dependency Injection with Decorators + +Use decorators for dependency injection: + +```typescript +import fp from 'fastify-plugin'; + +// Database plugin +export default fp(async function databasePlugin(fastify, options) { + const db = await createDatabaseConnection(options.connectionString); + + fastify.decorate('db', db); + + fastify.addHook('onClose', async () => { + await db.close(); + }); +}); + +// User service plugin +export default fp(async function userServicePlugin(fastify) { + // Depends on db decorator + if (!fastify.hasDecorator('db')) { + throw new Error('Database plugin must be registered first'); + } + + const userService = { + findById: (id: string) => fastify.db.query('SELECT * FROM users WHERE id = $1', [id]), + create: (data: CreateUserInput) => fastify.db.query( + 'INSERT INTO users (name, email) VALUES ($1, $2) RETURNING *', + [data.name, data.email] + ), + }; + + fastify.decorate('userService', userService); +}, { + dependencies: ['database-plugin'], +}); + +// Use in routes +app.get('/users/:id', async function (request) { + const user = await this.userService.findById(request.params.id); + return user; +}); +``` + +## Request Context Pattern + +Build rich request context: + +```typescript +interface RequestContext { + traceId: string; + user: User | null; + permissions: Set; + startTime: number; + metadata: Map; +} + +declare module 'fastify' { + interface FastifyRequest { + ctx: RequestContext; + } +} + +app.decorateRequest('ctx', null); + +app.addHook('onRequest', async (request) => { + request.ctx = { + traceId: request.headers['x-trace-id']?.toString() || crypto.randomUUID(), + user: null, + permissions: new Set(), + startTime: Date.now(), + metadata: new Map(), + }; +}); + +// 
Auth hook populates user +app.addHook('preHandler', async (request) => { + const token = request.headers.authorization; + if (token) { + const user = await verifyToken(token); + request.ctx.user = user; + request.ctx.permissions = new Set(user.permissions); + } +}); + +// Use in handlers +app.get('/profile', async (request, reply) => { + if (!request.ctx.user) { + return reply.code(401).send({ error: 'Unauthorized' }); + } + + if (!request.ctx.permissions.has('read:profile')) { + return reply.code(403).send({ error: 'Forbidden' }); + } + + return request.ctx.user; +}); +``` + +## Reply Helpers + +Create consistent response methods: + +```typescript +declare module 'fastify' { + interface FastifyReply { + ok: (data?: unknown) => void; + created: (data: unknown) => void; + noContent: () => void; + badRequest: (message: string, details?: unknown) => void; + unauthorized: (message?: string) => void; + forbidden: (message?: string) => void; + notFound: (resource?: string) => void; + conflict: (message: string) => void; + serverError: (message?: string) => void; + } +} + +app.decorateReply('ok', function (data?: unknown) { + this.code(200).send(data ?? 
{ success: true }); +}); + +app.decorateReply('created', function (data: unknown) { + this.code(201).send(data); +}); + +app.decorateReply('noContent', function () { + this.code(204).send(); +}); + +app.decorateReply('badRequest', function (message: string, details?: unknown) { + this.code(400).send({ + statusCode: 400, + error: 'Bad Request', + message, + details, + }); +}); + +app.decorateReply('unauthorized', function (message = 'Authentication required') { + this.code(401).send({ + statusCode: 401, + error: 'Unauthorized', + message, + }); +}); + +app.decorateReply('notFound', function (resource = 'Resource') { + this.code(404).send({ + statusCode: 404, + error: 'Not Found', + message: `${resource} not found`, + }); +}); + +// Usage +app.get('/users/:id', async (request, reply) => { + const user = await db.users.findById(request.params.id); + if (!user) { + return reply.notFound('User'); + } + return reply.ok(user); +}); + +app.post('/users', async (request, reply) => { + const user = await db.users.create(request.body); + return reply.created(user); +}); +``` + +## Checking Decorators + +Check if decorators exist before using: + +```typescript +// Check at registration time +app.register(async function (fastify) { + if (!fastify.hasDecorator('db')) { + throw new Error('Database decorator required'); + } + + if (!fastify.hasRequestDecorator('user')) { + throw new Error('User request decorator required'); + } + + if (!fastify.hasReplyDecorator('sendError')) { + throw new Error('sendError reply decorator required'); + } + + // Safe to use decorators +}); +``` + +## Decorator Encapsulation + +Decorators respect encapsulation by default: + +```typescript +app.register(async function pluginA(fastify) { + fastify.decorate('pluginAUtil', () => 'A'); + + fastify.get('/a', async function () { + return this.pluginAUtil(); // Works + }); +}); + +app.register(async function pluginB(fastify) { + // this.pluginAUtil is NOT available here (encapsulated) + + fastify.get('/b', 
async function () { + // this.pluginAUtil() would be undefined + }); +}); +``` + +Use `fastify-plugin` to share decorators: + +```typescript +import fp from 'fastify-plugin'; + +export default fp(async function sharedDecorator(fastify) { + fastify.decorate('sharedUtil', () => 'shared'); +}); + +// Now available to parent and sibling plugins +``` + +## Functional Decorators + +Create decorators that return functions: + +```typescript +declare module 'fastify' { + interface FastifyInstance { + createValidator: (schema: object) => (data: unknown) => T; + createRateLimiter: (options: RateLimitOptions) => RateLimiter; + } +} + +app.decorate('createValidator', function (schema: object) { + const validate = ajv.compile(schema); + return (data: unknown): T => { + if (!validate(data)) { + throw new ValidationError(validate.errors); + } + return data as T; + }; +}); + +// Usage +const validateUser = app.createValidator(userSchema); + +app.post('/users', async (request) => { + const user = validateUser(request.body); + return db.users.create(user); +}); +``` + +## Async Decorator Initialization + +Handle async initialization properly: + +```typescript +import fp from 'fastify-plugin'; + +export default fp(async function asyncPlugin(fastify) { + // Async initialization + const connection = await createAsyncConnection(); + const cache = await initializeCache(); + + fastify.decorate('asyncService', { + connection, + cache, + query: async (sql: string) => connection.query(sql), + }); + + fastify.addHook('onClose', async () => { + await connection.close(); + await cache.disconnect(); + }); +}); + +// Plugin is fully initialized before routes execute +app.get('/data', async function () { + return this.asyncService.query('SELECT * FROM data'); +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/deployment.md b/.claude/skills/fastify-best-practices/rules/deployment.md new file mode 100644 index 0000000..00a29eb --- /dev/null +++ 
b/.claude/skills/fastify-best-practices/rules/deployment.md @@ -0,0 +1,425 @@ +--- +name: deployment +description: Production deployment for Fastify applications +metadata: + tags: deployment, production, docker, kubernetes, scaling +--- + +# Production Deployment + +## Graceful Shutdown with close-with-grace + +Use `close-with-grace` for proper shutdown handling: + +```typescript +import Fastify from 'fastify'; +import closeWithGrace from 'close-with-grace'; + +const app = Fastify({ logger: true }); + +// Register plugins and routes +await app.register(import('./plugins/index.js')); +await app.register(import('./routes/index.js')); + +// Graceful shutdown handler +closeWithGrace({ delay: 10000 }, async ({ signal, err }) => { + if (err) { + app.log.error({ err }, 'Server closing due to error'); + } else { + app.log.info({ signal }, 'Server closing due to signal'); + } + + await app.close(); +}); + +// Start server +await app.listen({ + port: parseInt(process.env.PORT || '3000', 10), + host: '0.0.0.0', +}); + +app.log.info(`Server listening on ${app.server.address()}`); +``` + +## Health Check Endpoints + +Implement comprehensive health checks: + +```typescript +app.get('/health', async () => { + return { status: 'ok', timestamp: new Date().toISOString() }; +}); + +app.get('/health/live', async () => { + return { status: 'ok' }; +}); + +app.get('/health/ready', async (request, reply) => { + const checks = { + database: false, + cache: false, + }; + + try { + await app.db`SELECT 1`; + checks.database = true; + } catch { + // Database not ready + } + + try { + await app.cache.ping(); + checks.cache = true; + } catch { + // Cache not ready + } + + const allHealthy = Object.values(checks).every(Boolean); + + if (!allHealthy) { + reply.code(503); + } + + return { + status: allHealthy ? 
'ok' : 'degraded', + checks, + timestamp: new Date().toISOString(), + }; +}); + +// Detailed health for monitoring +app.get('/health/details', { + preHandler: [app.authenticate, app.requireAdmin], +}, async () => { + const memory = process.memoryUsage(); + + return { + status: 'ok', + uptime: process.uptime(), + memory: { + heapUsed: Math.round(memory.heapUsed / 1024 / 1024), + heapTotal: Math.round(memory.heapTotal / 1024 / 1024), + rss: Math.round(memory.rss / 1024 / 1024), + }, + version: process.env.APP_VERSION, + nodeVersion: process.version, + }; +}); +``` + +## Docker Configuration + +Create an optimized Dockerfile: + +```dockerfile +# Build stage +FROM node:22-alpine AS builder + +WORKDIR /app + +COPY package*.json ./ +RUN npm ci --only=production + +COPY . . + +# Production stage +FROM node:22-alpine + +WORKDIR /app + +# Run as non-root user +RUN addgroup -g 1001 -S nodejs && \ + adduser -S nodejs -u 1001 + +# Copy from builder +COPY --from=builder --chown=nodejs:nodejs /app/node_modules ./node_modules +COPY --from=builder --chown=nodejs:nodejs /app/src ./src +COPY --from=builder --chown=nodejs:nodejs /app/package.json ./ + +USER nodejs + +EXPOSE 3000 + +ENV NODE_ENV=production +ENV PORT=3000 + +# Health check +HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3000/health || exit 1 + +CMD ["node", "src/app.ts"] +``` + +```yaml +# docker-compose.yml +services: + api: + build: . 
+ ports: + - "3000:3000" + environment: + - NODE_ENV=production + - DATABASE_URL=postgres://user:pass@db:5432/app + - JWT_SECRET=${JWT_SECRET} + depends_on: + db: + condition: service_healthy + restart: unless-stopped + + db: + image: postgres:16-alpine + environment: + - POSTGRES_USER=user + - POSTGRES_PASSWORD=pass + - POSTGRES_DB=app + volumes: + - pgdata:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U user -d app"] + interval: 5s + timeout: 5s + retries: 5 + +volumes: + pgdata: +``` + +## Kubernetes Deployment + +Deploy to Kubernetes: + +```yaml +# deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: fastify-api +spec: + replicas: 3 + selector: + matchLabels: + app: fastify-api + template: + metadata: + labels: + app: fastify-api + spec: + containers: + - name: api + image: my-registry/fastify-api:latest + ports: + - containerPort: 3000 + env: + - name: NODE_ENV + value: "production" + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: api-secrets + key: database-url + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + livenessProbe: + httpGet: + path: /health/live + port: 3000 + initialDelaySeconds: 5 + periodSeconds: 10 + readinessProbe: + httpGet: + path: /health/ready + port: 3000 + initialDelaySeconds: 5 + periodSeconds: 5 + lifecycle: + preStop: + exec: + command: ["/bin/sh", "-c", "sleep 5"] +--- +apiVersion: v1 +kind: Service +metadata: + name: fastify-api +spec: + selector: + app: fastify-api + ports: + - port: 80 + targetPort: 3000 + type: ClusterIP +``` + +## Production Logger Configuration + +Configure logging for production: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + logger: { + level: process.env.LOG_LEVEL || 'info', + // JSON output for log aggregation + formatters: { + level: (label) => ({ level: label }), + bindings: (bindings) => ({ + pid: bindings.pid, + hostname: bindings.hostname, + service: 
'fastify-api', + version: process.env.APP_VERSION, + }), + }, + timestamp: () => `,"time":"${new Date().toISOString()}"`, + // Redact sensitive data + redact: { + paths: [ + 'req.headers.authorization', + 'req.headers.cookie', + '*.password', + '*.token', + '*.secret', + ], + censor: '[REDACTED]', + }, + }, +}); +``` + +## Request Timeouts + +Configure appropriate timeouts: + +```typescript +const app = Fastify({ + connectionTimeout: 30000, // 30s connection timeout + keepAliveTimeout: 72000, // 72s keep-alive (longer than ALB 60s) + requestTimeout: 30000, // 30s request timeout + bodyLimit: 1048576, // 1MB body limit +}); + +// Per-route timeout +app.get('/long-operation', { + config: { + timeout: 60000, // 60s for this route + }, +}, longOperationHandler); +``` + +## Trust Proxy Settings + +Configure for load balancers: + +```typescript +const app = Fastify({ + // Trust first proxy (load balancer) + trustProxy: true, + + // Or trust specific proxies + trustProxy: ['127.0.0.1', '10.0.0.0/8'], + + // Or number of proxies to trust + trustProxy: 1, +}); + +// Now request.ip returns real client IP +``` + +## Static File Serving + +Serve static files efficiently. 
**Always use `import.meta.dirname` as the base path**, never `process.cwd()`: + +```typescript +import fastifyStatic from '@fastify/static'; +import { join } from 'node:path'; + +app.register(fastifyStatic, { + root: join(import.meta.dirname, '..', 'public'), + prefix: '/static/', + maxAge: '1d', + immutable: true, + etag: true, + lastModified: true, +}); +``` + +## Compression + +Enable response compression: + +```typescript +import fastifyCompress from '@fastify/compress'; + +app.register(fastifyCompress, { + global: true, + threshold: 1024, // Only compress > 1KB + encodings: ['gzip', 'deflate'], +}); +``` + +## Metrics and Monitoring + +Expose Prometheus metrics: + +```typescript +import { register, collectDefaultMetrics, Counter, Histogram } from 'prom-client'; + +collectDefaultMetrics(); + +const httpRequestDuration = new Histogram({ + name: 'http_request_duration_seconds', + help: 'Duration of HTTP requests in seconds', + labelNames: ['method', 'route', 'status'], + buckets: [0.01, 0.05, 0.1, 0.5, 1, 5], +}); + +const httpRequestTotal = new Counter({ + name: 'http_requests_total', + help: 'Total number of HTTP requests', + labelNames: ['method', 'route', 'status'], +}); + +app.addHook('onResponse', (request, reply, done) => { + const route = request.routeOptions.url || request.url; + const labels = { + method: request.method, + route, + status: reply.statusCode, + }; + + httpRequestDuration.observe(labels, reply.elapsedTime / 1000); + httpRequestTotal.inc(labels); + done(); +}); + +app.get('/metrics', async (request, reply) => { + reply.header('Content-Type', register.contentType); + return register.metrics(); +}); +``` + +## Zero-Downtime Deployments + +Support rolling updates: + +```typescript +import closeWithGrace from 'close-with-grace'; + +// Stop accepting new connections gracefully +closeWithGrace({ delay: 30000 }, async ({ signal }) => { + app.log.info({ signal }, 'Received shutdown signal'); + + // Stop accepting new connections + // Existing 
connections continue to be served + + // Wait for in-flight requests (handled by close-with-grace delay) + await app.close(); + + app.log.info('Server closed'); +}); +``` + diff --git a/.claude/skills/fastify-best-practices/rules/error-handling.md b/.claude/skills/fastify-best-practices/rules/error-handling.md new file mode 100644 index 0000000..8e43c85 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/error-handling.md @@ -0,0 +1,412 @@ +--- +name: error-handling +description: Error handling patterns in Fastify +metadata: + tags: errors, exceptions, error-handler, validation +--- + +# Error Handling in Fastify + +## Default Error Handler + +Fastify has a built-in error handler. Thrown errors automatically become HTTP responses: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ logger: true }); + +app.get('/users/:id', async (request) => { + const user = await findUser(request.params.id); + if (!user) { + // Throwing an error with statusCode sets the response status + const error = new Error('User not found'); + error.statusCode = 404; + throw error; + } + return user; +}); +``` + +## Custom Error Classes + +Use `@fastify/error` for creating typed errors: + +```typescript +import createError from '@fastify/error'; + +const NotFoundError = createError('NOT_FOUND', '%s not found', 404); +const UnauthorizedError = createError('UNAUTHORIZED', 'Authentication required', 401); +const ForbiddenError = createError('FORBIDDEN', 'Access denied: %s', 403); +const ValidationError = createError('VALIDATION_ERROR', '%s', 400); +const ConflictError = createError('CONFLICT', '%s already exists', 409); + +// Usage +app.get('/users/:id', async (request) => { + const user = await findUser(request.params.id); + if (!user) { + throw new NotFoundError('User'); + } + return user; +}); + +app.post('/users', async (request) => { + const exists = await userExists(request.body.email); + if (exists) { + throw new ConflictError('Email'); + } + return 
createUser(request.body); +}); +``` + +## Custom Error Handler + +Implement a centralized error handler: + +```typescript +import Fastify from 'fastify'; +import type { FastifyError, FastifyRequest, FastifyReply } from 'fastify'; + +const app = Fastify({ logger: true }); + +app.setErrorHandler((error: FastifyError, request: FastifyRequest, reply: FastifyReply) => { + // Log the error + request.log.error({ err: error }, 'Request error'); + + // Handle validation errors + if (error.validation) { + return reply.code(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Validation failed', + details: error.validation, + }); + } + + // Handle known errors with status codes + const statusCode = error.statusCode ?? 500; + const code = error.code ?? 'INTERNAL_ERROR'; + + // Don't expose internal error details in production + const message = statusCode >= 500 && process.env.NODE_ENV === 'production' + ? 'Internal Server Error' + : error.message; + + return reply.code(statusCode).send({ + statusCode, + error: code, + message, + }); +}); +``` + +## Error Response Schema + +Define consistent error response schemas: + +```typescript +app.addSchema({ + $id: 'httpError', + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + details: { + type: 'array', + items: { + type: 'object', + properties: { + field: { type: 'string' }, + message: { type: 'string' }, + }, + }, + }, + }, + required: ['statusCode', 'error', 'message'], +}); + +// Use in route schemas +app.get('/users/:id', { + schema: { + params: { + type: 'object', + properties: { id: { type: 'string' } }, + required: ['id'], + }, + response: { + 200: { $ref: 'user#' }, + 404: { $ref: 'httpError#' }, + 500: { $ref: 'httpError#' }, + }, + }, +}, handler); +``` + +## Reply Helpers with @fastify/sensible + +Use `@fastify/sensible` for standard HTTP errors: + +```typescript +import fastifySensible from '@fastify/sensible'; + 
+app.register(fastifySensible); + +app.get('/users/:id', async (request, reply) => { + const user = await findUser(request.params.id); + if (!user) { + return reply.notFound('User not found'); + } + if (!hasAccess(request.user, user)) { + return reply.forbidden('You cannot access this user'); + } + return user; +}); + +// Available methods: +// reply.badRequest(message?) +// reply.unauthorized(message?) +// reply.forbidden(message?) +// reply.notFound(message?) +// reply.methodNotAllowed(message?) +// reply.conflict(message?) +// reply.gone(message?) +// reply.unprocessableEntity(message?) +// reply.tooManyRequests(message?) +// reply.internalServerError(message?) +// reply.notImplemented(message?) +// reply.badGateway(message?) +// reply.serviceUnavailable(message?) +// reply.gatewayTimeout(message?) +``` + +## Async Error Handling + +Errors in async handlers are automatically caught: + +```typescript +// Errors are automatically caught and passed to error handler +app.get('/users', async (request) => { + const users = await db.users.findAll(); // If this throws, error handler catches it + return users; +}); + +// Explicit error handling for custom logic +app.get('/users/:id', async (request, reply) => { + try { + const user = await db.users.findById(request.params.id); + if (!user) { + return reply.code(404).send({ error: 'User not found' }); + } + return user; + } catch (error) { + // Transform database errors + if (error.code === 'CONNECTION_ERROR') { + request.log.error({ err: error }, 'Database connection failed'); + return reply.code(503).send({ error: 'Service temporarily unavailable' }); + } + throw error; // Re-throw for error handler + } +}); +``` + +## Hook Error Handling + +Errors in hooks are handled the same way: + +```typescript +app.addHook('onRequest', async (request, reply) => { + const token = request.headers.authorization; + if (!token) { + // This error goes to the error handler + throw new UnauthorizedError(); + } + + try { + request.user = 
await verifyToken(token); + } catch (error) { + throw new UnauthorizedError(); + } +}); + +// Or use reply to send response directly +app.addHook('onRequest', async (request, reply) => { + if (!request.headers.authorization) { + reply.code(401).send({ error: 'Unauthorized' }); + return; // Must return to stop processing + } +}); +``` + +## Not Found Handler + +Customize the 404 response: + +```typescript +app.setNotFoundHandler(async (request, reply) => { + return reply.code(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Route ${request.method} ${request.url} not found`, + }); +}); + +// With schema validation +app.setNotFoundHandler({ + preValidation: async (request, reply) => { + // Pre-validation hook for 404 handler + }, +}, async (request, reply) => { + return reply.code(404).send({ error: 'Not Found' }); +}); +``` + +## Error Wrapping + +Wrap external errors with context: + +```typescript +import createError from '@fastify/error'; + +const DatabaseError = createError('DATABASE_ERROR', 'Database operation failed: %s', 500); +const ExternalServiceError = createError('EXTERNAL_SERVICE_ERROR', 'External service failed: %s', 502); + +app.get('/users/:id', async (request) => { + try { + return await db.users.findById(request.params.id); + } catch (error) { + throw new DatabaseError(error.message, { cause: error }); + } +}); + +app.get('/weather', async (request) => { + try { + return await weatherApi.fetch(request.query.city); + } catch (error) { + throw new ExternalServiceError(error.message, { cause: error }); + } +}); +``` + +## Validation Error Customization + +Customize validation error format: + +```typescript +app.setErrorHandler((error, request, reply) => { + if (error.validation) { + const details = error.validation.map((err) => { + const field = err.instancePath + ? 
err.instancePath.slice(1).replace(/\//g, '.') + : err.params?.missingProperty || 'unknown'; + + return { + field, + message: err.message, + value: err.data, + }; + }); + + return reply.code(400).send({ + statusCode: 400, + error: 'Validation Error', + message: `Invalid ${error.validationContext}: ${details.map(d => d.field).join(', ')}`, + details, + }); + } + + // Handle other errors... + throw error; +}); +``` + +## Error Cause Chain + +Preserve error chains for debugging: + +```typescript +app.get('/complex-operation', async (request) => { + try { + await step1(); + } catch (error) { + const wrapped = new Error('Step 1 failed', { cause: error }); + wrapped.statusCode = 500; + throw wrapped; + } +}); + +// In error handler, log the full chain +app.setErrorHandler((error, request, reply) => { + // Log error with cause chain + let current = error; + const chain = []; + while (current) { + chain.push({ + message: current.message, + code: current.code, + stack: current.stack, + }); + current = current.cause; + } + + request.log.error({ errorChain: chain }, 'Request failed'); + + reply.code(error.statusCode || 500).send({ + error: error.message, + }); +}); +``` + +## Plugin-Scoped Error Handlers + +Set error handlers at the plugin level: + +```typescript +app.register(async function apiRoutes(fastify) { + // This error handler only applies to routes in this plugin + fastify.setErrorHandler((error, request, reply) => { + request.log.error({ err: error }, 'API error'); + + reply.code(error.statusCode || 500).send({ + error: { + code: error.code || 'API_ERROR', + message: error.message, + }, + }); + }); + + fastify.get('/data', async () => { + throw new Error('API-specific error'); + }); +}, { prefix: '/api' }); +``` + +## Graceful Error Recovery + +Handle errors gracefully without crashing: + +```typescript +app.get('/resilient', async (request, reply) => { + const results = await Promise.allSettled([ + fetchPrimaryData(), + fetchSecondaryData(), + fetchOptionalData(), 
+ ]); + + const [primary, secondary, optional] = results; + + if (primary.status === 'rejected') { + // Primary data is required + throw new Error('Primary data unavailable'); + } + + return { + data: primary.value, + secondary: secondary.status === 'fulfilled' ? secondary.value : null, + optional: optional.status === 'fulfilled' ? optional.value : null, + warnings: results + .filter((r) => r.status === 'rejected') + .map((r) => r.reason.message), + }; +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/hooks.md b/.claude/skills/fastify-best-practices/rules/hooks.md new file mode 100644 index 0000000..d992a27 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/hooks.md @@ -0,0 +1,464 @@ +--- +name: hooks +description: Hooks and request lifecycle in Fastify +metadata: + tags: hooks, lifecycle, middleware, onRequest, preHandler +--- + +# Hooks and Request Lifecycle + +## Request Lifecycle Overview + +Fastify executes hooks in a specific order: + +``` +Incoming Request + | + onRequest + | + preParsing + | + preValidation + | + preHandler + | + Handler + | + preSerialization + | + onSend + | + onResponse +``` + +## onRequest Hook + +First hook to execute, before body parsing. 
Use it for authentication and request-ID setup:
reply.code(429).send({ error: 'Too many requests' }); + } +}); +``` + +## preHandler Hook + +Most common hook, execute after validation, before handler: + +```typescript +// Authorization check +app.addHook('preHandler', async (request, reply) => { + const { userId } = request.params as { userId: string }; + + if (request.user.id !== userId && !request.user.isAdmin) { + reply.code(403).send({ error: 'Forbidden' }); + } +}); + +// Load related data +app.addHook('preHandler', async (request, reply) => { + if (request.params?.projectId) { + request.project = await db.projects.findById(request.params.projectId); + if (!request.project) { + reply.code(404).send({ error: 'Project not found' }); + } + } +}); + +// Transaction wrapper +app.addHook('preHandler', async (request) => { + request.transaction = await db.beginTransaction(); +}); + +app.addHook('onResponse', async (request) => { + if (request.transaction) { + await request.transaction.commit(); + } +}); + +app.addHook('onError', async (request, reply, error) => { + if (request.transaction) { + await request.transaction.rollback(); + } +}); +``` + +## preSerialization Hook + +Modify payload before serialization: + +```typescript +app.addHook('preSerialization', async (request, reply, payload) => { + // Add metadata to all responses + if (payload && typeof payload === 'object') { + return { + ...payload, + _meta: { + requestId: request.id, + timestamp: new Date().toISOString(), + }, + }; + } + return payload; +}); + +// Remove sensitive fields +app.addHook('preSerialization', async (request, reply, payload) => { + if (payload?.user?.password) { + const { password, ...user } = payload.user; + return { ...payload, user }; + } + return payload; +}); +``` + +## onSend Hook + +Modify response after serialization: + +```typescript +app.addHook('onSend', async (request, reply, payload) => { + // Add response headers + reply.header('X-Response-Time', Date.now() - request.startTime); + + // Compress response + if (payload && 
payload.length > 1024) { + const compressed = await gzip(payload); + reply.header('Content-Encoding', 'gzip'); + return compressed; + } + + return payload; +}); + +// Transform JSON string response +app.addHook('onSend', async (request, reply, payload) => { + if (reply.getHeader('content-type')?.includes('application/json')) { + // payload is already a string at this point + return payload; + } + return payload; +}); +``` + +## onResponse Hook + +Execute after response is sent. Cannot modify response: + +```typescript +app.addHook('onResponse', async (request, reply) => { + // Log response time + const responseTime = Date.now() - request.startTime; + request.log.info({ + method: request.method, + url: request.url, + statusCode: reply.statusCode, + responseTime, + }, 'Request completed'); + + // Track metrics + metrics.histogram('http_request_duration', responseTime, { + method: request.method, + route: request.routeOptions.url, + status: reply.statusCode, + }); +}); +``` + +## onError Hook + +Execute when an error is thrown: + +```typescript +app.addHook('onError', async (request, reply, error) => { + // Log error details + request.log.error({ + err: error, + url: request.url, + method: request.method, + body: request.body, + }, 'Request error'); + + // Track error metrics + metrics.increment('http_errors', { + error: error.code || 'UNKNOWN', + route: request.routeOptions.url, + }); + + // Cleanup resources + if (request.tempFile) { + await fs.unlink(request.tempFile).catch(() => {}); + } +}); +``` + +## onTimeout Hook + +Execute when request times out: + +```typescript +const app = Fastify({ + connectionTimeout: 30000, // 30 seconds +}); + +app.addHook('onTimeout', async (request, reply) => { + request.log.warn({ + url: request.url, + method: request.method, + }, 'Request timeout'); + + // Cleanup + if (request.abortController) { + request.abortController.abort(); + } +}); +``` + +## onRequestAbort Hook + +Execute when client closes connection: + +```typescript 
+app.addHook('onRequestAbort', async (request) => { + request.log.info('Client aborted request'); + + // Cancel ongoing operations + if (request.abortController) { + request.abortController.abort(); + } + + // Cleanup uploaded files + if (request.uploadedFiles) { + for (const file of request.uploadedFiles) { + await fs.unlink(file.path).catch(() => {}); + } + } +}); +``` + +## Application Lifecycle Hooks + +Hooks that run at application startup/shutdown: + +```typescript +// After all plugins are loaded +app.addHook('onReady', async function () { + this.log.info('Server is ready'); + + // Initialize connections + await this.db.connect(); + await this.redis.connect(); + + // Warm caches + await this.cache.warmup(); +}); + +// When server is closing +app.addHook('onClose', async function () { + this.log.info('Server is closing'); + + // Cleanup connections + await this.db.close(); + await this.redis.disconnect(); +}); + +// After routes are registered +app.addHook('onRoute', (routeOptions) => { + console.log(`Route registered: ${routeOptions.method} ${routeOptions.url}`); + + // Track all routes + routes.push({ + method: routeOptions.method, + url: routeOptions.url, + schema: routeOptions.schema, + }); +}); + +// After plugin is registered +app.addHook('onRegister', (instance, options) => { + console.log(`Plugin registered with prefix: ${options.prefix}`); +}); +``` + +## Scoped Hooks + +Hooks are scoped to their encapsulation context: + +```typescript +app.addHook('onRequest', async (request) => { + // Runs for ALL routes + request.log.info('Global hook'); +}); + +app.register(async function adminRoutes(fastify) { + // Only runs for routes in this plugin + fastify.addHook('onRequest', async (request, reply) => { + if (!request.user?.isAdmin) { + reply.code(403).send({ error: 'Admin only' }); + } + }); + + fastify.get('/admin/users', async () => { + return { users: [] }; + }); +}, { prefix: '/admin' }); +``` + +## Hook Execution Order + +Multiple hooks of the same 
type execute in registration order: + +```typescript +app.addHook('onRequest', async () => { + console.log('First'); +}); + +app.addHook('onRequest', async () => { + console.log('Second'); +}); + +app.addHook('onRequest', async () => { + console.log('Third'); +}); + +// Output: First, Second, Third +``` + +## Stopping Hook Execution + +Return early from hooks to stop processing: + +```typescript +app.addHook('preHandler', async (request, reply) => { + if (!request.user) { + // Send response and return to stop further processing + reply.code(401).send({ error: 'Unauthorized' }); + return; + } + // Continue to next hook and handler +}); +``` + +## Route-Level Hooks + +Add hooks to specific routes: + +```typescript +const adminOnlyHook = async (request, reply) => { + if (!request.user?.isAdmin) { + reply.code(403).send({ error: 'Forbidden' }); + } +}; + +app.get('/admin/settings', { + preHandler: [adminOnlyHook], + handler: async (request) => { + return { settings: {} }; + }, +}); + +// Multiple hooks +app.post('/orders', { + preValidation: [validateApiKey], + preHandler: [loadUser, checkQuota, logOrder], + handler: createOrderHandler, +}); +``` + +## Async Hook Patterns + +Always use async/await in hooks: + +```typescript +// GOOD - async hook +app.addHook('preHandler', async (request, reply) => { + const user = await loadUser(request.headers.authorization); + request.user = user; +}); + +// AVOID - callback style (deprecated) +app.addHook('preHandler', (request, reply, done) => { + loadUser(request.headers.authorization) + .then((user) => { + request.user = user; + done(); + }) + .catch(done); +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/http-proxy.md b/.claude/skills/fastify-best-practices/rules/http-proxy.md new file mode 100644 index 0000000..e4e0884 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/http-proxy.md @@ -0,0 +1,247 @@ +--- +name: http-proxy +description: HTTP proxying and reply.from() in Fastify +metadata: + tags: 
proxy, gateway, reverse-proxy, microservices +--- + +# HTTP Proxy and Reply.from() + +## @fastify/http-proxy + +Use `@fastify/http-proxy` for simple reverse proxy scenarios: + +```typescript +import Fastify from 'fastify'; +import httpProxy from '@fastify/http-proxy'; + +const app = Fastify({ logger: true }); + +// Proxy all requests to /api/* to another service +app.register(httpProxy, { + upstream: 'http://backend-service:3001', + prefix: '/api', + rewritePrefix: '/v1', + http2: false, +}); + +// With authentication +app.register(httpProxy, { + upstream: 'http://internal-api:3002', + prefix: '/internal', + preHandler: async (request, reply) => { + // Verify authentication before proxying + if (!request.headers.authorization) { + reply.code(401).send({ error: 'Unauthorized' }); + } + }, +}); + +await app.listen({ port: 3000 }); +``` + +## @fastify/reply-from + +For more control over proxying, use `@fastify/reply-from` with `reply.from()`: + +```typescript +import Fastify from 'fastify'; +import replyFrom from '@fastify/reply-from'; + +const app = Fastify({ logger: true }); + +app.register(replyFrom, { + base: 'http://backend-service:3001', + http2: false, +}); + +// Proxy with request/response manipulation +app.get('/users/:id', async (request, reply) => { + const { id } = request.params; + + return reply.from(`/api/users/${id}`, { + // Modify request before forwarding + rewriteRequestHeaders: (originalReq, headers) => ({ + ...headers, + 'x-request-id': request.id, + 'x-forwarded-for': request.ip, + }), + // Modify response before sending + onResponse: (request, reply, res) => { + reply.header('x-proxy', 'fastify'); + reply.send(res); + }, + }); +}); + +// Conditional routing +app.all('/api/*', async (request, reply) => { + const upstream = selectUpstream(request); + + return reply.from(request.url, { + base: upstream, + }); +}); + +function selectUpstream(request) { + // Route to different backends based on request + if (request.headers['x-beta']) { + return 
'http://beta-backend:3001'; + } + return 'http://stable-backend:3001'; +} +``` + +## API Gateway Pattern + +Build an API gateway with multiple backends: + +```typescript +import Fastify from 'fastify'; +import replyFrom from '@fastify/reply-from'; + +const app = Fastify({ logger: true }); + +// Configure multiple upstreams +const services = { + users: 'http://users-service:3001', + orders: 'http://orders-service:3002', + products: 'http://products-service:3003', +}; + +app.register(replyFrom); + +// Route to user service +app.register(async function (fastify) { + fastify.all('/*', async (request, reply) => { + return reply.from(request.url.replace('/users', ''), { + base: services.users, + }); + }); +}, { prefix: '/users' }); + +// Route to orders service +app.register(async function (fastify) { + fastify.all('/*', async (request, reply) => { + return reply.from(request.url.replace('/orders', ''), { + base: services.orders, + }); + }); +}, { prefix: '/orders' }); + +// Route to products service +app.register(async function (fastify) { + fastify.all('/*', async (request, reply) => { + return reply.from(request.url.replace('/products', ''), { + base: services.products, + }); + }); +}, { prefix: '/products' }); +``` + +## Request Body Handling + +Handle request bodies when proxying: + +```typescript +app.post('/api/data', async (request, reply) => { + return reply.from('/data', { + body: request.body, + contentType: request.headers['content-type'], + }); +}); + +// Stream large bodies +app.post('/upload', async (request, reply) => { + return reply.from('/upload', { + body: request.raw, + contentType: request.headers['content-type'], + }); +}); +``` + +## Error Handling + +Handle upstream errors gracefully: + +```typescript +app.register(replyFrom, { + base: 'http://backend:3001', + // Called when upstream returns an error + onError: (reply, error) => { + reply.log.error({ err: error }, 'Proxy error'); + reply.code(502).send({ + error: 'Bad Gateway', + message: 
'Upstream service unavailable', + }); + }, +}); + +// Custom error handling per route +app.get('/data', async (request, reply) => { + try { + return await reply.from('/data'); + } catch (error) { + request.log.error({ err: error }, 'Failed to proxy request'); + return reply.code(503).send({ + error: 'Service Unavailable', + retryAfter: 30, + }); + } +}); +``` + +## WebSocket Proxying + +Proxy WebSocket connections: + +```typescript +import Fastify from 'fastify'; +import httpProxy from '@fastify/http-proxy'; + +const app = Fastify({ logger: true }); + +app.register(httpProxy, { + upstream: 'http://ws-backend:3001', + prefix: '/ws', + websocket: true, +}); +``` + +## Timeout Configuration + +Configure proxy timeouts: + +```typescript +app.register(replyFrom, { + base: 'http://backend:3001', + http: { + requestOptions: { + timeout: 30000, // 30 seconds + }, + }, +}); +``` + +## Caching Proxied Responses + +Add caching to proxied responses: + +```typescript +import { createCache } from 'async-cache-dedupe'; + +const cache = createCache({ + ttl: 60, + storage: { type: 'memory' }, +}); + +cache.define('proxyGet', async (url: string) => { + const response = await fetch(`http://backend:3001${url}`); + return response.json(); +}); + +app.get('/cached/*', async (request, reply) => { + const data = await cache.proxyGet(request.url); + return data; +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/logging.md b/.claude/skills/fastify-best-practices/rules/logging.md new file mode 100644 index 0000000..fdc2c61 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/logging.md @@ -0,0 +1,402 @@ +--- +name: logging +description: Logging with Pino in Fastify +metadata: + tags: logging, pino, debugging, observability +--- + +# Logging with Pino + +## Built-in Pino Integration + +Fastify uses Pino for high-performance logging: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + logger: true, // Enable default logging +}); + +// Or with 
configuration +const app = Fastify({ + logger: { + level: 'info', + transport: { + target: 'pino-pretty', + options: { + colorize: true, + }, + }, + }, +}); +``` + +## Log Levels + +Available log levels (in order of severity): + +```typescript +app.log.trace('Detailed debugging'); +app.log.debug('Debugging information'); +app.log.info('General information'); +app.log.warn('Warning messages'); +app.log.error('Error messages'); +app.log.fatal('Fatal errors'); +``` + +## Request-Scoped Logging + +Each request has its own logger with request context: + +```typescript +app.get('/users/:id', async (request) => { + // Logs include request ID automatically + request.log.info('Fetching user'); + + const user = await db.users.findById(request.params.id); + + if (!user) { + request.log.warn({ userId: request.params.id }, 'User not found'); + return { error: 'Not found' }; + } + + request.log.info({ userId: user.id }, 'User fetched'); + return user; +}); +``` + +## Structured Logging + +Always use structured logging with objects: + +```typescript +// GOOD - structured, searchable +request.log.info({ + action: 'user_created', + userId: user.id, + email: user.email, +}, 'User created successfully'); + +request.log.error({ + err: error, + userId: request.params.id, + operation: 'fetch_user', +}, 'Failed to fetch user'); + +// BAD - unstructured, hard to parse +request.log.info(`User ${user.id} created with email ${user.email}`); +request.log.error(`Failed to fetch user: ${error.message}`); +``` + +## Logging Configuration by Environment + +```typescript +function getLoggerConfig() { + if (process.env.NODE_ENV === 'production') { + return { + level: 'info', + // JSON output for log aggregation + }; + } + + if (process.env.NODE_ENV === 'test') { + return false; // Disable logging in tests + } + + // Development + return { + level: 'debug', + transport: { + target: 'pino-pretty', + options: { + colorize: true, + translateTime: 'HH:MM:ss Z', + ignore: 'pid,hostname', + }, + }, + }; 
+} + +const app = Fastify({ + logger: getLoggerConfig(), +}); +``` + +## Custom Serializers + +Customize how objects are serialized: + +```typescript +const app = Fastify({ + logger: { + level: 'info', + serializers: { + // Customize request serialization + req: (request) => ({ + method: request.method, + url: request.url, + headers: { + host: request.headers.host, + 'user-agent': request.headers['user-agent'], + }, + remoteAddress: request.ip, + }), + + // Customize response serialization + res: (response) => ({ + statusCode: response.statusCode, + }), + + // Custom serializer for users + user: (user) => ({ + id: user.id, + email: user.email, + // Exclude sensitive fields + }), + }, + }, +}); + +// Use custom serializer +request.log.info({ user: request.user }, 'User action'); +``` + +## Redacting Sensitive Data + +Prevent logging sensitive information: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + logger: { + level: 'info', + redact: { + paths: [ + 'req.headers.authorization', + 'req.headers.cookie', + 'body.password', + 'body.creditCard', + '*.password', + '*.secret', + '*.token', + ], + censor: '[REDACTED]', + }, + }, +}); +``` + +## Child Loggers + +Create child loggers with additional context: + +```typescript +app.addHook('onRequest', async (request) => { + // Add user context to all logs for this request + if (request.user) { + request.log = request.log.child({ + userId: request.user.id, + userRole: request.user.role, + }); + } +}); + +// Service-level child logger +const userService = { + log: app.log.child({ service: 'UserService' }), + + async create(data) { + this.log.info({ email: data.email }, 'Creating user'); + // ... 
+ }, +}; +``` + +## Request Logging Configuration + +Customize automatic request logging: + +```typescript +const app = Fastify({ + logger: true, + disableRequestLogging: true, // Disable default request/response logs +}); + +// Custom request logging +app.addHook('onRequest', async (request) => { + request.log.info({ + method: request.method, + url: request.url, + query: request.query, + }, 'Request received'); +}); + +app.addHook('onResponse', async (request, reply) => { + request.log.info({ + statusCode: reply.statusCode, + responseTime: reply.elapsedTime, + }, 'Request completed'); +}); +``` + +## Logging Errors + +Properly log errors with stack traces: + +```typescript +app.setErrorHandler((error, request, reply) => { + // Log error with full details + request.log.error({ + err: error, // Pino serializes error objects properly + url: request.url, + method: request.method, + body: request.body, + query: request.query, + }, 'Request error'); + + reply.code(error.statusCode || 500).send({ + error: error.message, + }); +}); + +// In handlers +app.get('/data', async (request) => { + try { + return await fetchData(); + } catch (error) { + request.log.error({ err: error }, 'Failed to fetch data'); + throw error; + } +}); +``` + +## Log Destinations + +Configure where logs are sent: + +```typescript +import { createWriteStream } from 'node:fs'; + +// File output +const app = Fastify({ + logger: { + level: 'info', + stream: createWriteStream('./app.log'), + }, +}); + +// Multiple destinations with pino.multistream +import pino from 'pino'; + +const streams = [ + { stream: process.stdout }, + { stream: createWriteStream('./app.log') }, + { level: 'error', stream: createWriteStream('./error.log') }, +]; + +const app = Fastify({ + logger: pino({ level: 'info' }, pino.multistream(streams)), +}); +``` + +## Log Rotation + +Use pino-roll for log rotation: + +```bash +node app.js | pino-roll --frequency daily --extension .log +``` + +Or configure programmatically: + 
+```typescript +import { createStream } from 'rotating-file-stream'; + +const stream = createStream('app.log', { + size: '10M', // Rotate every 10MB + interval: '1d', // Rotate daily + compress: 'gzip', + path: './logs', +}); + +const app = Fastify({ + logger: { + level: 'info', + stream, + }, +}); +``` + +## Log Aggregation + +Format logs for aggregation services: + +```typescript +// For ELK Stack, Datadog, etc. - use default JSON format +const app = Fastify({ + logger: { + level: 'info', + // Default JSON output works with most log aggregators + }, +}); + +// Add service metadata +const app = Fastify({ + logger: { + level: 'info', + base: { + service: 'user-api', + version: process.env.APP_VERSION, + environment: process.env.NODE_ENV, + }, + }, +}); +``` + +## Request ID Tracking + +Use request IDs for distributed tracing: + +```typescript +const app = Fastify({ + logger: true, + requestIdHeader: 'x-request-id', // Use incoming header + genReqId: (request) => { + // Generate ID if not provided + return request.headers['x-request-id'] || crypto.randomUUID(); + }, +}); + +// Forward request ID to downstream services +app.addHook('onRequest', async (request) => { + request.requestId = request.id; +}); + +// Include in outgoing requests +const response = await fetch('http://other-service/api', { + headers: { + 'x-request-id': request.id, + }, +}); +``` + +## Performance Considerations + +Pino is fast, but consider: + +```typescript +// Avoid string concatenation in log calls +// BAD +request.log.info('User ' + user.id + ' did ' + action); + +// GOOD +request.log.info({ userId: user.id, action }, 'User action'); + +// Use appropriate log levels +// Don't log at info level in hot paths +if (app.log.isLevelEnabled('debug')) { + request.log.debug({ details: expensiveToCompute() }, 'Debug info'); +} +``` diff --git a/.claude/skills/fastify-best-practices/rules/performance.md b/.claude/skills/fastify-best-practices/rules/performance.md new file mode 100644 index 
0000000..7f59b7d --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/performance.md @@ -0,0 +1,425 @@ +--- +name: performance +description: Performance optimization for Fastify applications +metadata: + tags: performance, optimization, speed, benchmarking +--- + +# Performance Optimization + +## Fastify is Fast by Default + +Fastify is designed for performance. Key optimizations are built-in: + +- Fast JSON serialization with `fast-json-stringify` +- Efficient routing with `find-my-way` +- Schema-based validation with `ajv` (compiled validators) +- Low overhead request/response handling + +## Use @fastify/under-pressure for Load Shedding + +Protect your application from overload with `@fastify/under-pressure`: + +```typescript +import underPressure from '@fastify/under-pressure'; + +app.register(underPressure, { + maxEventLoopDelay: 1000, // Max event loop delay in ms + maxHeapUsedBytes: 1000000000, // Max heap used (~1GB) + maxRssBytes: 1500000000, // Max RSS (~1.5GB) + maxEventLoopUtilization: 0.98, // Max event loop utilization + pressureHandler: (request, reply, type, value) => { + reply.code(503).send({ + error: 'Service Unavailable', + message: `Server under pressure: ${type}`, + }); + }, +}); + +// Health check that respects pressure +app.get('/health', async (request, reply) => { + return { status: 'ok' }; +}); +``` + +## Always Define Response Schemas + +Response schemas enable fast-json-stringify, which is significantly faster than JSON.stringify: + +```typescript +// FAST - uses fast-json-stringify +app.get('/users', { + schema: { + response: { + 200: { + type: 'array', + items: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + email: { type: 'string' }, + }, + }, + }, + }, + }, +}, async () => { + return db.users.findAll(); +}); + +// SLOW - uses JSON.stringify +app.get('/users-slow', async () => { + return db.users.findAll(); +}); +``` + +## Avoid Dynamic Schema Compilation + +Add schemas at 
startup, not at request time: + +```typescript +// GOOD - schemas compiled at startup +app.addSchema({ $id: 'user', ... }); + +app.get('/users', { + schema: { response: { 200: { $ref: 'user#' } } }, +}, handler); + +// BAD - schema compiled per request +app.get('/users', async (request, reply) => { + const schema = getSchemaForUser(request.user); + // This is slow! +}); +``` + +## Use Logger Wisely + +Pino is fast, but excessive logging has overhead: + +```typescript +import Fastify from 'fastify'; + +// Set log level via environment variable +const app = Fastify({ + logger: { + level: process.env.LOG_LEVEL || 'info', + }, +}); + +// Avoid logging large objects +app.get('/data', async (request) => { + // BAD - logs entire payload + request.log.info({ data: largeObject }, 'Processing'); + + // GOOD - log only what's needed + request.log.info({ id: largeObject.id }, 'Processing'); + + return largeObject; +}); +``` + +## Connection Pooling + +Use connection pools for databases: + +```typescript +import postgres from 'postgres'; + +// Create pool at startup +const sql = postgres(process.env.DATABASE_URL, { + max: 20, // Maximum pool size + idle_timeout: 20, + connect_timeout: 10, +}); + +app.decorate('db', sql); + +// Connections are reused +app.get('/users', async () => { + return app.db`SELECT * FROM users LIMIT 100`; +}); +``` + +## Avoid Blocking the Event Loop + +Use `piscina` for CPU-intensive operations. 
It provides a robust worker thread pool: + +```typescript +import Piscina from 'piscina'; +import { join } from 'node:path'; + +const piscina = new Piscina({ + filename: join(import.meta.dirname, 'workers', 'compute.js'), +}); + +app.post('/compute', async (request) => { + const result = await piscina.run(request.body); + return result; +}); +``` + +```typescript +// workers/compute.js +export default function compute(data) { + // CPU-intensive work here + return processedResult; +} +``` + +## Stream Large Responses + +Stream large payloads instead of buffering: + +```typescript +import { createReadStream } from 'node:fs'; +import { pipeline } from 'node:stream/promises'; + +// GOOD - stream file +app.get('/large-file', async (request, reply) => { + const stream = createReadStream('./large-file.json'); + reply.type('application/json'); + return reply.send(stream); +}); + +// BAD - load entire file into memory +app.get('/large-file-bad', async () => { + const content = await fs.readFile('./large-file.json', 'utf-8'); + return JSON.parse(content); +}); + +// Stream database results +app.get('/export', async (request, reply) => { + reply.type('application/json'); + + const cursor = db.users.findCursor(); + reply.raw.write('['); + + let first = true; + for await (const user of cursor) { + if (!first) reply.raw.write(','); + reply.raw.write(JSON.stringify(user)); + first = false; + } + + reply.raw.write(']'); + reply.raw.end(); +}); +``` + +## Caching Strategies + +Implement caching for expensive operations: + +```typescript +import { LRUCache } from 'lru-cache'; + +const cache = new LRUCache({ + max: 1000, + ttl: 60000, // 1 minute +}); + +app.get('/expensive/:id', async (request) => { + const { id } = request.params; + const cacheKey = `expensive:${id}`; + + const cached = cache.get(cacheKey); + if (cached) { + return cached; + } + + const result = await expensiveOperation(id); + cache.set(cacheKey, result); + + return result; +}); + +// Cache control headers 
+app.get('/static-data', async (request, reply) => { + reply.header('Cache-Control', 'public, max-age=3600'); + return { data: 'static' }; +}); +``` + +## Request Coalescing with async-cache-dedupe + +Use `async-cache-dedupe` for deduplicating concurrent identical requests and caching: + +```typescript +import { createCache } from 'async-cache-dedupe'; + +const cache = createCache({ + ttl: 60, // seconds + stale: 5, // serve stale while revalidating + storage: { type: 'memory' }, +}); + +cache.define('fetchData', async (id: string) => { + return db.findById(id); +}); + +app.get('/data/:id', async (request) => { + const { id } = request.params; + // Automatically deduplicates concurrent requests for the same id + // and caches the result + return cache.fetchData(id); +}); +``` + +For distributed caching, use Redis storage: + +```typescript +import { createCache } from 'async-cache-dedupe'; +import Redis from 'ioredis'; + +const redis = new Redis(process.env.REDIS_URL); + +const cache = createCache({ + ttl: 60, + storage: { type: 'redis', options: { client: redis } }, +}); +``` + +## Payload Limits + +Set appropriate payload limits: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + bodyLimit: 1048576, // 1MB default +}); + +// Per-route limit for file uploads +app.post('/upload', { + bodyLimit: 10485760, // 10MB for this route +}, uploadHandler); +``` + +## Compression + +Use compression for responses: + +```typescript +import fastifyCompress from '@fastify/compress'; + +app.register(fastifyCompress, { + global: true, + threshold: 1024, // Only compress responses > 1KB + encodings: ['gzip', 'deflate'], +}); + +// Disable for specific route +app.get('/already-compressed', { + compress: false, +}, handler); +``` + +## Connection Timeouts + +Configure appropriate timeouts: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + connectionTimeout: 30000, // 30 seconds + keepAliveTimeout: 5000, // 5 seconds +}); + +// 
Per-route timeout +app.get('/long-operation', { + config: { + timeout: 60000, // 60 seconds + }, +}, async (request) => { + return longOperation(); +}); +``` + +## Disable Unnecessary Features + +Disable features you don't need: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + disableRequestLogging: true, // If you don't need request logs + trustProxy: false, // If not behind proxy + caseSensitive: true, // Enable for slight performance gain + ignoreDuplicateSlashes: false, +}); +``` + +## Benchmarking + +Use autocannon for load testing: + +```bash +# Install +npm install -g autocannon + +# Basic benchmark +autocannon http://localhost:3000/api/users + +# With options +autocannon -c 100 -d 30 -p 10 http://localhost:3000/api/users +# -c: connections +# -d: duration in seconds +# -p: pipelining factor +``` + +```typescript +// Programmatic benchmarking +import autocannon from 'autocannon'; + +const result = await autocannon({ + url: 'http://localhost:3000/api/users', + connections: 100, + duration: 30, + pipelining: 10, +}); + +console.log(autocannon.printResult(result)); +``` + +## Profiling + +Use `@platformatic/flame` for flame graph profiling: + +```bash +npx @platformatic/flame app.js +``` + +This generates an interactive flame graph to identify performance bottlenecks. 
+ +## Memory Management + +Monitor and optimize memory usage: + +```typescript +// Add health endpoint with memory info +app.get('/health', async () => { + const memory = process.memoryUsage(); + return { + status: 'ok', + memory: { + heapUsed: Math.round(memory.heapUsed / 1024 / 1024) + 'MB', + heapTotal: Math.round(memory.heapTotal / 1024 / 1024) + 'MB', + rss: Math.round(memory.rss / 1024 / 1024) + 'MB', + }, + }; +}); + +// Avoid memory leaks in closures +app.addHook('onRequest', async (request) => { + // BAD - holding reference to large object + const largeData = await loadLargeData(); + request.getData = () => largeData; + + // GOOD - load on demand + request.getData = () => loadLargeData(); +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/plugins.md b/.claude/skills/fastify-best-practices/rules/plugins.md new file mode 100644 index 0000000..f76a474 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/plugins.md @@ -0,0 +1,320 @@ +--- +name: plugins +description: Plugin development and encapsulation in Fastify +metadata: + tags: plugins, encapsulation, modules, architecture +--- + +# Plugin Development and Encapsulation + +## Understanding Encapsulation + +Fastify's plugin system provides automatic encapsulation. 
Each plugin creates its own context, isolating decorators, hooks, and plugins registered within it: + +```typescript +import Fastify from 'fastify'; +import fp from 'fastify-plugin'; + +const app = Fastify(); + +// This plugin is encapsulated - its decorators are NOT available to siblings +app.register(async function childPlugin(fastify) { + fastify.decorate('privateUtil', () => 'only available here'); + + // This decorator is only available within this plugin and its children + fastify.get('/child', async function (request, reply) { + return this.privateUtil(); + }); +}); + +// This route CANNOT access privateUtil - it's in a different context +app.get('/parent', async function (request, reply) { + // this.privateUtil is undefined here + return { status: 'ok' }; +}); +``` + +## Breaking Encapsulation with fastify-plugin + +Use `fastify-plugin` when you need to share decorators, hooks, or plugins with the parent context: + +```typescript +import fp from 'fastify-plugin'; + +// This plugin's decorators will be available to the parent and siblings +export default fp(async function databasePlugin(fastify, options) { + const db = await createConnection(options.connectionString); + + fastify.decorate('db', db); + + fastify.addHook('onClose', async () => { + await db.close(); + }); +}, { + name: 'database-plugin', + dependencies: [], // List plugin dependencies +}); +``` + +## Plugin Registration Order + +Plugins are registered in order, but loading is asynchronous. 
Use `after()` for sequential dependencies: + +```typescript +import Fastify from 'fastify'; +import databasePlugin from './plugins/database.js'; +import authPlugin from './plugins/auth.js'; +import routesPlugin from './routes/index.js'; + +const app = Fastify(); + +// Database must be ready before auth +app.register(databasePlugin); + +// Auth depends on database +app.register(authPlugin); + +// Routes depend on both +app.register(routesPlugin); + +// Or use after() for explicit sequencing +app.register(databasePlugin).after(() => { + app.register(authPlugin).after(() => { + app.register(routesPlugin); + }); +}); + +await app.ready(); +``` + +## Plugin Options + +Always validate and document plugin options: + +```typescript +import fp from 'fastify-plugin'; + +interface CachePluginOptions { + ttl: number; + maxSize?: number; + prefix?: string; +} + +export default fp<CachePluginOptions>(async function cachePlugin(fastify, options) { + const { ttl, maxSize = 1000, prefix = 'cache:' } = options; + + if (typeof ttl !== 'number' || ttl <= 0) { + throw new Error('Cache plugin requires a positive ttl option'); + } + + const cache = new Map(); + + fastify.decorate('cache', { + get(key: string): unknown | undefined { + const item = cache.get(prefix + key); + if (!item) return undefined; + if (Date.now() > item.expires) { + cache.delete(prefix + key); + return undefined; + } + return item.value; + }, + set(key: string, value: unknown): void { + if (cache.size >= maxSize) { + const firstKey = cache.keys().next().value; + cache.delete(firstKey); + } + cache.set(prefix + key, { value, expires: Date.now() + ttl }); + }, + }); +}, { + name: 'cache-plugin', +}); +``` + +## Plugin Factory Pattern + +Create configurable plugins using factory functions: + +```typescript +import fp from 'fastify-plugin'; + +interface RateLimitOptions { + max: number; + timeWindow: number; +} + +function createRateLimiter(defaults: Partial<RateLimitOptions> = {}) { + return fp(async function rateLimitPlugin(fastify, options) { + const
config = { ...defaults, ...options }; + + // Implementation + fastify.decorate('rateLimit', config); + }, { + name: 'rate-limiter', + }); +} + +// Usage +app.register(createRateLimiter({ max: 100 }), { timeWindow: 60000 }); +``` + +## Plugin Dependencies + +Declare dependencies to ensure proper load order: + +```typescript +import fp from 'fastify-plugin'; + +export default fp(async function authPlugin(fastify) { + // This plugin requires 'database-plugin' to be loaded first + if (!fastify.hasDecorator('db')) { + throw new Error('Auth plugin requires database plugin'); + } + + fastify.decorate('authenticate', async (request) => { + const user = await fastify.db.users.findByToken(request.headers.authorization); + return user; + }); +}, { + name: 'auth-plugin', + dependencies: ['database-plugin'], +}); +``` + +## Scoped Plugins for Route Groups + +Use encapsulation to scope plugins to specific routes: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Public routes - no auth required +app.register(async function publicRoutes(fastify) { + fastify.get('/health', async () => ({ status: 'ok' })); + fastify.get('/docs', async () => ({ version: '1.0.0' })); +}); + +// Protected routes - auth required +app.register(async function protectedRoutes(fastify) { + // Auth hook only applies to routes in this plugin + fastify.addHook('onRequest', async (request, reply) => { + const token = request.headers.authorization; + if (!token) { + reply.code(401).send({ error: 'Unauthorized' }); + return; + } + request.user = await verifyToken(token); + }); + + fastify.get('/profile', async (request) => { + return { user: request.user }; + }); + + fastify.get('/settings', async (request) => { + return { settings: await getSettings(request.user.id) }; + }); +}); +``` + +## Prefix Routes with Register + +Use the `prefix` option to namespace routes: + +```typescript +app.register(import('./routes/users.js'), { prefix: '/api/v1/users' }); 
+app.register(import('./routes/posts.js'), { prefix: '/api/v1/posts' }); + +// In routes/users.js +export default async function userRoutes(fastify) { + // Becomes /api/v1/users + fastify.get('/', async () => { + return { users: [] }; + }); + + // Becomes /api/v1/users/:id + fastify.get('/:id', async (request) => { + return { user: { id: request.params.id } }; + }); +} +``` + +## Plugin Metadata + +Add metadata for documentation and tooling: + +```typescript +import fp from 'fastify-plugin'; + +async function metricsPlugin(fastify) { + // Implementation +} + +export default fp(metricsPlugin, { + name: 'metrics-plugin', + fastify: '5.x', // Fastify version compatibility + dependencies: ['pino-plugin'], + decorators: { + fastify: ['db'], // Required decorators + request: [], + reply: [], + }, +}); +``` + +## Autoload Plugins + +Use `@fastify/autoload` for automatic plugin loading: + +```typescript +import Fastify from 'fastify'; +import autoload from '@fastify/autoload'; +import { fileURLToPath } from 'node:url'; +import { dirname, join } from 'node:path'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +const app = Fastify(); + +// Load all plugins from the plugins directory +app.register(autoload, { + dir: join(__dirname, 'plugins'), + options: { prefix: '/api' }, +}); + +// Load all routes from the routes directory +app.register(autoload, { + dir: join(__dirname, 'routes'), + options: { prefix: '/api' }, +}); +``` + +## Testing Plugins in Isolation + +Test plugins independently: + +```typescript +import { describe, it, before, after } from 'node:test'; +import Fastify from 'fastify'; +import myPlugin from './my-plugin.js'; + +describe('MyPlugin', () => { + let app; + + before(async () => { + app = Fastify(); + app.register(myPlugin, { option: 'value' }); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + it('should decorate fastify instance', (t) => { + t.assert.ok(app.hasDecorator('myDecorator')); + }); +}); +``` 
diff --git a/.claude/skills/fastify-best-practices/rules/routes.md b/.claude/skills/fastify-best-practices/rules/routes.md new file mode 100644 index 0000000..2924560 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/routes.md @@ -0,0 +1,467 @@ +--- +name: routes +description: Route organization and handlers in Fastify +metadata: + tags: routes, handlers, http, rest, api +--- + +# Route Organization and Handlers + +## Basic Route Definition + +Define routes with the shorthand methods or the full route method: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +// Shorthand methods +app.get('/users', async (request, reply) => { + return { users: [] }; +}); + +app.post('/users', async (request, reply) => { + return { created: true }; +}); + +// Full route method with all options +app.route({ + method: 'GET', + url: '/users/:id', + schema: { + params: { + type: 'object', + properties: { + id: { type: 'string' }, + }, + required: ['id'], + }, + }, + handler: async (request, reply) => { + return { id: request.params.id }; + }, +}); +``` + +## Route Parameters + +Access URL parameters through `request.params`: + +```typescript +// Single parameter +app.get('/users/:id', async (request) => { + const { id } = request.params as { id: string }; + return { userId: id }; +}); + +// Multiple parameters +app.get('/users/:userId/posts/:postId', async (request) => { + const { userId, postId } = request.params as { userId: string; postId: string }; + return { userId, postId }; +}); + +// Wildcard parameter (captures everything after) +app.get('/files/*', async (request) => { + const path = (request.params as { '*': string })['*']; + return { filePath: path }; +}); + +// Regex parameters (Fastify uses find-my-way) +app.get('/orders/:id(\\d+)', async (request) => { + // Only matches numeric IDs + const { id } = request.params as { id: string }; + return { orderId: parseInt(id, 10) }; +}); +``` + +## Query String Parameters + +Access query 
parameters through `request.query`: + +```typescript +app.get('/search', { + schema: { + querystring: { + type: 'object', + properties: { + q: { type: 'string' }, + page: { type: 'integer', default: 1 }, + limit: { type: 'integer', default: 10, maximum: 100 }, + }, + required: ['q'], + }, + }, + handler: async (request) => { + const { q, page, limit } = request.query as { + q: string; + page: number; + limit: number; + }; + return { query: q, page, limit }; + }, +}); +``` + +## Request Body + +Access the request body through `request.body`: + +```typescript +app.post('/users', { + schema: { + body: { + type: 'object', + properties: { + name: { type: 'string', minLength: 1 }, + email: { type: 'string', format: 'email' }, + age: { type: 'integer', minimum: 0 }, + }, + required: ['name', 'email'], + }, + }, + handler: async (request, reply) => { + const user = request.body as { name: string; email: string; age?: number }; + // Create user... + reply.code(201); + return { user }; + }, +}); +``` + +## Headers + +Access request headers through `request.headers`: + +```typescript +app.get('/protected', { + schema: { + headers: { + type: 'object', + properties: { + authorization: { type: 'string' }, + }, + required: ['authorization'], + }, + }, + handler: async (request) => { + const token = request.headers.authorization; + return { authenticated: true }; + }, +}); +``` + +## Reply Methods + +Use reply methods to control the response: + +```typescript +app.get('/examples', async (request, reply) => { + // Set status code + reply.code(201); + + // Set headers + reply.header('X-Custom-Header', 'value'); + reply.headers({ 'X-Another': 'value', 'X-Third': 'value' }); + + // Set content type + reply.type('application/json'); + + // Redirect + // reply.redirect('/other-url'); + // reply.redirect(301, '/permanent-redirect'); + + // Return response (automatic serialization) + return { status: 'ok' }; +}); + +// Explicit send (useful in non-async handlers) +app.get('/explicit', 
(request, reply) => { + reply.send({ status: 'ok' }); +}); + +// Stream response +app.get('/stream', async (request, reply) => { + const stream = fs.createReadStream('./large-file.txt'); + reply.type('text/plain'); + return reply.send(stream); +}); +``` + +## Route Organization by Feature + +Organize routes by feature/domain in separate files: + +``` +src/ + routes/ + users/ + index.ts # Route definitions + handlers.ts # Handler functions + schemas.ts # JSON schemas + posts/ + index.ts + handlers.ts + schemas.ts +``` + +```typescript +// routes/users/schemas.ts +export const userSchema = { + type: 'object', + properties: { + id: { type: 'string', format: 'uuid' }, + name: { type: 'string' }, + email: { type: 'string', format: 'email' }, + }, +}; + +export const createUserSchema = { + body: { + type: 'object', + properties: { + name: { type: 'string', minLength: 1 }, + email: { type: 'string', format: 'email' }, + }, + required: ['name', 'email'], + }, + response: { + 201: userSchema, + }, +}; + +// routes/users/handlers.ts +import type { FastifyRequest, FastifyReply } from 'fastify'; + +export async function createUser( + request: FastifyRequest<{ Body: { name: string; email: string } }>, + reply: FastifyReply, +) { + const { name, email } = request.body; + const user = await request.server.db.users.create({ name, email }); + reply.code(201); + return user; +} + +export async function getUsers(request: FastifyRequest) { + return request.server.db.users.findAll(); +} + +// routes/users/index.ts +import type { FastifyInstance } from 'fastify'; +import { createUser, getUsers } from './handlers.js'; +import { createUserSchema } from './schemas.js'; + +export default async function userRoutes(fastify: FastifyInstance) { + fastify.get('/', getUsers); + fastify.post('/', { schema: createUserSchema }, createUser); +} +``` + +## Route Constraints + +Add constraints to routes for versioning or host-based routing: + +```typescript +// Version constraint +app.get('/users', { + 
constraints: { version: '1.0.0' }, + handler: async () => ({ version: '1.0.0', users: [] }), +}); + +app.get('/users', { + constraints: { version: '2.0.0' }, + handler: async () => ({ version: '2.0.0', data: { users: [] } }), +}); + +// Client sends: Accept-Version: 1.0.0 + +// Host constraint +app.get('/', { + constraints: { host: 'api.example.com' }, + handler: async () => ({ api: true }), +}); + +app.get('/', { + constraints: { host: 'www.example.com' }, + handler: async () => ({ web: true }), +}); +``` + +## Route Prefixing + +Use prefixes to namespace routes: + +```typescript +// Using register +app.register(async function (fastify) { + fastify.get('/list', async () => ({ users: [] })); + fastify.get('/:id', async (request) => ({ id: request.params.id })); +}, { prefix: '/users' }); + +// Results in: +// GET /users/list +// GET /users/:id +``` + +## Multiple Methods + +Handle multiple HTTP methods with one handler: + +```typescript +app.route({ + method: ['GET', 'HEAD'], + url: '/resource', + handler: async (request) => { + return { data: 'resource' }; + }, +}); +``` + +## 404 Handler + +Customize the not found handler: + +```typescript +app.setNotFoundHandler({ + preValidation: async (request, reply) => { + // Optional pre-validation hook + }, + preHandler: async (request, reply) => { + // Optional pre-handler hook + }, +}, async (request, reply) => { + reply.code(404); + return { + error: 'Not Found', + message: `Route ${request.method} ${request.url} not found`, + statusCode: 404, + }; +}); +``` + +## Method Not Allowed + +Handle method not allowed responses: + +```typescript +// Fastify doesn't have built-in 405 handling +// Implement with a custom not found handler that checks allowed methods +app.setNotFoundHandler(async (request, reply) => { + // Check if the URL exists with a different method + const route = app.hasRoute({ + url: request.url, + method: 'GET', // Check other methods + }); + + if (route) { + reply.code(405); + return { error: 'Method Not 
Allowed' }; + } + + reply.code(404); + return { error: 'Not Found' }; +}); +``` + +## Route-Level Configuration + +Apply configuration to specific routes: + +```typescript +app.get('/slow-operation', { + config: { + rateLimit: { max: 10, timeWindow: '1 minute' }, + }, + handler: async (request) => { + return { result: await slowOperation() }; + }, +}); + +// Access config in hooks +app.addHook('onRequest', async (request, reply) => { + const config = request.routeOptions.config; + if (config.rateLimit) { + // Apply rate limiting + } +}); +``` + +## Async Route Registration + +Register routes from async sources: + +```typescript +app.register(async function (fastify) { + const routeConfigs = await loadRoutesFromDatabase(); + + for (const config of routeConfigs) { + fastify.route({ + method: config.method, + url: config.path, + handler: createDynamicHandler(config), + }); + } +}); +``` + +## Auto-loading Routes with @fastify/autoload + +Use `@fastify/autoload` to automatically load routes from a directory structure: + +```typescript +import Fastify from 'fastify'; +import autoload from '@fastify/autoload'; +import { join } from 'node:path'; + +const app = Fastify({ logger: true }); + +// Auto-load plugins +app.register(autoload, { + dir: join(import.meta.dirname, 'plugins'), + options: { prefix: '' }, +}); + +// Auto-load routes +app.register(autoload, { + dir: join(import.meta.dirname, 'routes'), + options: { prefix: '/api' }, +}); + +await app.listen({ port: 3000 }); +``` + +Directory structure: + +``` +src/ + plugins/ + database.ts # Loaded automatically + auth.ts # Loaded automatically + routes/ + users/ + index.ts # GET/POST /api/users + _id/ + index.ts # GET/PUT/DELETE /api/users/:id + posts/ + index.ts # GET/POST /api/posts +``` + +Route file example: + +```typescript +// routes/users/index.ts +import type { FastifyPluginAsync } from 'fastify'; + +const users: FastifyPluginAsync = async (fastify) => { + fastify.get('/', async () => { + return 
fastify.repositories.users.findAll(); + }); + + fastify.post('/', async (request) => { + return fastify.repositories.users.create(request.body); + }); +}; + +export default users; +``` diff --git a/.claude/skills/fastify-best-practices/rules/schemas.md b/.claude/skills/fastify-best-practices/rules/schemas.md new file mode 100644 index 0000000..404f3f9 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/schemas.md @@ -0,0 +1,585 @@ +--- +name: schemas +description: JSON Schema validation in Fastify with TypeBox +metadata: + tags: validation, json-schema, schemas, ajv, typebox +--- + +# JSON Schema Validation + +## Use TypeBox for Type-Safe Schemas + +**Prefer TypeBox for defining schemas.** It provides TypeScript types automatically and compiles to JSON Schema: + +```typescript +import Fastify from 'fastify'; +import { Type, type Static } from '@sinclair/typebox'; + +const app = Fastify(); + +// Define schema with TypeBox - get TypeScript types for free +const CreateUserBody = Type.Object({ + name: Type.String({ minLength: 1, maxLength: 100 }), + email: Type.String({ format: 'email' }), + age: Type.Optional(Type.Integer({ minimum: 0, maximum: 150 })), +}); + +const UserResponse = Type.Object({ + id: Type.String({ format: 'uuid' }), + name: Type.String(), + email: Type.String(), + createdAt: Type.String({ format: 'date-time' }), +}); + +// TypeScript types are derived automatically +type CreateUserBodyType = Static<typeof CreateUserBody>; +type UserResponseType = Static<typeof UserResponse>; + +app.post<{ + Body: CreateUserBodyType; + Reply: UserResponseType; +}>('/users', { + schema: { + body: CreateUserBody, + response: { + 201: UserResponse, + }, + }, +}, async (request, reply) => { + // request.body is fully typed as CreateUserBodyType + const user = await createUser(request.body); + reply.code(201); + return user; +}); +``` + +## TypeBox Common Patterns + +```typescript +import { Type, type Static } from '@sinclair/typebox'; + +// Enums +const Status = Type.Union([ + Type.Literal('active'), +
Type.Literal('inactive'), + Type.Literal('pending'), +]); + +// Arrays +const Tags = Type.Array(Type.String(), { minItems: 1, maxItems: 10 }); + +// Nested objects +const Address = Type.Object({ + street: Type.String(), + city: Type.String(), + country: Type.String(), + zip: Type.Optional(Type.String()), +}); + +// References (reusable schemas) +const User = Type.Object({ + id: Type.String({ format: 'uuid' }), + name: Type.String(), + address: Address, + tags: Tags, + status: Status, +}); + +// Nullable +const NullableString = Type.Union([Type.String(), Type.Null()]); + +// Record/Map +const Metadata = Type.Record(Type.String(), Type.Unknown()); +``` + +## Register TypeBox Schemas Globally + +```typescript +import { Type, type Static } from '@sinclair/typebox'; + +// Define shared schemas +const ErrorResponse = Type.Object({ + error: Type.String(), + message: Type.String(), + statusCode: Type.Integer(), +}); + +const PaginationQuery = Type.Object({ + page: Type.Integer({ minimum: 1, default: 1 }), + limit: Type.Integer({ minimum: 1, maximum: 100, default: 20 }), +}); + +// Register globally +app.addSchema(Type.Object({ $id: 'ErrorResponse', ...ErrorResponse })); +app.addSchema(Type.Object({ $id: 'PaginationQuery', ...PaginationQuery })); + +// Reference in routes +app.get('/items', { + schema: { + querystring: { $ref: 'PaginationQuery#' }, + response: { + 400: { $ref: 'ErrorResponse#' }, + }, + }, +}, handler); +``` + +## Plain JSON Schema (Alternative) + +You can also use plain JSON Schema directly: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify(); + +const createUserSchema = { + body: { + type: 'object', + properties: { + name: { type: 'string', minLength: 1, maxLength: 100 }, + email: { type: 'string', format: 'email' }, + age: { type: 'integer', minimum: 0, maximum: 150 }, + }, + required: ['name', 'email'], + additionalProperties: false, + }, + response: { + 201: { + type: 'object', + properties: { + id: { type: 'string', format: 
'uuid' }, + name: { type: 'string' }, + email: { type: 'string' }, + createdAt: { type: 'string', format: 'date-time' }, + }, + }, + }, +}; + +app.post('/users', { schema: createUserSchema }, async (request, reply) => { + const user = await createUser(request.body); + reply.code(201); + return user; +}); +``` + +## Request Validation Parts + +Validate different parts of the request: + +```typescript +const fullRequestSchema = { + // URL parameters + params: { + type: 'object', + properties: { + id: { type: 'string', format: 'uuid' }, + }, + required: ['id'], + }, + + // Query string + querystring: { + type: 'object', + properties: { + include: { type: 'string', enum: ['posts', 'comments', 'all'] }, + limit: { type: 'integer', minimum: 1, maximum: 100, default: 10 }, + }, + }, + + // Request headers + headers: { + type: 'object', + properties: { + 'x-api-key': { type: 'string', minLength: 32 }, + }, + required: ['x-api-key'], + }, + + // Request body + body: { + type: 'object', + properties: { + data: { type: 'object' }, + }, + required: ['data'], + }, +}; + +app.put('/resources/:id', { schema: fullRequestSchema }, handler); +``` + +## Shared Schemas with $id + +Define reusable schemas with `$id` and reference them with `$ref`: + +```typescript +// Add shared schemas to Fastify +app.addSchema({ + $id: 'user', + type: 'object', + properties: { + id: { type: 'string', format: 'uuid' }, + name: { type: 'string' }, + email: { type: 'string', format: 'email' }, + createdAt: { type: 'string', format: 'date-time' }, + }, + required: ['id', 'name', 'email'], +}); + +app.addSchema({ + $id: 'userCreate', + type: 'object', + properties: { + name: { type: 'string', minLength: 1 }, + email: { type: 'string', format: 'email' }, + }, + required: ['name', 'email'], + additionalProperties: false, +}); + +app.addSchema({ + $id: 'error', + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + }, +}); + +// 
Reference shared schemas +app.post('/users', { + schema: { + body: { $ref: 'userCreate#' }, + response: { + 201: { $ref: 'user#' }, + 400: { $ref: 'error#' }, + }, + }, +}, handler); + +app.get('/users/:id', { + schema: { + params: { + type: 'object', + properties: { id: { type: 'string', format: 'uuid' } }, + required: ['id'], + }, + response: { + 200: { $ref: 'user#' }, + 404: { $ref: 'error#' }, + }, + }, +}, handler); +``` + +## Array Schemas + +Define schemas for array responses: + +```typescript +app.addSchema({ + $id: 'userList', + type: 'object', + properties: { + users: { + type: 'array', + items: { $ref: 'user#' }, + }, + total: { type: 'integer' }, + page: { type: 'integer' }, + pageSize: { type: 'integer' }, + }, +}); + +app.get('/users', { + schema: { + querystring: { + type: 'object', + properties: { + page: { type: 'integer', minimum: 1, default: 1 }, + pageSize: { type: 'integer', minimum: 1, maximum: 100, default: 20 }, + }, + }, + response: { + 200: { $ref: 'userList#' }, + }, + }, +}, handler); +``` + +## Custom Formats + +Add custom validation formats: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + ajv: { + customOptions: { + formats: { + 'iso-country': /^[A-Z]{2}$/, + 'phone': /^\+?[1-9]\d{1,14}$/, + }, + }, + }, +}); + +// Or add formats dynamically +app.addSchema({ + $id: 'address', + type: 'object', + properties: { + street: { type: 'string' }, + country: { type: 'string', format: 'iso-country' }, + phone: { type: 'string', format: 'phone' }, + }, +}); +``` + +## Custom Keywords + +Add custom validation keywords: + +```typescript +import Fastify from 'fastify'; +import Ajv from 'ajv'; + +const app = Fastify({ + ajv: { + customOptions: { + keywords: [ + { + keyword: 'isEven', + type: 'number', + validate: (schema: boolean, data: number) => { + if (schema) { + return data % 2 === 0; + } + return true; + }, + errors: false, + }, + ], + }, + }, +}); + +// Use custom keyword +app.post('/numbers', { + schema: { + body: 
{ + type: 'object', + properties: { + value: { type: 'integer', isEven: true }, + }, + }, + }, +}, handler); +``` + +## Coercion + +Fastify coerces types by default for query strings and params: + +```typescript +// Query string "?page=5&active=true" becomes: +// { page: 5, active: true } (number and boolean, not strings) + +app.get('/items', { + schema: { + querystring: { + type: 'object', + properties: { + page: { type: 'integer' }, // "5" -> 5 + active: { type: 'boolean' }, // "true" -> true + tags: { + type: 'array', + items: { type: 'string' }, // "a,b,c" -> ["a", "b", "c"] + }, + }, + }, + }, +}, handler); +``` + +## Validation Error Handling + +Customize validation error responses: + +```typescript +app.setErrorHandler((error, request, reply) => { + if (error.validation) { + reply.code(400).send({ + error: 'Validation Error', + message: 'Request validation failed', + details: error.validation.map((err) => ({ + field: err.instancePath || err.params?.missingProperty, + message: err.message, + keyword: err.keyword, + })), + }); + return; + } + + // Handle other errors + reply.code(error.statusCode || 500).send({ + error: error.name, + message: error.message, + }); +}); +``` + +## Schema Compiler Options + +Configure the Ajv schema compiler: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + ajv: { + customOptions: { + removeAdditional: 'all', // Remove extra properties + useDefaults: true, // Apply default values + coerceTypes: true, // Coerce types + allErrors: true, // Report all errors, not just first + }, + plugins: [ + require('ajv-formats'), // Add format validators + ], + }, +}); +``` + +## Nullable Fields + +Handle nullable fields properly: + +```typescript +app.addSchema({ + $id: 'profile', + type: 'object', + properties: { + name: { type: 'string' }, + bio: { type: ['string', 'null'] }, // Can be string or null + avatar: { + oneOf: [ + { type: 'string', format: 'uri' }, + { type: 'null' }, + ], + }, + }, +}); +``` + +## 
Conditional Validation + +Use if/then/else for conditional validation: + +```typescript +app.addSchema({ + $id: 'payment', + type: 'object', + properties: { + method: { type: 'string', enum: ['card', 'bank'] }, + cardNumber: { type: 'string' }, + bankAccount: { type: 'string' }, + }, + required: ['method'], + if: { + properties: { method: { const: 'card' } }, + }, + then: { + required: ['cardNumber'], + }, + else: { + required: ['bankAccount'], + }, +}); +``` + +## Schema Organization + +Organize schemas in a dedicated file: + +```typescript +// schemas/index.ts +export const schemas = [ + { + $id: 'user', + type: 'object', + properties: { + id: { type: 'string', format: 'uuid' }, + name: { type: 'string' }, + email: { type: 'string', format: 'email' }, + }, + }, + { + $id: 'error', + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + }, + }, +]; + +// app.ts +import { schemas } from './schemas/index.js'; + +for (const schema of schemas) { + app.addSchema(schema); +} +``` + +## OpenAPI/Swagger Integration + +Schemas work directly with @fastify/swagger: + +```typescript +import fastifySwagger from '@fastify/swagger'; +import fastifySwaggerUi from '@fastify/swagger-ui'; + +app.register(fastifySwagger, { + openapi: { + info: { + title: 'My API', + version: '1.0.0', + }, + }, +}); + +app.register(fastifySwaggerUi, { + routePrefix: '/docs', +}); + +// Schemas are automatically converted to OpenAPI definitions +``` + +## Performance Considerations + +Response schemas enable fast-json-stringify for serialization: + +```typescript +// With response schema - uses fast-json-stringify (faster) +app.get('/users', { + schema: { + response: { + 200: { + type: 'array', + items: { $ref: 'user#' }, + }, + }, + }, +}, handler); + +// Without response schema - uses JSON.stringify (slower) +app.get('/users-slow', handler); +``` + +Always define response schemas for production APIs to benefit from optimized 
serialization. diff --git a/.claude/skills/fastify-best-practices/rules/serialization.md b/.claude/skills/fastify-best-practices/rules/serialization.md new file mode 100644 index 0000000..88029b3 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/serialization.md @@ -0,0 +1,475 @@ +--- +name: serialization +description: Response serialization in Fastify with TypeBox +metadata: + tags: serialization, response, json, fast-json-stringify, typebox +--- + +# Response Serialization + +## Use TypeBox for Type-Safe Response Schemas + +Define response schemas with TypeBox for automatic TypeScript types and fast serialization: + +```typescript +import Fastify from 'fastify'; +import { Type, type Static } from '@sinclair/typebox'; + +const app = Fastify(); + +// Define response schema with TypeBox +const UserResponse = Type.Object({ + id: Type.String(), + name: Type.String(), + email: Type.String(), +}); + +const UsersResponse = Type.Array(UserResponse); + +type UserResponseType = Static; + +// With TypeBox schema - uses fast-json-stringify (faster) + TypeScript types +app.get<{ Reply: Static }>('/users', { + schema: { + response: { + 200: UsersResponse, + }, + }, +}, async () => { + return db.users.findAll(); +}); + +// Without schema - uses JSON.stringify (slower), no type safety +app.get('/users-slow', async () => { + return db.users.findAll(); +}); +``` + +## Fast JSON Stringify + +Fastify uses `fast-json-stringify` when response schemas are defined. This provides: + +1. **Performance**: 2-3x faster serialization than JSON.stringify +2. **Security**: Only defined properties are serialized (strips sensitive data) +3. **Type coercion**: Ensures output matches the schema +4. **TypeScript**: Full type inference with TypeBox + +## Response Schema Benefits + +1. **Performance**: 2-3x faster serialization +2. **Security**: Only defined properties are included +3. **Documentation**: OpenAPI/Swagger integration +4. 
**Type coercion**: Ensures correct output types + +```typescript +app.get('/user/:id', { + schema: { + response: { + 200: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + // password is NOT in schema, so it's stripped + }, + }, + }, + }, +}, async (request) => { + const user = await db.users.findById(request.params.id); + // Even if user has password field, it won't be serialized + return user; +}); +``` + +## Multiple Status Code Schemas + +Define schemas for different response codes: + +```typescript +app.get('/users/:id', { + schema: { + response: { + 200: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + email: { type: 'string' }, + }, + }, + 404: { + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + }, + }, + }, + }, +}, async (request, reply) => { + const user = await db.users.findById(request.params.id); + + if (!user) { + reply.code(404); + return { statusCode: 404, error: 'Not Found', message: 'User not found' }; + } + + return user; +}); +``` + +## Default Response Schema + +Use 'default' for common error responses: + +```typescript +app.get('/resource', { + schema: { + response: { + 200: { $ref: 'resource#' }, + '4xx': { + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + message: { type: 'string' }, + }, + }, + '5xx': { + type: 'object', + properties: { + statusCode: { type: 'integer' }, + error: { type: 'string' }, + }, + }, + }, + }, +}, handler); +``` + +## Custom Serializers + +Create custom serialization functions: + +```typescript +// Per-route serializer +app.get('/custom', { + schema: { + response: { + 200: { + type: 'object', + properties: { + value: { type: 'string' }, + }, + }, + }, + }, + serializerCompiler: ({ schema }) => { + return (data) => { + // Custom serialization logic + return JSON.stringify({ + value: 
String(data.value).toUpperCase(), + serializedAt: new Date().toISOString(), + }); + }; + }, +}, async () => { + return { value: 'hello' }; +}); +``` + +## Shared Serializers + +Use the global serializer compiler: + +```typescript +import Fastify from 'fastify'; + +const app = Fastify({ + serializerCompiler: ({ schema, method, url, httpStatus }) => { + // Custom compilation logic + const stringify = fastJson(schema); + return (data) => stringify(data); + }, +}); +``` + +## Serialization with Type Coercion + +fast-json-stringify coerces types: + +```typescript +app.get('/data', { + schema: { + response: { + 200: { + type: 'object', + properties: { + count: { type: 'integer' }, // '5' -> 5 + active: { type: 'boolean' }, // 'true' -> true + tags: { + type: 'array', + items: { type: 'string' }, // [1, 2] -> ['1', '2'] + }, + }, + }, + }, + }, +}, async () => { + return { + count: '5', // Coerced to integer + active: 'true', // Coerced to boolean + tags: [1, 2, 3], // Coerced to strings + }; +}); +``` + +## Nullable Fields + +Handle nullable fields properly: + +```typescript +app.get('/profile', { + schema: { + response: { + 200: { + type: 'object', + properties: { + name: { type: 'string' }, + bio: { type: ['string', 'null'] }, + avatar: { + oneOf: [ + { type: 'string', format: 'uri' }, + { type: 'null' }, + ], + }, + }, + }, + }, + }, +}, async () => { + return { + name: 'John', + bio: null, + avatar: null, + }; +}); +``` + +## Additional Properties + +Control extra properties in response: + +```typescript +// Strip additional properties (default) +app.get('/strict', { + schema: { + response: { + 200: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + }, + additionalProperties: false, + }, + }, + }, +}, async () => { + return { id: '1', name: 'John', secret: 'hidden' }; + // Output: { "id": "1", "name": "John" } +}); + +// Allow additional properties +app.get('/flexible', { + schema: { + response: { + 200: { + type: 'object', + 
properties: { + id: { type: 'string' }, + }, + additionalProperties: true, + }, + }, + }, +}, async () => { + return { id: '1', extra: 'included' }; + // Output: { "id": "1", "extra": "included" } +}); +``` + +## Nested Objects + +Serialize nested structures: + +```typescript +app.addSchema({ + $id: 'address', + type: 'object', + properties: { + street: { type: 'string' }, + city: { type: 'string' }, + country: { type: 'string' }, + }, +}); + +app.get('/user', { + schema: { + response: { + 200: { + type: 'object', + properties: { + name: { type: 'string' }, + address: { $ref: 'address#' }, + contacts: { + type: 'array', + items: { + type: 'object', + properties: { + type: { type: 'string' }, + value: { type: 'string' }, + }, + }, + }, + }, + }, + }, + }, +}, async () => { + return { + name: 'John', + address: { street: '123 Main', city: 'Boston', country: 'USA' }, + contacts: [ + { type: 'email', value: 'john@example.com' }, + { type: 'phone', value: '+1234567890' }, + ], + }; +}); +``` + +## Date Serialization + +Handle dates consistently: + +```typescript +app.get('/events', { + schema: { + response: { + 200: { + type: 'array', + items: { + type: 'object', + properties: { + name: { type: 'string' }, + date: { type: 'string', format: 'date-time' }, + }, + }, + }, + }, + }, +}, async () => { + const events = await db.events.findAll(); + + // Convert Date objects to ISO strings + return events.map((e) => ({ + ...e, + date: e.date.toISOString(), + })); +}); +``` + +## BigInt Serialization + +Handle BigInt values: + +```typescript +// BigInt is not JSON serializable by default +app.get('/large-number', { + schema: { + response: { + 200: { + type: 'object', + properties: { + id: { type: 'string' }, // Serialize as string + count: { type: 'integer' }, + }, + }, + }, + }, +}, async () => { + const bigValue = 9007199254740993n; + + return { + id: bigValue.toString(), // Convert to string + count: Number(bigValue), // Or number if safe + }; +}); +``` + +## Stream Responses 
+ +Stream responses bypass serialization: + +```typescript +import { createReadStream } from 'node:fs'; + +app.get('/file', async (request, reply) => { + const stream = createReadStream('./data.json'); + reply.type('application/json'); + return reply.send(stream); +}); + +// Streaming JSON array +app.get('/stream', async (request, reply) => { + reply.type('application/json'); + + const cursor = db.users.findCursor(); + + reply.raw.write('['); + let first = true; + + for await (const user of cursor) { + if (!first) reply.raw.write(','); + reply.raw.write(JSON.stringify(user)); + first = false; + } + + reply.raw.write(']'); + reply.raw.end(); +}); +``` + +## Pre-Serialization Hook + +Modify data before serialization: + +```typescript +app.addHook('preSerialization', async (request, reply, payload) => { + // Add metadata to responses + if (payload && typeof payload === 'object' && !Array.isArray(payload)) { + return { + ...payload, + _links: { + self: request.url, + }, + }; + } + return payload; +}); +``` + +## Disable Serialization + +Skip serialization for specific routes: + +```typescript +app.get('/raw', async (request, reply) => { + const data = JSON.stringify({ raw: true }); + reply.type('application/json'); + reply.serializer((payload) => payload); // Pass through + return data; +}); +``` diff --git a/.claude/skills/fastify-best-practices/rules/testing.md b/.claude/skills/fastify-best-practices/rules/testing.md new file mode 100644 index 0000000..beed39c --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/testing.md @@ -0,0 +1,536 @@ +--- +name: testing +description: Testing Fastify applications with inject() +metadata: + tags: testing, inject, node-test, integration, unit +--- + +# Testing Fastify Applications + +## Using inject() for Request Testing + +Fastify's `inject()` method simulates HTTP requests without network overhead: + +```typescript +import { describe, it, before, after } from 'node:test'; +import Fastify from 'fastify'; +import { 
buildApp } from './app.js'; + +describe('User API', () => { + let app; + + before(async () => { + app = await buildApp(); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + it('should return users list', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users', + }); + + t.assert.equal(response.statusCode, 200); + t.assert.equal(response.headers['content-type'], 'application/json; charset=utf-8'); + + const body = response.json(); + t.assert.ok(Array.isArray(body.users)); + }); + + it('should create a user', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { + name: 'John Doe', + email: 'john@example.com', + }, + }); + + t.assert.equal(response.statusCode, 201); + + const body = response.json(); + t.assert.equal(body.name, 'John Doe'); + t.assert.ok(body.id); + }); +}); +``` + +## Testing with Headers and Authentication + +Test authenticated endpoints: + +```typescript +describe('Protected Routes', () => { + let app; + let authToken; + + before(async () => { + app = await buildApp(); + await app.ready(); + + // Get auth token + const loginResponse = await app.inject({ + method: 'POST', + url: '/auth/login', + payload: { + email: 'test@example.com', + password: 'password123', + }, + }); + + authToken = loginResponse.json().token; + }); + + after(async () => { + await app.close(); + }); + + it('should reject unauthenticated requests', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/profile', + }); + + t.assert.equal(response.statusCode, 401); + }); + + it('should return profile for authenticated user', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/profile', + headers: { + authorization: `Bearer ${authToken}`, + }, + }); + + t.assert.equal(response.statusCode, 200); + t.assert.equal(response.json().email, 'test@example.com'); + }); +}); +``` + +## Testing Query Parameters + +Test routes with query 
strings: + +```typescript +it('should filter users by status', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users', + query: { + status: 'active', + page: '1', + limit: '10', + }, + }); + + t.assert.equal(response.statusCode, 200); + const body = response.json(); + t.assert.ok(body.users.every((u) => u.status === 'active')); +}); + +// Or use URL with query string +it('should search users', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users?q=john&sort=name', + }); + + t.assert.equal(response.statusCode, 200); +}); +``` + +## Testing URL Parameters + +Test routes with path parameters: + +```typescript +it('should return user by id', async (t) => { + const userId = 'user-123'; + + const response = await app.inject({ + method: 'GET', + url: `/users/${userId}`, + }); + + t.assert.equal(response.statusCode, 200); + t.assert.equal(response.json().id, userId); +}); + +it('should return 404 for non-existent user', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users/non-existent', + }); + + t.assert.equal(response.statusCode, 404); +}); +``` + +## Testing Validation Errors + +Test schema validation: + +```typescript +describe('Validation', () => { + it('should reject invalid email', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { + name: 'John', + email: 'not-an-email', + }, + }); + + t.assert.equal(response.statusCode, 400); + const body = response.json(); + t.assert.ok(body.message.includes('email')); + }); + + it('should reject missing required fields', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { + name: 'John', + // missing email + }, + }); + + t.assert.equal(response.statusCode, 400); + }); + + it('should coerce query parameters', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/items?limit=10&active=true', + }); + + 
t.assert.equal(response.statusCode, 200); + // limit is coerced to number, active to boolean + }); +}); +``` + +## Testing File Uploads + +Test multipart form data: + +```typescript +import { createReadStream } from 'node:fs'; +import FormData from 'form-data'; + +it('should upload file', async (t) => { + const form = new FormData(); + form.append('file', createReadStream('./test/fixtures/test.pdf')); + form.append('name', 'test-document'); + + const response = await app.inject({ + method: 'POST', + url: '/upload', + payload: form, + headers: form.getHeaders(), + }); + + t.assert.equal(response.statusCode, 200); + t.assert.ok(response.json().fileId); +}); +``` + +## Testing Streams + +Test streaming responses: + +```typescript +it('should stream large file', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/files/large-file', + }); + + t.assert.equal(response.statusCode, 200); + t.assert.ok(response.rawPayload.length > 0); +}); +``` + +## Mocking Dependencies + +Mock external services and databases: + +```typescript +import { describe, it, before, after, mock } from 'node:test'; + +describe('User Service', () => { + let app; + + before(async () => { + // Create app with mocked dependencies + const mockDb = { + users: { + findAll: mock.fn(async () => [ + { id: '1', name: 'User 1' }, + { id: '2', name: 'User 2' }, + ]), + findById: mock.fn(async (id) => { + if (id === '1') return { id: '1', name: 'User 1' }; + return null; + }), + create: mock.fn(async (data) => ({ id: 'new-id', ...data })), + }, + }; + + app = Fastify(); + app.decorate('db', mockDb); + app.register(import('./routes/users.js')); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + it('should call findAll', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/users', + }); + + t.assert.equal(response.statusCode, 200); + t.assert.equal(app.db.users.findAll.mock.calls.length, 1); + }); +}); +``` + +## Testing Plugins in 
Isolation + +Test plugins independently: + +```typescript +import { describe, it, before, after } from 'node:test'; +import Fastify from 'fastify'; +import cachePlugin from './plugins/cache.js'; + +describe('Cache Plugin', () => { + let app; + + before(async () => { + app = Fastify(); + app.register(cachePlugin, { ttl: 1000 }); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + it('should decorate fastify with cache', (t) => { + t.assert.ok(app.hasDecorator('cache')); + t.assert.equal(typeof app.cache.get, 'function'); + t.assert.equal(typeof app.cache.set, 'function'); + }); + + it('should cache and retrieve values', (t) => { + app.cache.set('key', 'value'); + t.assert.equal(app.cache.get('key'), 'value'); + }); +}); +``` + +## Testing Hooks + +Test hook behavior: + +```typescript +describe('Hooks', () => { + it('should add request id header', async (t) => { + const response = await app.inject({ + method: 'GET', + url: '/health', + }); + + t.assert.ok(response.headers['x-request-id']); + }); + + it('should log request timing', async (t) => { + const logs = []; + const app = Fastify({ + logger: { + level: 'info', + stream: { + write: (msg) => logs.push(JSON.parse(msg)), + }, + }, + }); + + app.register(import('./app.js')); + await app.ready(); + + await app.inject({ method: 'GET', url: '/health' }); + + const responseLog = logs.find((l) => l.msg?.includes('completed')); + t.assert.ok(responseLog); + t.assert.ok(responseLog.responseTime); + + await app.close(); + }); +}); +``` + +## Test Factory Pattern + +Create a reusable test app builder: + +```typescript +// test/helper.ts +import Fastify from 'fastify'; +import type { FastifyInstance } from 'fastify'; + +interface TestContext { + app: FastifyInstance; + inject: FastifyInstance['inject']; +} + +export async function buildTestApp(options = {}): Promise { + const app = Fastify({ + logger: false, // Disable logging in tests + ...options, + }); + + // Register plugins + 
app.register(import('../src/plugins/database.js'), { + connectionString: process.env.TEST_DATABASE_URL, + }); + app.register(import('../src/routes/index.js')); + + await app.ready(); + + return { + app, + inject: app.inject.bind(app), + }; +} + +// Usage in tests +describe('API Tests', () => { + let ctx: TestContext; + + before(async () => { + ctx = await buildTestApp(); + }); + + after(async () => { + await ctx.app.close(); + }); + + it('should work', async (t) => { + const response = await ctx.inject({ + method: 'GET', + url: '/health', + }); + t.assert.equal(response.statusCode, 200); + }); +}); +``` + +## Database Testing with Transactions + +Use transactions for test isolation: + +```typescript +describe('Database Integration', () => { + let app; + let transaction; + + before(async () => { + app = await buildApp(); + await app.ready(); + }); + + after(async () => { + await app.close(); + }); + + beforeEach(async () => { + transaction = await app.db.beginTransaction(); + app.db.setTransaction(transaction); + }); + + afterEach(async () => { + await transaction.rollback(); + }); + + it('should create user', async (t) => { + const response = await app.inject({ + method: 'POST', + url: '/users', + payload: { name: 'Test', email: 'test@example.com' }, + }); + + t.assert.equal(response.statusCode, 201); + // Transaction is rolled back after test + }); +}); +``` + +## Parallel Test Execution + +Structure tests for parallel execution: + +```typescript +// Tests run in parallel by default with node:test +// Use separate app instances or proper isolation + +import { describe, it } from 'node:test'; + +describe('User API', async () => { + // Each test suite gets its own app instance + const app = await buildTestApp(); + + it('test 1', async (t) => { + // ... + }); + + it('test 2', async (t) => { + // ... 
+ }); + + // Cleanup after all tests in this suite + after(() => app.close()); +}); + +describe('Post API', async () => { + const app = await buildTestApp(); + + it('test 1', async (t) => { + // ... + }); + + after(() => app.close()); +}); +``` + +## Running Tests + +```bash +# Run all tests +node --test + +# Run with TypeScript +node --test src/**/*.test.ts + +# Run specific file +node --test src/routes/users.test.ts + +# With coverage +node --test --experimental-test-coverage + +# Watch mode +node --test --watch +``` diff --git a/.claude/skills/fastify-best-practices/rules/typescript.md b/.claude/skills/fastify-best-practices/rules/typescript.md new file mode 100644 index 0000000..b948478 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/typescript.md @@ -0,0 +1,458 @@ +--- +name: typescript +description: TypeScript integration with Fastify +metadata: + tags: typescript, types, generics, type-safety +--- + +# TypeScript Integration + +## Type Stripping with Node.js + +Use Node.js built-in type stripping (Node.js 22.6+): + +```bash +# Run TypeScript directly +node --experimental-strip-types app.ts + +# In Node.js 23+ +node app.ts +``` + +```json +// package.json +{ + "type": "module", + "scripts": { + "start": "node app.ts", + "dev": "node --watch app.ts" + } +} +``` + +```typescript +// tsconfig.json for type stripping +{ + "compilerOptions": { + "target": "ESNext", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "verbatimModuleSyntax": true, + "erasableSyntaxOnly": true, + "noEmit": true, + "strict": true + } +} +``` + +## Basic Type Safety + +Type your Fastify application: + +```typescript +import Fastify, { type FastifyInstance, type FastifyRequest, type FastifyReply } from 'fastify'; + +const app: FastifyInstance = Fastify({ logger: true }); + +app.get('/health', async (request: FastifyRequest, reply: FastifyReply) => { + return { status: 'ok' }; +}); + +await app.listen({ port: 3000 }); +``` + +## Typing Route Handlers + +Use 
generics to type request parts: + +```typescript +import type { FastifyRequest, FastifyReply } from 'fastify'; + +interface CreateUserBody { + name: string; + email: string; +} + +interface UserParams { + id: string; +} + +interface UserQuery { + include?: string; +} + +// Type the request with generics +app.post<{ + Body: CreateUserBody; +}>('/users', async (request, reply) => { + const { name, email } = request.body; // Fully typed + return { name, email }; +}); + +app.get<{ + Params: UserParams; + Querystring: UserQuery; +}>('/users/:id', async (request) => { + const { id } = request.params; // string + const { include } = request.query; // string | undefined + return { id, include }; +}); + +// Full route options typing +app.route<{ + Params: UserParams; + Querystring: UserQuery; + Body: CreateUserBody; + Reply: { user: { id: string; name: string } }; +}>({ + method: 'PUT', + url: '/users/:id', + handler: async (request, reply) => { + return { user: { id: request.params.id, name: request.body.name } }; + }, +}); +``` + +## Type Providers + +Use @fastify/type-provider-typebox for runtime + compile-time safety: + +```typescript +import Fastify from 'fastify'; +import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; +import { Type } from '@sinclair/typebox'; + +const app = Fastify().withTypeProvider(); + +const UserSchema = Type.Object({ + id: Type.String(), + name: Type.String(), + email: Type.String({ format: 'email' }), +}); + +const CreateUserSchema = Type.Object({ + name: Type.String({ minLength: 1 }), + email: Type.String({ format: 'email' }), +}); + +app.post('/users', { + schema: { + body: CreateUserSchema, + response: { + 201: UserSchema, + }, + }, +}, async (request, reply) => { + // request.body is typed as { name: string; email: string } + const { name, email } = request.body; + + reply.code(201); + return { id: 'generated', name, email }; +}); +``` + +## Typing Decorators + +Extend Fastify types with declaration merging: + +```typescript 
+import Fastify from 'fastify'; + +// Declare types for decorators +declare module 'fastify' { + interface FastifyInstance { + config: { + port: number; + host: string; + }; + db: Database; + } + + interface FastifyRequest { + user?: { + id: string; + email: string; + role: string; + }; + startTime: number; + } + + interface FastifyReply { + sendSuccess: (data: unknown) => void; + } +} + +const app = Fastify(); + +// Add decorators +app.decorate('config', { port: 3000, host: 'localhost' }); +app.decorate('db', new Database()); + +app.decorateRequest('user', null); +app.decorateRequest('startTime', 0); + +app.decorateReply('sendSuccess', function (data: unknown) { + this.send({ success: true, data }); +}); + +// Now fully typed +app.get('/profile', async (request, reply) => { + const user = request.user; // { id: string; email: string; role: string } | undefined + const config = app.config; // { port: number; host: string } + + reply.sendSuccess({ user }); +}); +``` + +## Typing Plugins + +Type plugin options and exports: + +```typescript +import fp from 'fastify-plugin'; +import type { FastifyPluginAsync } from 'fastify'; + +interface DatabasePluginOptions { + connectionString: string; + poolSize?: number; +} + +declare module 'fastify' { + interface FastifyInstance { + db: { + query: (sql: string, params?: unknown[]) => Promise; + close: () => Promise; + }; + } +} + +const databasePlugin: FastifyPluginAsync = async ( + fastify, + options, +) => { + const { connectionString, poolSize = 10 } = options; + + const db = await createConnection(connectionString, poolSize); + + fastify.decorate('db', { + query: (sql: string, params?: unknown[]) => db.query(sql, params), + close: () => db.end(), + }); + + fastify.addHook('onClose', async () => { + await db.end(); + }); +}; + +export default fp(databasePlugin, { + name: 'database', +}); +``` + +## Typing Hooks + +Type hook functions: + +```typescript +import type { + FastifyRequest, + FastifyReply, + onRequestHookHandler, + 
preHandlerHookHandler, +} from 'fastify'; + +const authHook: preHandlerHookHandler = async ( + request: FastifyRequest, + reply: FastifyReply, +) => { + const token = request.headers.authorization; + if (!token) { + reply.code(401).send({ error: 'Unauthorized' }); + return; + } + request.user = await verifyToken(token); +}; + +const timingHook: onRequestHookHandler = async (request) => { + request.startTime = Date.now(); +}; + +app.addHook('onRequest', timingHook); +app.addHook('preHandler', authHook); +``` + +## Typing Schema Objects + +Create reusable typed schemas: + +```typescript +import type { JSONSchema7 } from 'json-schema'; + +// Define schema with const assertion for type inference +const userSchema = { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + email: { type: 'string', format: 'email' }, + }, + required: ['id', 'name', 'email'], +} as const satisfies JSONSchema7; + +// Infer TypeScript type from schema +type User = { + id: string; + name: string; + email: string; +}; + +app.get<{ Reply: User }>('/users/:id', { + schema: { + response: { + 200: userSchema, + }, + }, +}, async (request) => { + return { id: '1', name: 'John', email: 'john@example.com' }; +}); +``` + +## Shared Types + +Organize types in dedicated files: + +```typescript +// types/index.ts +export interface User { + id: string; + name: string; + email: string; + role: 'admin' | 'user'; +} + +export interface CreateUserInput { + name: string; + email: string; +} + +export interface PaginationQuery { + page?: number; + limit?: number; + sort?: string; +} + +// routes/users.ts +import type { FastifyInstance } from 'fastify'; +import type { User, CreateUserInput, PaginationQuery } from '../types/index.js'; + +export default async function userRoutes(fastify: FastifyInstance) { + fastify.get<{ + Querystring: PaginationQuery; + Reply: { users: User[]; total: number }; + }>('/', async (request) => { + const { page = 1, limit = 10 } = request.query; + // 
... + }); + + fastify.post<{ + Body: CreateUserInput; + Reply: User; + }>('/', async (request, reply) => { + reply.code(201); + // ... + }); +} +``` + +## Type-Safe Route Registration + +Create typed route factories: + +```typescript +import type { FastifyInstance, RouteOptions } from 'fastify'; + +function createCrudRoutes( + fastify: FastifyInstance, + options: { + prefix: string; + schema: { + item: object; + create: object; + update: object; + }; + handlers: { + list: () => Promise; + get: (id: string) => Promise; + create: (data: unknown) => Promise; + update: (id: string, data: unknown) => Promise; + delete: (id: string) => Promise; + }; + }, +) { + const { prefix, schema, handlers } = options; + + fastify.get(`${prefix}`, { + schema: { response: { 200: { type: 'array', items: schema.item } } }, + }, async () => handlers.list()); + + fastify.get(`${prefix}/:id`, { + schema: { response: { 200: schema.item } }, + }, async (request) => { + const item = await handlers.get((request.params as { id: string }).id); + if (!item) throw { statusCode: 404, message: 'Not found' }; + return item; + }); + + // ... more routes +} +``` + +## Avoiding Type Gymnastics + +Keep types simple and practical: + +```typescript +// GOOD - simple, readable types +interface UserRequest { + Params: { id: string }; + Body: { name: string }; +} + +app.put('/users/:id', handler); + +// AVOID - overly complex generic types +type DeepPartial = T extends object ? { + [P in keyof T]?: DeepPartial; +} : T; + +// AVOID - excessive type inference +type InferSchemaType = T extends { properties: infer P } + ? 
{ [K in keyof P]: InferPropertyType } + : never; +``` + +## Type Checking Without Compilation + +Use TypeScript for type checking only: + +```bash +# Type check without emitting +npx tsc --noEmit + +# Watch mode +npx tsc --noEmit --watch + +# In CI +npm run typecheck +``` + +```json +// package.json +{ + "scripts": { + "start": "node app.ts", + "typecheck": "tsc --noEmit", + "test": "npm run typecheck && node --test" + } +} +``` diff --git a/.claude/skills/fastify-best-practices/rules/websockets.md b/.claude/skills/fastify-best-practices/rules/websockets.md new file mode 100644 index 0000000..3481570 --- /dev/null +++ b/.claude/skills/fastify-best-practices/rules/websockets.md @@ -0,0 +1,421 @@ +--- +name: websockets +description: WebSocket support in Fastify +metadata: + tags: websockets, realtime, ws, socket +--- + +# WebSocket Support + +## Using @fastify/websocket + +Add WebSocket support to Fastify: + +```typescript +import Fastify from 'fastify'; +import websocket from '@fastify/websocket'; + +const app = Fastify(); + +app.register(websocket); + +app.get('/ws', { websocket: true }, (socket, request) => { + socket.on('message', (message) => { + const data = message.toString(); + console.log('Received:', data); + + // Echo back + socket.send(`Echo: ${data}`); + }); + + socket.on('close', () => { + console.log('Client disconnected'); + }); + + socket.on('error', (error) => { + console.error('WebSocket error:', error); + }); +}); + +await app.listen({ port: 3000 }); +``` + +## WebSocket with Hooks + +Use Fastify hooks with WebSocket routes: + +```typescript +app.register(async function wsRoutes(fastify) { + // This hook runs before WebSocket upgrade + fastify.addHook('preValidation', async (request, reply) => { + const token = request.headers.authorization; + if (!token) { + reply.code(401).send({ error: 'Unauthorized' }); + return; + } + request.user = await verifyToken(token); + }); + + fastify.get('/ws', { websocket: true }, (socket, request) => { + 
console.log('Connected user:', request.user.id); + + socket.on('message', (message) => { + // Handle authenticated messages + }); + }); +}); +``` + +## Connection Options + +Configure WebSocket server options: + +```typescript +app.register(websocket, { + options: { + maxPayload: 1048576, // 1MB max message size + clientTracking: true, + perMessageDeflate: { + zlibDeflateOptions: { + chunkSize: 1024, + memLevel: 7, + level: 3, + }, + zlibInflateOptions: { + chunkSize: 10 * 1024, + }, + }, + }, +}); +``` + +## Broadcast to All Clients + +Broadcast messages to connected clients: + +```typescript +const clients = new Set(); + +app.get('/ws', { websocket: true }, (socket, request) => { + clients.add(socket); + + socket.on('close', () => { + clients.delete(socket); + }); + + socket.on('message', (message) => { + // Broadcast to all other clients + for (const client of clients) { + if (client !== socket && client.readyState === WebSocket.OPEN) { + client.send(message); + } + } + }); +}); + +// Broadcast from HTTP route +app.post('/broadcast', async (request) => { + const { message } = request.body; + + for (const client of clients) { + if (client.readyState === WebSocket.OPEN) { + client.send(JSON.stringify({ type: 'broadcast', message })); + } + } + + return { sent: clients.size }; +}); +``` + +## Rooms/Channels Pattern + +Organize connections into rooms: + +```typescript +const rooms = new Map>(); + +function joinRoom(socket: WebSocket, roomId: string) { + if (!rooms.has(roomId)) { + rooms.set(roomId, new Set()); + } + rooms.get(roomId)!.add(socket); +} + +function leaveRoom(socket: WebSocket, roomId: string) { + rooms.get(roomId)?.delete(socket); + if (rooms.get(roomId)?.size === 0) { + rooms.delete(roomId); + } +} + +function broadcastToRoom(roomId: string, message: string, exclude?: WebSocket) { + const room = rooms.get(roomId); + if (!room) return; + + for (const client of room) { + if (client !== exclude && client.readyState === WebSocket.OPEN) { + 
client.send(message); + } + } +} + +app.get('/ws/:roomId', { websocket: true }, (socket, request) => { + const { roomId } = request.params as { roomId: string }; + + joinRoom(socket, roomId); + + socket.on('message', (message) => { + broadcastToRoom(roomId, message.toString(), socket); + }); + + socket.on('close', () => { + leaveRoom(socket, roomId); + }); +}); +``` + +## Structured Message Protocol + +Use JSON for structured messages: + +```typescript +interface WSMessage { + type: string; + payload?: unknown; + id?: string; +} + +app.get('/ws', { websocket: true }, (socket, request) => { + function send(message: WSMessage) { + socket.send(JSON.stringify(message)); + } + + socket.on('message', (raw) => { + let message: WSMessage; + + try { + message = JSON.parse(raw.toString()); + } catch { + send({ type: 'error', payload: 'Invalid JSON' }); + return; + } + + switch (message.type) { + case 'ping': + send({ type: 'pong', id: message.id }); + break; + + case 'subscribe': + handleSubscribe(socket, message.payload); + send({ type: 'subscribed', payload: message.payload, id: message.id }); + break; + + case 'message': + handleMessage(socket, message.payload); + break; + + default: + send({ type: 'error', payload: 'Unknown message type' }); + } + }); +}); +``` + +## Heartbeat/Ping-Pong + +Keep connections alive: + +```typescript +const HEARTBEAT_INTERVAL = 30000; +const clients = new Map(); + +app.get('/ws', { websocket: true }, (socket, request) => { + clients.set(socket, { isAlive: true }); + + socket.on('pong', () => { + const client = clients.get(socket); + if (client) client.isAlive = true; + }); + + socket.on('close', () => { + clients.delete(socket); + }); +}); + +// Heartbeat interval +setInterval(() => { + for (const [socket, state] of clients) { + if (!state.isAlive) { + socket.terminate(); + clients.delete(socket); + continue; + } + + state.isAlive = false; + socket.ping(); + } +}, HEARTBEAT_INTERVAL); +``` + +## Authentication + +Authenticate WebSocket 
connections: + +```typescript +app.get('/ws', { + websocket: true, + preValidation: async (request, reply) => { + // Authenticate via query parameter or header + const token = request.query.token || request.headers.authorization?.replace('Bearer ', ''); + + if (!token) { + reply.code(401).send({ error: 'Token required' }); + return; + } + + try { + request.user = await verifyToken(token); + } catch { + reply.code(401).send({ error: 'Invalid token' }); + } + }, +}, (socket, request) => { + console.log('Authenticated user:', request.user); + + socket.on('message', (message) => { + // Handle authenticated messages + }); +}); +``` + +## Error Handling + +Handle WebSocket errors properly: + +```typescript +app.get('/ws', { websocket: true }, (socket, request) => { + socket.on('error', (error) => { + request.log.error({ err: error }, 'WebSocket error'); + }); + + socket.on('message', async (raw) => { + try { + const message = JSON.parse(raw.toString()); + const result = await processMessage(message); + socket.send(JSON.stringify({ success: true, result })); + } catch (error) { + request.log.error({ err: error }, 'Message processing error'); + socket.send(JSON.stringify({ + success: false, + error: error.message, + })); + } + }); +}); +``` + +## Rate Limiting WebSocket Messages + +Limit message frequency: + +```typescript +const rateLimits = new Map(); + +function checkRateLimit(socket: WebSocket, limit: number, window: number): boolean { + const now = Date.now(); + let state = rateLimits.get(socket); + + if (!state || now > state.resetAt) { + state = { count: 0, resetAt: now + window }; + rateLimits.set(socket, state); + } + + state.count++; + + if (state.count > limit) { + return false; + } + + return true; +} + +app.get('/ws', { websocket: true }, (socket, request) => { + socket.on('message', (message) => { + if (!checkRateLimit(socket, 100, 60000)) { + socket.send(JSON.stringify({ error: 'Rate limit exceeded' })); + return; + } + + // Process message + }); + + 
socket.on('close', () => { + rateLimits.delete(socket); + }); +}); +``` + +## Graceful Shutdown + +Close WebSocket connections on shutdown: + +```typescript +import closeWithGrace from 'close-with-grace'; + +const connections = new Set(); + +app.get('/ws', { websocket: true }, (socket, request) => { + connections.add(socket); + + socket.on('close', () => { + connections.delete(socket); + }); +}); + +closeWithGrace({ delay: 5000 }, async ({ signal }) => { + // Notify clients + for (const socket of connections) { + if (socket.readyState === WebSocket.OPEN) { + socket.send(JSON.stringify({ type: 'shutdown', message: 'Server is shutting down' })); + socket.close(1001, 'Server shutdown'); + } + } + + await app.close(); +}); +``` + +## Full-Duplex Stream Pattern + +Use WebSocket for streaming data: + +```typescript +app.get('/ws/stream', { websocket: true }, async (socket, request) => { + const stream = createDataStream(); + + stream.on('data', (data) => { + if (socket.readyState === WebSocket.OPEN) { + socket.send(JSON.stringify({ type: 'data', payload: data })); + } + }); + + stream.on('end', () => { + socket.send(JSON.stringify({ type: 'end' })); + socket.close(); + }); + + socket.on('message', (message) => { + const { type, payload } = JSON.parse(message.toString()); + + if (type === 'pause') { + stream.pause(); + } else if (type === 'resume') { + stream.resume(); + } + }); + + socket.on('close', () => { + stream.destroy(); + }); +}); +``` diff --git a/.claude/skills/fastify-best-practices/tile.json b/.claude/skills/fastify-best-practices/tile.json new file mode 100644 index 0000000..56a442a --- /dev/null +++ b/.claude/skills/fastify-best-practices/tile.json @@ -0,0 +1,11 @@ +{ + "name": "mcollina/fastify-best-practices", + "version": "0.1.0", + "private": false, + "summary": "Guides development of Fastify Node.js backend servers and REST APIs using TypeScript or JavaScript. 
Use when building, configuring, or debugging a Fastify application — including defining routes, implementing plugins, setting up JSON Schema validation, handling errors, optimising performance, managing authentication, configuring CORS and security headers, integrating databases, working with WebSockets, and deploying to production. Covers the full Fastify request lifecycle (hooks, serialization, logging with Pino) and TypeScript integration via strip types. Trigger terms: Fastify, Node.js server, REST API, API routes, backend framework, fastify.config, server.ts, app.ts.", + "skills": { + "fastify-best-practices": { + "path": "SKILL.md" + } + } +} diff --git a/.claude/skills/fastify-typescript/SKILL.md b/.claude/skills/fastify-typescript/SKILL.md new file mode 100644 index 0000000..10e673e --- /dev/null +++ b/.claude/skills/fastify-typescript/SKILL.md @@ -0,0 +1,244 @@ +--- +name: fastify-typescript +description: Guidelines for building high-performance APIs with Fastify and TypeScript, covering validation, Prisma integration, and testing best practices +--- + +# Fastify TypeScript Development + +You are an expert in Fastify and TypeScript development with deep knowledge of building high-performance, type-safe APIs. 
+ +## TypeScript General Guidelines + +### Basic Principles + +- Use English for all code and documentation +- Always declare types for variables and functions (parameters and return values) +- Avoid using `any` type - create necessary types instead +- Use JSDoc to document public classes and methods +- Write concise, maintainable, and technically accurate code +- Use functional and declarative programming patterns; avoid classes +- Prefer iteration and modularization to adhere to DRY principles + +### Nomenclature + +- Use PascalCase for types and interfaces +- Use camelCase for variables, functions, and methods +- Use kebab-case for file and directory names +- Use UPPERCASE for environment variables +- Use descriptive variable names with auxiliary verbs: `isLoading`, `hasError`, `canDelete` +- Start each function with a verb + +### Functions + +- Write short functions with a single purpose +- Use arrow functions for simple operations +- Use async/await consistently throughout the codebase +- Use the RO-RO pattern (Receive an Object, Return an Object) for multiple parameters + +### Types and Interfaces + +- Prefer interfaces over types for object shapes +- Avoid enums; use maps or const objects instead +- Use Zod for runtime validation with inferred types +- Use `readonly` for immutable properties +- Use `import type` for type-only imports + +## Fastify-Specific Guidelines + +### Project Structure + +``` +src/ + routes/ + {resource}/ + index.ts + handlers.ts + schemas.ts + plugins/ + auth.ts + database.ts + cors.ts + services/ + {domain}Service.ts + repositories/ + {entity}Repository.ts + types/ + index.ts + utils/ + config/ + app.ts + server.ts +``` + +### Route Organization + +- Organize routes by resource/domain +- Use route plugins for modular registration +- Define schemas alongside route handlers +- Use route prefixes for API versioning + +```typescript +import { FastifyPluginAsync } from 'fastify'; + +const usersRoutes: FastifyPluginAsync = async (fastify) 
=> { + fastify.get('/', { schema: listUsersSchema }, listUsersHandler); + fastify.get('/:id', { schema: getUserSchema }, getUserHandler); + fastify.post('/', { schema: createUserSchema }, createUserHandler); + fastify.put('/:id', { schema: updateUserSchema }, updateUserHandler); + fastify.delete('/:id', { schema: deleteUserSchema }, deleteUserHandler); +}; + +export default usersRoutes; +``` + +### Schema Validation with JSON Schema / Ajv + +- Define JSON schemas for all request/response validation +- Use @sinclair/typebox for type-safe schema definitions +- Leverage Fastify's built-in Ajv integration + +```typescript +import { Type, Static } from '@sinclair/typebox'; + +const UserSchema = Type.Object({ + id: Type.String({ format: 'uuid' }), + name: Type.String({ minLength: 1 }), + email: Type.String({ format: 'email' }), + createdAt: Type.String({ format: 'date-time' }), +}); + +type User = Static<typeof UserSchema>; + +const createUserSchema = { + body: Type.Object({ + name: Type.String({ minLength: 1 }), + email: Type.String({ format: 'email' }), + }), + response: { + 201: UserSchema, + 400: ErrorSchema, + }, +}; +``` + +### Plugins and Decorators + +- Use plugins for shared functionality +- Decorate Fastify instance with services and utilities +- Register plugins with proper encapsulation + +```typescript +import fp from 'fastify-plugin'; + +const databasePlugin = fp(async (fastify) => { + const prisma = new PrismaClient(); + + await prisma.$connect(); + + fastify.decorate('prisma', prisma); + + fastify.addHook('onClose', async () => { + await prisma.$disconnect(); + }); +}); + +export default databasePlugin; +``` + +### Prisma Integration + +- Use Prisma as the ORM for database operations +- Create repository classes for data access +- Use transactions for complex operations + +```typescript +class UserRepository { + constructor(private prisma: PrismaClient) {} + + async findById(id: string): Promise<User | null> { + return this.prisma.user.findUnique({ where: { id } }); + } + + async 
create(data: CreateUserInput): Promise<User> { + return this.prisma.user.create({ data }); + } +} +``` + +### Error Handling + +- Use Fastify's built-in error handling +- Create custom error classes for domain errors +- Return consistent error responses + +```typescript +import { FastifyError } from 'fastify'; + +class NotFoundError extends Error implements FastifyError { + code = 'NOT_FOUND'; + statusCode = 404; + + constructor(resource: string, id: string) { + super(`${resource} with id ${id} not found`); + this.name = 'NotFoundError'; + } +} + +// Global error handler +fastify.setErrorHandler((error, request, reply) => { + const statusCode = error.statusCode || 500; + + reply.status(statusCode).send({ + error: error.name, + message: error.message, + statusCode, + }); +}); +``` + +### Testing with Jest + +- Write unit tests for services and handlers +- Use integration tests for routes +- Mock external dependencies + +```typescript +import { build } from '../app'; + +describe('Users API', () => { + let app: FastifyInstance; + + beforeAll(async () => { + app = await build(); + }); + + afterAll(async () => { + await app.close(); + }); + + it('should list users', async () => { + const response = await app.inject({ + method: 'GET', + url: '/api/users', + }); + + expect(response.statusCode).toBe(200); + expect(JSON.parse(response.payload)).toBeInstanceOf(Array); + }); +}); +``` + +### Performance + +- Fastify is one of the fastest Node.js frameworks +- Use schema validation for automatic serialization optimization +- Enable logging only when needed in production +- Use connection pooling for database connections + +### Security + +- Use @fastify/helmet for security headers +- Implement rate limiting with @fastify/rate-limit +- Use @fastify/cors for CORS configuration +- Validate all inputs with JSON Schema +- Use JWT for authentication with @fastify/jwt diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..5330ba2 --- /dev/null +++ b/.dockerignore @@ 
-0,0 +1,46 @@ +# Dependencies (reinstalled inside container) +node_modules/ + +# Build output (rebuilt inside container) +dist/ + +# Version control +.git/ + +# GSD agent artifacts +.gsd/ + +# Runtime data (mounted as volumes) +data/ +media/ + +# Environment files (secrets — not baked into image) +.env +.env.* +!.env.example + +# Test coverage +coverage/ + +# Logs +*.log + +# Temp / cache +tmp/ +.cache/ + +# IDE +.idea/ +.vscode/ +*.code-workspace + +# OS +.DS_Store +Thumbs.db + +# Keep these (needed in build): +# - drizzle/ (migration SQL files, copied into runtime image) +# - .env.example (reference, excluded above via negation) +# - package.json (dependency manifest) +# - package-lock.json (lockfile for deterministic installs) +# - src/ (compiled during build stage) diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..d04cc5b --- /dev/null +++ b/.env.example @@ -0,0 +1,17 @@ +# Tubearr Environment Configuration +# Copy this file to .env and customize as needed + +# Server port (default: 8989) +TUBEARR_PORT=8989 + +# Database file path (default: ./data/tubearr.db) +TUBEARR_DB_PATH=./data/tubearr.db + +# Log level: trace, debug, info, warn, error, fatal (default: info) +TUBEARR_LOG_LEVEL=info + +# API key for authentication (optional — auto-generated on first run if not set) +# TUBEARR_API_KEY= + +# Node environment +NODE_ENV=development diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..da69842 --- /dev/null +++ b/.gitignore @@ -0,0 +1,53 @@ +# Dependencies +node_modules/ + +# Build output +dist/ + +# Environment variables +.env +.env.* +!.env.example + +# Database +data/ + +# Drizzle migrations meta (generated) +# Keep the SQL files, but the meta/ directory is generated +# drizzle/meta/ — actually we want to keep this for migration tracking + +# IDE +.idea/ +.vscode/ +*.code-workspace + +# OS +.DS_Store +Thumbs.db + +# Logs +*.log + +# Test coverage +coverage/ + +# Temp +tmp/ +.cache/ + +# GSD +.gsd/ + +# ── GSD 
baseline (auto-generated) ── +.gsd +*.swp +*.swo +*~ +.next/ +build/ +__pycache__/ +*.pyc +.venv/ +venv/ +target/ +vendor/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..6c5cf76 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,66 @@ +# ============================================================ +# Tubearr — Multi-stage Docker build +# ============================================================ +# Stage 1: Install all dependencies (including devDependencies) +# Stage 2: Compile TypeScript backend + Vite frontend +# Stage 3: Slim Alpine runtime with Node 22, yt-dlp, ffmpeg +# ============================================================ + +# ── Stage 1: Dependencies ────────────────────────────────── +FROM node:22-alpine AS deps + +WORKDIR /app + +COPY package.json package-lock.json ./ +RUN npm ci + +# ── Stage 2: Build ───────────────────────────────────────── +FROM deps AS build + +# Copy source and config files needed for compilation +COPY src/ ./src/ +COPY tsconfig.json ./ +COPY drizzle/ ./drizzle/ + +# Compile TypeScript backend (outputs to dist/) +RUN npm run build + +# Build Vite frontend SPA (outputs to dist/frontend/) +RUN npm run build:frontend + +# ── Stage 3: Runtime ─────────────────────────────────────── +FROM node:22-alpine AS runtime + +# Install yt-dlp and ffmpeg — the core download/transcode tools +RUN apk add --no-cache python3 py3-pip ffmpeg \ + && pip install --no-cache-dir --break-system-packages yt-dlp + +WORKDIR /app + +# Copy only what the runtime needs from the build stage +COPY --from=build /app/dist/ ./dist/ +COPY --from=build /app/drizzle/ ./drizzle/ +COPY package.json package-lock.json ./ + +# Install production-only dependencies (no devDependencies) +RUN npm ci --omit=dev + +# Add tsx for ESM-compatible execution (handles extensionless imports that +# tsc emits but Node's native ESM loader rejects). +# Installed locally alongside production deps so Node's --import can resolve it. 
+RUN npm install tsx + +# Create default directories following *arr family conventions +# /config — DB, logs, cookies, settings (like Radarr/Sonarr /config) +# /media — downloaded/organized media files +RUN mkdir -p /config /media + +# Runtime environment defaults +ENV NODE_ENV=production +ENV TUBEARR_DB_PATH=/config/tubearr.db +ENV TUBEARR_MEDIA_PATH=/media +ENV TUBEARR_COOKIE_PATH=/config/cookies + +EXPOSE 8989 + +CMD ["node", "--import", "tsx/esm", "dist/index.js"] diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..4fd08fe --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,20 @@ +services: + tubearr: + build: + context: . + container_name: tubearr + ports: + - "8989:8989" + volumes: + - ./config:/config + - ./media:/media + environment: + - NODE_ENV=production + - TUBEARR_PORT=8989 + healthcheck: + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://127.0.0.1:8989/ping"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 15s + restart: unless-stopped diff --git a/drizzle.config.ts b/drizzle.config.ts new file mode 100644 index 0000000..81a9903 --- /dev/null +++ b/drizzle.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from 'drizzle-kit'; + +export default defineConfig({ + schema: './src/db/schema/*.ts', + out: './drizzle', + dialect: 'sqlite', +}); diff --git a/drizzle/0000_colossal_jubilee.sql b/drizzle/0000_colossal_jubilee.sql new file mode 100644 index 0000000..45c8e6f --- /dev/null +++ b/drizzle/0000_colossal_jubilee.sql @@ -0,0 +1,90 @@ +CREATE TABLE `content_items` ( + `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + `creator_id` integer NOT NULL, + `title` text NOT NULL, + `platform_content_id` text NOT NULL, + `url` text NOT NULL, + `content_type` text NOT NULL, + `duration` integer, + `file_path` text, + `file_size` integer, + `format` text, + `quality_metadata` text, + `status` text DEFAULT 'monitored' NOT NULL, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + `updated_at` 
text DEFAULT (datetime('now')) NOT NULL, + FOREIGN KEY (`creator_id`) REFERENCES `creators`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `format_profiles` ( + `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + `name` text NOT NULL, + `video_resolution` text, + `audio_codec` text, + `audio_bitrate` text, + `container_format` text, + `is_default` integer DEFAULT false NOT NULL, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + `updated_at` text DEFAULT (datetime('now')) NOT NULL +); +--> statement-breakpoint +CREATE TABLE `creators` ( + `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + `name` text NOT NULL, + `platform` text NOT NULL, + `platform_id` text NOT NULL, + `url` text NOT NULL, + `monitoring_enabled` integer DEFAULT true NOT NULL, + `check_interval` integer DEFAULT 360 NOT NULL, + `image_url` text, + `metadata` text, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + `updated_at` text DEFAULT (datetime('now')) NOT NULL +); +--> statement-breakpoint +CREATE TABLE `download_history` ( + `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + `content_item_id` integer, + `creator_id` integer, + `event_type` text NOT NULL, + `status` text NOT NULL, + `details` text, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + FOREIGN KEY (`content_item_id`) REFERENCES `content_items`(`id`) ON UPDATE no action ON DELETE set null, + FOREIGN KEY (`creator_id`) REFERENCES `creators`(`id`) ON UPDATE no action ON DELETE set null +); +--> statement-breakpoint +CREATE TABLE `notification_settings` ( + `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + `type` text NOT NULL, + `name` text NOT NULL, + `enabled` integer DEFAULT true NOT NULL, + `config` text NOT NULL, + `on_grab` integer DEFAULT true NOT NULL, + `on_download` integer DEFAULT true NOT NULL, + `on_failure` integer DEFAULT true NOT NULL, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + `updated_at` text DEFAULT (datetime('now')) NOT NULL +); +--> 
statement-breakpoint +CREATE TABLE `queue_items` ( + `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + `content_item_id` integer NOT NULL, + `status` text DEFAULT 'pending' NOT NULL, + `priority` integer DEFAULT 0 NOT NULL, + `attempts` integer DEFAULT 0 NOT NULL, + `max_attempts` integer DEFAULT 3 NOT NULL, + `error` text, + `started_at` text, + `completed_at` text, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + `updated_at` text DEFAULT (datetime('now')) NOT NULL, + FOREIGN KEY (`content_item_id`) REFERENCES `content_items`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `system_config` ( + `key` text PRIMARY KEY NOT NULL, + `value` text NOT NULL, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + `updated_at` text DEFAULT (datetime('now')) NOT NULL +); diff --git a/drizzle/0001_natural_toad_men.sql b/drizzle/0001_natural_toad_men.sql new file mode 100644 index 0000000..58582ae --- /dev/null +++ b/drizzle/0001_natural_toad_men.sql @@ -0,0 +1,2 @@ +ALTER TABLE `creators` ADD `last_checked_at` text;--> statement-breakpoint +ALTER TABLE `creators` ADD `last_check_status` text; \ No newline at end of file diff --git a/drizzle/0002_lonely_nico_minoru.sql b/drizzle/0002_lonely_nico_minoru.sql new file mode 100644 index 0000000..329123f --- /dev/null +++ b/drizzle/0002_lonely_nico_minoru.sql @@ -0,0 +1 @@ +ALTER TABLE `creators` ADD `format_profile_id` integer REFERENCES format_profiles(id); \ No newline at end of file diff --git a/drizzle/0003_moaning_vertigo.sql b/drizzle/0003_moaning_vertigo.sql new file mode 100644 index 0000000..b597453 --- /dev/null +++ b/drizzle/0003_moaning_vertigo.sql @@ -0,0 +1,3 @@ +ALTER TABLE `content_items` ADD `thumbnail_url` text;--> statement-breakpoint +ALTER TABLE `format_profiles` ADD `subtitle_languages` text;--> statement-breakpoint +ALTER TABLE `format_profiles` ADD `embed_subtitles` integer DEFAULT false NOT NULL; \ No newline at end of file diff --git 
a/drizzle/0004_platform_settings.sql b/drizzle/0004_platform_settings.sql new file mode 100644 index 0000000..7655e2b --- /dev/null +++ b/drizzle/0004_platform_settings.sql @@ -0,0 +1,11 @@ +CREATE TABLE `platform_settings` ( + `platform` text PRIMARY KEY NOT NULL, + `default_format_profile_id` integer REFERENCES `format_profiles`(`id`) ON DELETE SET NULL, + `check_interval` integer DEFAULT 360, + `concurrency_limit` integer DEFAULT 2, + `subtitle_languages` text, + `grab_all_enabled` integer DEFAULT false NOT NULL, + `grab_all_order` text DEFAULT 'newest' NOT NULL, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + `updated_at` text DEFAULT (datetime('now')) NOT NULL +); diff --git a/drizzle/0005_monitoring_playlists.sql b/drizzle/0005_monitoring_playlists.sql new file mode 100644 index 0000000..4f1db56 --- /dev/null +++ b/drizzle/0005_monitoring_playlists.sql @@ -0,0 +1,23 @@ +CREATE TABLE `content_playlist` ( + `content_item_id` integer NOT NULL, + `playlist_id` integer NOT NULL, + PRIMARY KEY(`content_item_id`, `playlist_id`), + FOREIGN KEY (`content_item_id`) REFERENCES `content_items`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`playlist_id`) REFERENCES `playlists`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `playlists` ( + `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + `creator_id` integer NOT NULL, + `platform_playlist_id` text NOT NULL, + `title` text NOT NULL, + `position` integer DEFAULT 0 NOT NULL, + `created_at` text DEFAULT (datetime('now')) NOT NULL, + `updated_at` text DEFAULT (datetime('now')) NOT NULL, + FOREIGN KEY (`creator_id`) REFERENCES `creators`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +ALTER TABLE `content_items` ADD `published_at` text;--> statement-breakpoint +ALTER TABLE `content_items` ADD `downloaded_at` text;--> statement-breakpoint +ALTER TABLE `content_items` ADD `monitored` integer DEFAULT true NOT NULL;--> statement-breakpoint 
+ALTER TABLE `creators` ADD `monitoring_mode` text DEFAULT 'all' NOT NULL; \ No newline at end of file diff --git a/drizzle/0006_rename_creators_to_channels.sql b/drizzle/0006_rename_creators_to_channels.sql new file mode 100644 index 0000000..668b345 --- /dev/null +++ b/drizzle/0006_rename_creators_to_channels.sql @@ -0,0 +1,4 @@ +ALTER TABLE creators RENAME TO channels;--> statement-breakpoint +ALTER TABLE content_items RENAME COLUMN creator_id TO channel_id;--> statement-breakpoint +ALTER TABLE download_history RENAME COLUMN creator_id TO channel_id;--> statement-breakpoint +ALTER TABLE playlists RENAME COLUMN creator_id TO channel_id; diff --git a/drizzle/0007_steep_the_watchers.sql b/drizzle/0007_steep_the_watchers.sql new file mode 100644 index 0000000..801a6c8 --- /dev/null +++ b/drizzle/0007_steep_the_watchers.sql @@ -0,0 +1,2 @@ +ALTER TABLE `platform_settings` ADD `scan_limit` integer DEFAULT 100;--> statement-breakpoint +ALTER TABLE `platform_settings` ADD `rate_limit_delay` integer DEFAULT 1000; \ No newline at end of file diff --git a/drizzle/meta/0000_snapshot.json b/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000..1304ac2 --- /dev/null +++ b/drizzle/meta/0000_snapshot.json @@ -0,0 +1,630 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "ee333905-695c-4855-ae88-77eed7c0ac4d", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "content_items": { + "name": "content_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_content_id": { + "name": "platform_content_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { 
+ "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content_type": { + "name": "content_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "duration": { + "name": "duration", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_size": { + "name": "file_size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format": { + "name": "format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "quality_metadata": { + "name": "quality_metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'monitored'" + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "content_items_creator_id_creators_id_fk": { + "name": "content_items_creator_id_creators_id_fk", + "tableFrom": "content_items", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "format_profiles": { + "name": "format_profiles", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": 
{ + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "video_resolution": { + "name": "video_resolution", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_codec": { + "name": "audio_codec", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_bitrate": { + "name": "audio_bitrate", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "container_format": { + "name": "container_format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_default": { + "name": "is_default", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "creators": { + "name": "creators", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform": { + "name": "platform", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_id": { + "name": "platform_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "monitoring_enabled": { + "name": 
"monitoring_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 360 + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "download_history": { + "name": "download_history", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "event_type": { + "name": "event_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "details": { + "name": "details", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + 
"default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "download_history_content_item_id_content_items_id_fk": { + "name": "download_history_content_item_id_content_items_id_fk", + "tableFrom": "download_history", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "download_history_creator_id_creators_id_fk": { + "name": "download_history_creator_id_creators_id_fk", + "tableFrom": "download_history", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "notification_settings": { + "name": "notification_settings", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "enabled": { + "name": "enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "config": { + "name": "config", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "on_grab": { + "name": "on_grab", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_download": { + "name": "on_download", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_failure": { + "name": "on_failure", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": 
"text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "queue_items": { + "name": "queue_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "priority": { + "name": "priority", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "attempts": { + "name": "attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "max_attempts": { + "name": "max_attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 3 + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "started_at": { + "name": "started_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "completed_at": { + "name": "completed_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "queue_items_content_item_id_content_items_id_fk": { + "name": "queue_items_content_item_id_content_items_id_fk", + "tableFrom": "queue_items", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "system_config": { + "name": "system_config", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0001_snapshot.json b/drizzle/meta/0001_snapshot.json new file mode 100644 index 0000000..9560cde --- /dev/null +++ b/drizzle/meta/0001_snapshot.json @@ -0,0 +1,644 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "f9f9642b-8498-4158-aebb-814eb2a363d0", + "prevId": "ee333905-695c-4855-ae88-77eed7c0ac4d", + "tables": { + "content_items": { + "name": "content_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "creator_id": { + "name": 
"creator_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_content_id": { + "name": "platform_content_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content_type": { + "name": "content_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "duration": { + "name": "duration", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_size": { + "name": "file_size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format": { + "name": "format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "quality_metadata": { + "name": "quality_metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'monitored'" + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "content_items_creator_id_creators_id_fk": { + "name": "content_items_creator_id_creators_id_fk", + "tableFrom": "content_items", + "tableTo": "creators", + "columnsFrom": [ 
+ "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "format_profiles": { + "name": "format_profiles", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "video_resolution": { + "name": "video_resolution", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_codec": { + "name": "audio_codec", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_bitrate": { + "name": "audio_bitrate", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "container_format": { + "name": "container_format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_default": { + "name": "is_default", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "creators": { + "name": "creators", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform": { + "name": 
"platform", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_id": { + "name": "platform_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "monitoring_enabled": { + "name": "monitoring_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 360 + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "last_checked_at": { + "name": "last_checked_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_check_status": { + "name": "last_check_status", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "download_history": { + "name": "download_history", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + 
"autoincrement": false + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "event_type": { + "name": "event_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "details": { + "name": "details", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "download_history_content_item_id_content_items_id_fk": { + "name": "download_history_content_item_id_content_items_id_fk", + "tableFrom": "download_history", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "download_history_creator_id_creators_id_fk": { + "name": "download_history_creator_id_creators_id_fk", + "tableFrom": "download_history", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "notification_settings": { + "name": "notification_settings", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "enabled": { + "name": "enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false, + "default": true + }, + "config": { + "name": "config", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "on_grab": { + "name": "on_grab", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_download": { + "name": "on_download", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_failure": { + "name": "on_failure", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "queue_items": { + "name": "queue_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "priority": { + "name": "priority", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "attempts": { + "name": "attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "max_attempts": { + "name": "max_attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + 
"default": 3 + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "started_at": { + "name": "started_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "completed_at": { + "name": "completed_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "queue_items_content_item_id_content_items_id_fk": { + "name": "queue_items_content_item_id_content_items_id_fk", + "tableFrom": "queue_items", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "system_config": { + "name": "system_config", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + 
"schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0002_snapshot.json b/drizzle/meta/0002_snapshot.json new file mode 100644 index 0000000..14adfee --- /dev/null +++ b/drizzle/meta/0002_snapshot.json @@ -0,0 +1,665 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "9c1ae9c3-7499-4269-b4a2-21d5ec040367", + "prevId": "f9f9642b-8498-4158-aebb-814eb2a363d0", + "tables": { + "content_items": { + "name": "content_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_content_id": { + "name": "platform_content_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content_type": { + "name": "content_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "duration": { + "name": "duration", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_size": { + "name": "file_size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format": { + "name": "format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "quality_metadata": { + "name": "quality_metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + 
"name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'monitored'" + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "content_items_creator_id_creators_id_fk": { + "name": "content_items_creator_id_creators_id_fk", + "tableFrom": "content_items", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "format_profiles": { + "name": "format_profiles", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "video_resolution": { + "name": "video_resolution", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_codec": { + "name": "audio_codec", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_bitrate": { + "name": "audio_bitrate", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "container_format": { + "name": "container_format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_default": { + "name": "is_default", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": 
true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "creators": { + "name": "creators", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform": { + "name": "platform", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_id": { + "name": "platform_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "monitoring_enabled": { + "name": "monitoring_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 360 + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format_profile_id": { + "name": "format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": 
"text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "last_checked_at": { + "name": "last_checked_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_check_status": { + "name": "last_check_status", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "creators_format_profile_id_format_profiles_id_fk": { + "name": "creators_format_profile_id_format_profiles_id_fk", + "tableFrom": "creators", + "tableTo": "format_profiles", + "columnsFrom": [ + "format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "download_history": { + "name": "download_history", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "event_type": { + "name": "event_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "details": { + "name": "details", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "download_history_content_item_id_content_items_id_fk": { + "name": 
"download_history_content_item_id_content_items_id_fk", + "tableFrom": "download_history", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "download_history_creator_id_creators_id_fk": { + "name": "download_history_creator_id_creators_id_fk", + "tableFrom": "download_history", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "notification_settings": { + "name": "notification_settings", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "enabled": { + "name": "enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "config": { + "name": "config", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "on_grab": { + "name": "on_grab", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_download": { + "name": "on_download", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_failure": { + "name": "on_failure", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": 
"updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "queue_items": { + "name": "queue_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "priority": { + "name": "priority", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "attempts": { + "name": "attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "max_attempts": { + "name": "max_attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 3 + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "started_at": { + "name": "started_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "completed_at": { + "name": "completed_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + 
"queue_items_content_item_id_content_items_id_fk": { + "name": "queue_items_content_item_id_content_items_id_fk", + "tableFrom": "queue_items", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "system_config": { + "name": "system_config", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0003_snapshot.json b/drizzle/meta/0003_snapshot.json new file mode 100644 index 0000000..1f0fbf7 --- /dev/null +++ b/drizzle/meta/0003_snapshot.json @@ -0,0 +1,687 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "389cb6a3-b0dc-4103-b786-3014023f5ed2", + "prevId": "9c1ae9c3-7499-4269-b4a2-21d5ec040367", + "tables": { + "content_items": { + "name": "content_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + 
"title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_content_id": { + "name": "platform_content_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content_type": { + "name": "content_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "duration": { + "name": "duration", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_size": { + "name": "file_size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format": { + "name": "format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "quality_metadata": { + "name": "quality_metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'monitored'" + }, + "thumbnail_url": { + "name": "thumbnail_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "content_items_creator_id_creators_id_fk": { + "name": "content_items_creator_id_creators_id_fk", + "tableFrom": "content_items", + "tableTo": 
"creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "format_profiles": { + "name": "format_profiles", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "video_resolution": { + "name": "video_resolution", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_codec": { + "name": "audio_codec", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_bitrate": { + "name": "audio_bitrate", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "container_format": { + "name": "container_format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_default": { + "name": "is_default", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "embed_subtitles": { + "name": "embed_subtitles", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + 
"checkConstraints": {} + }, + "creators": { + "name": "creators", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform": { + "name": "platform", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_id": { + "name": "platform_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "monitoring_enabled": { + "name": "monitoring_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 360 + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format_profile_id": { + "name": "format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "last_checked_at": { + "name": "last_checked_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_check_status": { + "name": "last_check_status", + "type": "text", + 
"primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "creators_format_profile_id_format_profiles_id_fk": { + "name": "creators_format_profile_id_format_profiles_id_fk", + "tableFrom": "creators", + "tableTo": "format_profiles", + "columnsFrom": [ + "format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "download_history": { + "name": "download_history", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "event_type": { + "name": "event_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "details": { + "name": "details", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "download_history_content_item_id_content_items_id_fk": { + "name": "download_history_content_item_id_content_items_id_fk", + "tableFrom": "download_history", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "download_history_creator_id_creators_id_fk": { + "name": "download_history_creator_id_creators_id_fk", + 
"tableFrom": "download_history", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "notification_settings": { + "name": "notification_settings", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "enabled": { + "name": "enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "config": { + "name": "config", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "on_grab": { + "name": "on_grab", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_download": { + "name": "on_download", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_failure": { + "name": "on_failure", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "queue_items": { + "name": "queue_items", + "columns": { + "id": { + "name": "id", + "type": 
"integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "priority": { + "name": "priority", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "attempts": { + "name": "attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "max_attempts": { + "name": "max_attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 3 + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "started_at": { + "name": "started_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "completed_at": { + "name": "completed_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "queue_items_content_item_id_content_items_id_fk": { + "name": "queue_items_content_item_id_content_items_id_fk", + "tableFrom": "queue_items", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + 
"system_config": { + "name": "system_config", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0004_snapshot.json b/drizzle/meta/0004_snapshot.json new file mode 100644 index 0000000..d4c638c --- /dev/null +++ b/drizzle/meta/0004_snapshot.json @@ -0,0 +1,780 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "f54a9d63-7807-4753-894f-6822f36471f2", + "prevId": "389cb6a3-b0dc-4103-b786-3014023f5ed2", + "tables": { + "content_items": { + "name": "content_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_content_id": { + "name": "platform_content_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + 
"content_type": { + "name": "content_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "duration": { + "name": "duration", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_size": { + "name": "file_size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format": { + "name": "format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "quality_metadata": { + "name": "quality_metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'monitored'" + }, + "thumbnail_url": { + "name": "thumbnail_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "content_items_creator_id_creators_id_fk": { + "name": "content_items_creator_id_creators_id_fk", + "tableFrom": "content_items", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "format_profiles": { + "name": "format_profiles", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + 
"autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "video_resolution": { + "name": "video_resolution", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_codec": { + "name": "audio_codec", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_bitrate": { + "name": "audio_bitrate", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "container_format": { + "name": "container_format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_default": { + "name": "is_default", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "embed_subtitles": { + "name": "embed_subtitles", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "creators": { + "name": "creators", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform": { + "name": "platform", + "type": "text", + 
"primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_id": { + "name": "platform_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "monitoring_enabled": { + "name": "monitoring_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 360 + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format_profile_id": { + "name": "format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "last_checked_at": { + "name": "last_checked_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_check_status": { + "name": "last_check_status", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "creators_format_profile_id_format_profiles_id_fk": { + "name": "creators_format_profile_id_format_profiles_id_fk", + "tableFrom": "creators", + "tableTo": "format_profiles", + "columnsFrom": [ + "format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set 
null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "download_history": { + "name": "download_history", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "event_type": { + "name": "event_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "details": { + "name": "details", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "download_history_content_item_id_content_items_id_fk": { + "name": "download_history_content_item_id_content_items_id_fk", + "tableFrom": "download_history", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "download_history_creator_id_creators_id_fk": { + "name": "download_history_creator_id_creators_id_fk", + "tableFrom": "download_history", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "notification_settings": { + "name": "notification_settings", + "columns": { + "id": { + "name": "id", + 
"type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "enabled": { + "name": "enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "config": { + "name": "config", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "on_grab": { + "name": "on_grab", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_download": { + "name": "on_download", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_failure": { + "name": "on_failure", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "queue_items": { + "name": "queue_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + 
"priority": { + "name": "priority", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "attempts": { + "name": "attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "max_attempts": { + "name": "max_attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 3 + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "started_at": { + "name": "started_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "completed_at": { + "name": "completed_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "queue_items_content_item_id_content_items_id_fk": { + "name": "queue_items_content_item_id_content_items_id_fk", + "tableFrom": "queue_items", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "system_config": { + "name": "system_config", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + 
"notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "platform_settings": { + "name": "platform_settings", + "columns": { + "platform": { + "name": "platform", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "default_format_profile_id": { + "name": "default_format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 360 + }, + "concurrency_limit": { + "name": "concurrency_limit", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 2 + }, + "subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "grab_all_enabled": { + "name": "grab_all_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "grab_all_order": { + "name": "grab_all_order", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'newest'" + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "platform_settings_default_format_profile_id_format_profiles_id_fk": { + 
"name": "platform_settings_default_format_profile_id_format_profiles_id_fk", + "tableFrom": "platform_settings", + "tableTo": "format_profiles", + "columnsFrom": [ + "default_format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0005_snapshot.json b/drizzle/meta/0005_snapshot.json new file mode 100644 index 0000000..ebddf02 --- /dev/null +++ b/drizzle/meta/0005_snapshot.json @@ -0,0 +1,945 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "8270642c-9deb-4bba-b798-bb3739dbe29f", + "prevId": "f54a9d63-7807-4753-894f-6822f36471f2", + "tables": { + "content_items": { + "name": "content_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_content_id": { + "name": "platform_content_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content_type": { + "name": "content_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "duration": { + "name": "duration", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, 
+ "file_size": { + "name": "file_size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format": { + "name": "format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "quality_metadata": { + "name": "quality_metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'monitored'" + }, + "thumbnail_url": { + "name": "thumbnail_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "published_at": { + "name": "published_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "downloaded_at": { + "name": "downloaded_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "monitored": { + "name": "monitored", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "content_items_creator_id_creators_id_fk": { + "name": "content_items_creator_id_creators_id_fk", + "tableFrom": "content_items", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "format_profiles": { + "name": "format_profiles", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": 
true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "video_resolution": { + "name": "video_resolution", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_codec": { + "name": "audio_codec", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_bitrate": { + "name": "audio_bitrate", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "container_format": { + "name": "container_format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_default": { + "name": "is_default", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "embed_subtitles": { + "name": "embed_subtitles", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "creators": { + "name": "creators", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform": { + "name": "platform", + 
"type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_id": { + "name": "platform_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "monitoring_enabled": { + "name": "monitoring_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 360 + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format_profile_id": { + "name": "format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "last_checked_at": { + "name": "last_checked_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_check_status": { + "name": "last_check_status", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "monitoring_mode": { + "name": "monitoring_mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'all'" + } + }, + "indexes": {}, + "foreignKeys": { + "creators_format_profile_id_format_profiles_id_fk": { + "name": 
"creators_format_profile_id_format_profiles_id_fk", + "tableFrom": "creators", + "tableTo": "format_profiles", + "columnsFrom": [ + "format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "download_history": { + "name": "download_history", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "event_type": { + "name": "event_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "details": { + "name": "details", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "download_history_content_item_id_content_items_id_fk": { + "name": "download_history_content_item_id_content_items_id_fk", + "tableFrom": "download_history", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "download_history_creator_id_creators_id_fk": { + "name": "download_history_creator_id_creators_id_fk", + "tableFrom": "download_history", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + 
} + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "content_playlist": { + "name": "content_playlist", + "columns": { + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "playlist_id": { + "name": "playlist_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "content_playlist_content_item_id_content_items_id_fk": { + "name": "content_playlist_content_item_id_content_items_id_fk", + "tableFrom": "content_playlist", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "content_playlist_playlist_id_playlists_id_fk": { + "name": "content_playlist_playlist_id_playlists_id_fk", + "tableFrom": "content_playlist", + "tableTo": "playlists", + "columnsFrom": [ + "playlist_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "content_playlist_content_item_id_playlist_id_pk": { + "columns": [ + "content_item_id", + "playlist_id" + ], + "name": "content_playlist_content_item_id_playlist_id_pk" + } + }, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "notification_settings": { + "name": "notification_settings", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "enabled": { + "name": "enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "config": { + "name": "config", + "type": 
"text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "on_grab": { + "name": "on_grab", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_download": { + "name": "on_download", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_failure": { + "name": "on_failure", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "platform_settings": { + "name": "platform_settings", + "columns": { + "platform": { + "name": "platform", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "default_format_profile_id": { + "name": "default_format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 360 + }, + "concurrency_limit": { + "name": "concurrency_limit", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 2 + }, + "subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "grab_all_enabled": { + "name": "grab_all_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 
false + }, + "grab_all_order": { + "name": "grab_all_order", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'newest'" + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "platform_settings_default_format_profile_id_format_profiles_id_fk": { + "name": "platform_settings_default_format_profile_id_format_profiles_id_fk", + "tableFrom": "platform_settings", + "tableTo": "format_profiles", + "columnsFrom": [ + "default_format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "playlists": { + "name": "playlists", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "creator_id": { + "name": "creator_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_playlist_id": { + "name": "platform_playlist_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "position": { + "name": "position", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "playlists_creator_id_creators_id_fk": { + "name": "playlists_creator_id_creators_id_fk", + "tableFrom": "playlists", + "tableTo": "creators", + "columnsFrom": [ + "creator_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "queue_items": { + "name": "queue_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "priority": { + "name": "priority", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "attempts": { + "name": "attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "max_attempts": { + "name": "max_attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 3 + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "started_at": { + "name": "started_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "completed_at": { + "name": "completed_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": 
"text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "queue_items_content_item_id_content_items_id_fk": { + "name": "queue_items_content_item_id_content_items_id_fk", + "tableFrom": "queue_items", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "system_config": { + "name": "system_config", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0006_snapshot.json b/drizzle/meta/0006_snapshot.json new file mode 100644 index 0000000..ee3b5ca --- /dev/null +++ b/drizzle/meta/0006_snapshot.json @@ -0,0 +1,951 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890", + "prevId": "8270642c-9deb-4bba-b798-bb3739dbe29f", + "tables": { + "content_items": { + "name": "content_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + 
"autoincrement": true + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_content_id": { + "name": "platform_content_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content_type": { + "name": "content_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "duration": { + "name": "duration", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_size": { + "name": "file_size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format": { + "name": "format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "quality_metadata": { + "name": "quality_metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'monitored'" + }, + "thumbnail_url": { + "name": "thumbnail_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "published_at": { + "name": "published_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "downloaded_at": { + "name": "downloaded_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "monitored": { + "name": "monitored", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", 
+ "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "channel_id": { + "name": "channel_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "content_items_channel_id_channels_id_fk": { + "name": "content_items_channel_id_channels_id_fk", + "tableFrom": "content_items", + "tableTo": "channels", + "columnsFrom": [ + "channel_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "format_profiles": { + "name": "format_profiles", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "video_resolution": { + "name": "video_resolution", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_codec": { + "name": "audio_codec", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_bitrate": { + "name": "audio_bitrate", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "container_format": { + "name": "container_format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_default": { + "name": "is_default", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + 
"embed_subtitles": { + "name": "embed_subtitles", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "download_history": { + "name": "download_history", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "event_type": { + "name": "event_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "details": { + "name": "details", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "channel_id": { + "name": "channel_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "download_history_content_item_id_content_items_id_fk": { + "name": "download_history_content_item_id_content_items_id_fk", + "tableFrom": "download_history", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + 
}, + "download_history_channel_id_channels_id_fk": { + "name": "download_history_channel_id_channels_id_fk", + "tableFrom": "download_history", + "tableTo": "channels", + "columnsFrom": [ + "channel_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "content_playlist": { + "name": "content_playlist", + "columns": { + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "playlist_id": { + "name": "playlist_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "content_playlist_content_item_id_content_items_id_fk": { + "name": "content_playlist_content_item_id_content_items_id_fk", + "tableFrom": "content_playlist", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "content_playlist_playlist_id_playlists_id_fk": { + "name": "content_playlist_playlist_id_playlists_id_fk", + "tableFrom": "content_playlist", + "tableTo": "playlists", + "columnsFrom": [ + "playlist_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "content_playlist_content_item_id_playlist_id_pk": { + "columns": [ + "content_item_id", + "playlist_id" + ], + "name": "content_playlist_content_item_id_playlist_id_pk" + } + }, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "notification_settings": { + "name": "notification_settings", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": 
"name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "enabled": { + "name": "enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "config": { + "name": "config", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "on_grab": { + "name": "on_grab", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_download": { + "name": "on_download", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_failure": { + "name": "on_failure", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "platform_settings": { + "name": "platform_settings", + "columns": { + "platform": { + "name": "platform", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "default_format_profile_id": { + "name": "default_format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 360 + }, + "concurrency_limit": { + "name": "concurrency_limit", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 2 + }, + 
"subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "grab_all_enabled": { + "name": "grab_all_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "grab_all_order": { + "name": "grab_all_order", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'newest'" + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "platform_settings_default_format_profile_id_format_profiles_id_fk": { + "name": "platform_settings_default_format_profile_id_format_profiles_id_fk", + "tableFrom": "platform_settings", + "tableTo": "format_profiles", + "columnsFrom": [ + "default_format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "playlists": { + "name": "playlists", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "platform_playlist_id": { + "name": "platform_playlist_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "position": { + "name": "position", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "channel_id": { + "name": "channel_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "playlists_channel_id_channels_id_fk": { + "name": "playlists_channel_id_channels_id_fk", + "tableFrom": "playlists", + "tableTo": "channels", + "columnsFrom": [ + "channel_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "queue_items": { + "name": "queue_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "priority": { + "name": "priority", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "attempts": { + "name": "attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "max_attempts": { + "name": "max_attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 3 + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "started_at": { + "name": "started_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "completed_at": { + "name": "completed_at", + "type": 
"text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "queue_items_content_item_id_content_items_id_fk": { + "name": "queue_items_content_item_id_content_items_id_fk", + "tableFrom": "queue_items", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "system_config": { + "name": "system_config", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "channels": { + "name": "channels", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform": { + "name": "platform", + "type": "text", + "primaryKey": false, + "notNull": true, 
+ "autoincrement": false + }, + "platform_id": { + "name": "platform_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "monitoring_enabled": { + "name": "monitoring_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 360 + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format_profile_id": { + "name": "format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "last_checked_at": { + "name": "last_checked_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_check_status": { + "name": "last_check_status", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "monitoring_mode": { + "name": "monitoring_mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'all'" + } + }, + "indexes": {}, + "foreignKeys": { + "channels_format_profile_id_format_profiles_id_fk": { + "name": "channels_format_profile_id_format_profiles_id_fk", + "tableFrom": "channels", + 
"tableTo": "format_profiles", + "columnsFrom": [ + "format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": { + "\"creators\"": "\"channels\"" + }, + "columns": { + "\"content_items\".\"creator_id\"": "\"content_items\".\"channel_id\"", + "\"download_history\".\"creator_id\"": "\"download_history\".\"channel_id\"", + "\"playlists\".\"creator_id\"": "\"playlists\".\"channel_id\"" + } + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0007_snapshot.json b/drizzle/meta/0007_snapshot.json new file mode 100644 index 0000000..f4c2a01 --- /dev/null +++ b/drizzle/meta/0007_snapshot.json @@ -0,0 +1,961 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "24769c36-328d-4e54-8ba1-e74a2681bef5", + "prevId": "a1b2c3d4-e5f6-7890-abcd-ef1234567890", + "tables": { + "channels": { + "name": "channels", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform": { + "name": "platform", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_id": { + "name": "platform_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "monitoring_enabled": { + "name": "monitoring_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false, + "default": 360 + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format_profile_id": { + "name": "format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "last_checked_at": { + "name": "last_checked_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_check_status": { + "name": "last_check_status", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "monitoring_mode": { + "name": "monitoring_mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'all'" + } + }, + "indexes": {}, + "foreignKeys": { + "channels_format_profile_id_format_profiles_id_fk": { + "name": "channels_format_profile_id_format_profiles_id_fk", + "tableFrom": "channels", + "tableTo": "format_profiles", + "columnsFrom": [ + "format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "content_items": { + "name": "content_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "channel_id": { + "name": "channel_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + 
"title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_content_id": { + "name": "platform_content_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content_type": { + "name": "content_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "duration": { + "name": "duration", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "file_size": { + "name": "file_size", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "format": { + "name": "format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "quality_metadata": { + "name": "quality_metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'monitored'" + }, + "thumbnail_url": { + "name": "thumbnail_url", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "published_at": { + "name": "published_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "downloaded_at": { + "name": "downloaded_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "monitored": { + "name": "monitored", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + 
"notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "content_items_channel_id_channels_id_fk": { + "name": "content_items_channel_id_channels_id_fk", + "tableFrom": "content_items", + "tableTo": "channels", + "columnsFrom": [ + "channel_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "format_profiles": { + "name": "format_profiles", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "video_resolution": { + "name": "video_resolution", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_codec": { + "name": "audio_codec", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "audio_bitrate": { + "name": "audio_bitrate", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "container_format": { + "name": "container_format", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_default": { + "name": "is_default", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "embed_subtitles": { + "name": "embed_subtitles", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + 
"created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "download_history": { + "name": "download_history", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "channel_id": { + "name": "channel_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "event_type": { + "name": "event_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "details": { + "name": "details", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "download_history_content_item_id_content_items_id_fk": { + "name": "download_history_content_item_id_content_items_id_fk", + "tableFrom": "download_history", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "download_history_channel_id_channels_id_fk": { + "name": "download_history_channel_id_channels_id_fk", + "tableFrom": "download_history", + "tableTo": 
"channels", + "columnsFrom": [ + "channel_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "content_playlist": { + "name": "content_playlist", + "columns": { + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "playlist_id": { + "name": "playlist_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "content_playlist_content_item_id_content_items_id_fk": { + "name": "content_playlist_content_item_id_content_items_id_fk", + "tableFrom": "content_playlist", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "content_playlist_playlist_id_playlists_id_fk": { + "name": "content_playlist_playlist_id_playlists_id_fk", + "tableFrom": "content_playlist", + "tableTo": "playlists", + "columnsFrom": [ + "playlist_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "content_playlist_content_item_id_playlist_id_pk": { + "columns": [ + "content_item_id", + "playlist_id" + ], + "name": "content_playlist_content_item_id_playlist_id_pk" + } + }, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "notification_settings": { + "name": "notification_settings", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "enabled": { + "name": "enabled", + "type": "integer", + 
"primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "config": { + "name": "config", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "on_grab": { + "name": "on_grab", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_download": { + "name": "on_download", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "on_failure": { + "name": "on_failure", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "platform_settings": { + "name": "platform_settings", + "columns": { + "platform": { + "name": "platform", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "default_format_profile_id": { + "name": "default_format_profile_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "check_interval": { + "name": "check_interval", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 360 + }, + "concurrency_limit": { + "name": "concurrency_limit", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 2 + }, + "subtitle_languages": { + "name": "subtitle_languages", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "grab_all_enabled": { + 
"name": "grab_all_enabled", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "grab_all_order": { + "name": "grab_all_order", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'newest'" + }, + "scan_limit": { + "name": "scan_limit", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 100 + }, + "rate_limit_delay": { + "name": "rate_limit_delay", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": 1000 + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "platform_settings_default_format_profile_id_format_profiles_id_fk": { + "name": "platform_settings_default_format_profile_id_format_profiles_id_fk", + "tableFrom": "platform_settings", + "tableTo": "format_profiles", + "columnsFrom": [ + "default_format_profile_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "playlists": { + "name": "playlists", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "channel_id": { + "name": "channel_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "platform_playlist_id": { + "name": "platform_playlist_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": 
true, + "autoincrement": false + }, + "position": { + "name": "position", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "playlists_channel_id_channels_id_fk": { + "name": "playlists_channel_id_channels_id_fk", + "tableFrom": "playlists", + "tableTo": "channels", + "columnsFrom": [ + "channel_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "queue_items": { + "name": "queue_items", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "content_item_id": { + "name": "content_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "priority": { + "name": "priority", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "attempts": { + "name": "attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "max_attempts": { + "name": "max_attempts", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 3 + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "started_at": { + "name": "started_at", 
+ "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "completed_at": { + "name": "completed_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": { + "queue_items_content_item_id_content_items_id_fk": { + "name": "queue_items_content_item_id_content_items_id_fk", + "tableFrom": "queue_items", + "tableTo": "content_items", + "columnsFrom": [ + "content_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "system_config": { + "name": "system_config", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "(datetime('now'))" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/_journal.json b/drizzle/meta/_journal.json new 
file mode 100644 index 0000000..ebd1114 --- /dev/null +++ b/drizzle/meta/_journal.json @@ -0,0 +1,62 @@ +{ + "version": "7", + "dialect": "sqlite", + "entries": [ + { + "idx": 0, + "version": "6", + "when": 1774243438376, + "tag": "0000_colossal_jubilee", + "breakpoints": true + }, + { + "idx": 1, + "version": "6", + "when": 1774245174383, + "tag": "0001_natural_toad_men", + "breakpoints": true + }, + { + "idx": 2, + "version": "6", + "when": 1774246365287, + "tag": "0002_lonely_nico_minoru", + "breakpoints": true + }, + { + "idx": 3, + "version": "6", + "when": 1774310330436, + "tag": "0003_moaning_vertigo", + "breakpoints": true + }, + { + "idx": 4, + "version": "6", + "when": 1774312153698, + "tag": "0004_platform_settings", + "breakpoints": true + }, + { + "idx": 5, + "version": "6", + "when": 1774325204862, + "tag": "0005_monitoring_playlists", + "breakpoints": true + }, + { + "idx": 6, + "version": "6", + "when": 1774656000000, + "tag": "0006_rename_creators_to_channels", + "breakpoints": true + }, + { + "idx": 7, + "version": "6", + "when": 1774396066443, + "tag": "0007_steep_the_watchers", + "breakpoints": true + } + ] +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..b57c497 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,5266 @@ +{ + "name": "tubearr", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "tubearr", + "version": "0.1.0", + "dependencies": { + "@fastify/cors": "^11.0.0", + "@fastify/rate-limit": "^10.2.1", + "@fastify/static": "^9.0.0", + "@libsql/client": "^0.14.0", + "@tanstack/react-query": "^5.95.0", + "croner": "^10.0.1", + "dotenv": "^16.4.7", + "drizzle-orm": "^0.38.4", + "fastify": "^5.2.1", + "fastify-plugin": "^5.1.0", + "lucide-react": "^0.577.0", + "react": "^19.2.4", + "react-dom": "^19.2.4", + "react-router": "^7.13.1", + "react-router-dom": "^7.13.1" + }, + "devDependencies": { + "@types/node": "^22.12.0", + 
"@types/react": "^19.2.14", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^4.7.0", + "drizzle-kit": "^0.30.4", + "tsx": "^4.19.2", + "typescript": "^5.7.3", + "vitest": "^3.0.5" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/code-frame/node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + 
"@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": 
"sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + 
"version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.29.2.tgz", + "integrity": "sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.2", + "resolved": 
"https://registry.npmjs.org/@babel/parser/-/parser-7.29.2.tgz", + "integrity": "sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@drizzle-team/brocli": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/@drizzle-team/brocli/-/brocli-0.10.2.tgz", + "integrity": "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@esbuild-kit/core-utils": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@esbuild-kit/core-utils/-/core-utils-3.3.2.tgz", + "integrity": "sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==", + "deprecated": "Merged into tsx: https://tsx.is", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.18.20", + "source-map-support": "^0.5.21" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/android-arm": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz", + "integrity": 
"sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/android-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz", + "integrity": "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/android-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.20.tgz", + "integrity": "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/darwin-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz", + "integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/darwin-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz", + "integrity": "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": 
true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/freebsd-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz", + "integrity": "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/freebsd-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz", + "integrity": "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-arm": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz", + "integrity": "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz", + "integrity": "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-ia32": { + "version": "0.18.20", + 
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz", + "integrity": "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-loong64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz", + "integrity": "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-mips64el": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz", + "integrity": "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-ppc64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz", + "integrity": "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-riscv64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz", + "integrity": 
"sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-s390x": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz", + "integrity": "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/linux-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz", + "integrity": "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/netbsd-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz", + "integrity": "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/openbsd-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz", + "integrity": "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + 
"openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/sunos-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz", + "integrity": "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/win32-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz", + "integrity": "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/win32-ia32": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz", + "integrity": "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/@esbuild/win32-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz", + "integrity": "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild-kit/core-utils/node_modules/esbuild": { + "version": "0.18.20", + "resolved": 
"https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz", + "integrity": "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/android-arm": "0.18.20", + "@esbuild/android-arm64": "0.18.20", + "@esbuild/android-x64": "0.18.20", + "@esbuild/darwin-arm64": "0.18.20", + "@esbuild/darwin-x64": "0.18.20", + "@esbuild/freebsd-arm64": "0.18.20", + "@esbuild/freebsd-x64": "0.18.20", + "@esbuild/linux-arm": "0.18.20", + "@esbuild/linux-arm64": "0.18.20", + "@esbuild/linux-ia32": "0.18.20", + "@esbuild/linux-loong64": "0.18.20", + "@esbuild/linux-mips64el": "0.18.20", + "@esbuild/linux-ppc64": "0.18.20", + "@esbuild/linux-riscv64": "0.18.20", + "@esbuild/linux-s390x": "0.18.20", + "@esbuild/linux-x64": "0.18.20", + "@esbuild/netbsd-x64": "0.18.20", + "@esbuild/openbsd-x64": "0.18.20", + "@esbuild/sunos-x64": "0.18.20", + "@esbuild/win32-arm64": "0.18.20", + "@esbuild/win32-ia32": "0.18.20", + "@esbuild/win32-x64": "0.18.20" + } + }, + "node_modules/@esbuild-kit/esm-loader": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/@esbuild-kit/esm-loader/-/esm-loader-2.6.5.tgz", + "integrity": "sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==", + "deprecated": "Merged into tsx: https://tsx.is", + "dev": true, + "license": "MIT", + "dependencies": { + "@esbuild-kit/core-utils": "^3.3.2", + "get-tsconfig": "^4.7.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz", + "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": 
{ + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz", + "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz", + "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz", + "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz", + "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz", + "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==", + "cpu": [ + "x64" + ], 
+ "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz", + "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz", + "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz", + "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz", + "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz", + "integrity": 
"sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz", + "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz", + "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz", + "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz", + "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": 
"0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz", + "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz", + "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.4.tgz", + "integrity": "sha512-xHT8X4sb0GS8qTqiwzHqpY00C95DPAq7nAwX35Ie/s+LO9830hrMd3oX0ZMKLvy7vsonee73x0lmcdOVXFzd6Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz", + "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.4.tgz", + "integrity": "sha512-2MyL3IAaTX+1/qP0O1SwskwcwCoOI4kV2IBX1xYnDDqthmq5ArrW94qSIKCAuRraMgPOmG0RDTA74mzYNQA9ow==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": 
{ + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz", + "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.4.tgz", + "integrity": "sha512-JkTZrl6VbyO8lDQO3yv26nNr2RM2yZzNrNHEsj9bm6dOwwu9OYN28CjzZkH57bh4w0I2F7IodpQvUAEd1mbWXg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz", + "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz", + "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz", + "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==", + "cpu": [ + "ia32" + 
], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz", + "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@fastify/accept-negotiator": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@fastify/accept-negotiator/-/accept-negotiator-2.0.1.tgz", + "integrity": "sha512-/c/TW2bO/v9JeEgoD/g1G5GxGeCF1Hafdf79WPmUlgYiBXummY0oX3VVq4yFkKKVBKDNlaDUYoab7g38RpPqCQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/@fastify/ajv-compiler": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@fastify/ajv-compiler/-/ajv-compiler-4.0.5.tgz", + "integrity": "sha512-KoWKW+MhvfTRWL4qrhUwAAZoaChluo0m0vbiJlGMt2GXvL4LVPQEjt8kSpHI3IBq5Rez8fg+XeH3cneztq+C7A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "ajv": "^8.12.0", + "ajv-formats": "^3.0.1", + "fast-uri": "^3.0.0" + } + }, + "node_modules/@fastify/cors": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/@fastify/cors/-/cors-11.2.0.tgz", + "integrity": "sha512-LbLHBuSAdGdSFZYTLVA3+Ch2t+sA6nq3Ejc6XLAKiQ6ViS2qFnvicpj0htsx03FyYeLs04HfRNBsz/a8SvbcUw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": 
"https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "fastify-plugin": "^5.0.0", + "toad-cache": "^3.7.0" + } + }, + "node_modules/@fastify/error": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.2.0.tgz", + "integrity": "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/@fastify/fast-json-stringify-compiler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@fastify/fast-json-stringify-compiler/-/fast-json-stringify-compiler-5.0.3.tgz", + "integrity": "sha512-uik7yYHkLr6fxd8hJSZ8c+xF4WafPK+XzneQDPU+D10r5X19GW8lJcom2YijX2+qtFF1ENJlHXKFM9ouXNJYgQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "fast-json-stringify": "^6.0.0" + } + }, + "node_modules/@fastify/forwarded": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@fastify/forwarded/-/forwarded-3.0.1.tgz", + "integrity": "sha512-JqDochHFqXs3C3Ml3gOY58zM7OqO9ENqPo0UqAjAjH8L01fRZqwX9iLeX34//kiJubF7r2ZQHtBRU36vONbLlw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/@fastify/merge-json-schemas": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@fastify/merge-json-schemas/-/merge-json-schemas-0.2.1.tgz", + "integrity": "sha512-OA3KGBCy6KtIvLf8DINC5880o5iBlDX4SxzLQS8HorJAbqluzLRn80UXU0bxZn7UOFhFgpRJDasfwn9nG4FG4A==", + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/@fastify/proxy-addr": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@fastify/proxy-addr/-/proxy-addr-5.1.0.tgz", + "integrity": "sha512-INS+6gh91cLUjB+PVHfu1UqcB76Sqtpyp7bnL+FYojhjygvOPA9ctiD/JDKsyD9Xgu4hUhCSJBPig/w7duNajw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@fastify/forwarded": "^3.0.0", + "ipaddr.js": "^2.1.0" + } + }, + "node_modules/@fastify/rate-limit": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@fastify/rate-limit/-/rate-limit-10.3.0.tgz", + "integrity": "sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@lukeed/ms": "^2.0.2", + "fastify-plugin": "^5.0.0", + "toad-cache": "^3.7.0" + } + }, + "node_modules/@fastify/send": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@fastify/send/-/send-4.1.0.tgz", + "integrity": "sha512-TMYeQLCBSy2TOFmV95hQWkiTYgC/SEx7vMdV+wnZVX4tt8VBLKzmH8vV9OzJehV0+XBfg+WxPMt5wp+JBUKsVw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@lukeed/ms": "^2.0.2", + "escape-html": "~1.0.3", + "fast-decode-uri-component": "^1.0.1", + "http-errors": "^2.0.0", + "mime": "^3" + } + }, + "node_modules/@fastify/static": { + "version": "9.0.0", + 
"resolved": "https://registry.npmjs.org/@fastify/static/-/static-9.0.0.tgz", + "integrity": "sha512-r64H8Woe/vfilg5RTy7lwWlE8ZZcTrc3kebYFMEUBrMqlydhQyoiExQXdYAy2REVpST/G35+stAM8WYp1WGmMA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@fastify/accept-negotiator": "^2.0.0", + "@fastify/send": "^4.0.0", + "content-disposition": "^1.0.1", + "fastify-plugin": "^5.0.0", + "fastq": "^1.17.1", + "glob": "^13.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": 
"sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@libsql/client": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@libsql/client/-/client-0.14.0.tgz", + "integrity": "sha512-/9HEKfn6fwXB5aTEEoMeFh4CtG0ZzbncBb1e++OCdVpgKZ/xyMsIVYXm0w7Pv4RUel803vE6LwniB3PqD72R0Q==", + "license": "MIT", + "dependencies": { + "@libsql/core": "^0.14.0", + "@libsql/hrana-client": "^0.7.0", + "js-base64": "^3.7.5", + "libsql": "^0.4.4", + "promise-limit": "^2.7.0" + } + }, + "node_modules/@libsql/core": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@libsql/core/-/core-0.14.0.tgz", + "integrity": "sha512-nhbuXf7GP3PSZgdCY2Ecj8vz187ptHlZQ0VRc751oB2C1W8jQUXKKklvt7t1LJiUTQBVJuadF628eUk+3cRi4Q==", + "license": "MIT", + "dependencies": { + "js-base64": "^3.7.5" + } + }, + "node_modules/@libsql/darwin-arm64": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/@libsql/darwin-arm64/-/darwin-arm64-0.4.7.tgz", + "integrity": "sha512-yOL742IfWUlUevnI5PdnIT4fryY3LYTdLm56bnY0wXBw7dhFcnjuA7jrH3oSVz2mjZTHujxoITgAE7V6Z+eAbg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@libsql/darwin-x64": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/@libsql/darwin-x64/-/darwin-x64-0.4.7.tgz", + "integrity": "sha512-ezc7V75+eoyyH07BO9tIyJdqXXcRfZMbKcLCeF8+qWK5nP8wWuMcfOVywecsXGRbT99zc5eNra4NEx6z5PkSsA==", + "cpu": [ + "x64" + ], + "license": "MIT", + 
"optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@libsql/hrana-client": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@libsql/hrana-client/-/hrana-client-0.7.0.tgz", + "integrity": "sha512-OF8fFQSkbL7vJY9rfuegK1R7sPgQ6kFMkDamiEccNUvieQ+3urzfDFI616oPl8V7T9zRmnTkSjMOImYCAVRVuw==", + "license": "MIT", + "dependencies": { + "@libsql/isomorphic-fetch": "^0.3.1", + "@libsql/isomorphic-ws": "^0.1.5", + "js-base64": "^3.7.5", + "node-fetch": "^3.3.2" + } + }, + "node_modules/@libsql/isomorphic-fetch": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@libsql/isomorphic-fetch/-/isomorphic-fetch-0.3.1.tgz", + "integrity": "sha512-6kK3SUK5Uu56zPq/Las620n5aS9xJq+jMBcNSOmjhNf/MUvdyji4vrMTqD7ptY7/4/CAVEAYDeotUz60LNQHtw==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@libsql/isomorphic-ws": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@libsql/isomorphic-ws/-/isomorphic-ws-0.1.5.tgz", + "integrity": "sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==", + "license": "MIT", + "dependencies": { + "@types/ws": "^8.5.4", + "ws": "^8.13.0" + } + }, + "node_modules/@libsql/linux-arm64-gnu": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/@libsql/linux-arm64-gnu/-/linux-arm64-gnu-0.4.7.tgz", + "integrity": "sha512-WlX2VYB5diM4kFfNaYcyhw5y+UJAI3xcMkEUJZPtRDEIu85SsSFrQ+gvoKfcVh76B//ztSeEX2wl9yrjF7BBCA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@libsql/linux-arm64-musl": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/@libsql/linux-arm64-musl/-/linux-arm64-musl-0.4.7.tgz", + "integrity": "sha512-6kK9xAArVRlTCpWeqnNMCoXW1pe7WITI378n4NpvU5EJ0Ok3aNTIC2nRPRjhro90QcnmLL1jPcrVwO4WD1U0xw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@libsql/linux-x64-gnu": { + "version": 
"0.4.7", + "resolved": "https://registry.npmjs.org/@libsql/linux-x64-gnu/-/linux-x64-gnu-0.4.7.tgz", + "integrity": "sha512-CMnNRCmlWQqqzlTw6NeaZXzLWI8bydaXDke63JTUCvu8R+fj/ENsLrVBtPDlxQ0wGsYdXGlrUCH8Qi9gJep0yQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@libsql/linux-x64-musl": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/@libsql/linux-x64-musl/-/linux-x64-musl-0.4.7.tgz", + "integrity": "sha512-nI6tpS1t6WzGAt1Kx1n1HsvtBbZ+jHn0m7ogNNT6pQHZQj7AFFTIMeDQw/i/Nt5H38np1GVRNsFe99eSIMs9XA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@libsql/win32-x64-msvc": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/@libsql/win32-x64-msvc/-/win32-x64-msvc-0.4.7.tgz", + "integrity": "sha512-7pJzOWzPm6oJUxml+PCDRzYQ4A1hTMHAciTAHfFK4fkbDZX33nWPVG7Y3vqdKtslcwAzwmrNDc6sXy2nwWnbiw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@lukeed/ms": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@lukeed/ms/-/ms-2.0.2.tgz", + "integrity": "sha512-9I2Zn6+NJLfaGoz9jN3lpwDgAYvfGeNYdbAIjJOqzs4Tpc+VU3Jqq4IofSUBKajiDS8k9fZIg18/z13mpk1bsA==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@neon-rs/load": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/@neon-rs/load/-/load-0.0.4.tgz", + "integrity": "sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==", + "license": "MIT" + }, + "node_modules/@petamoriken/float16": { + "version": "3.9.3", + "resolved": "https://registry.npmjs.org/@petamoriken/float16/-/float16-3.9.3.tgz", + "integrity": "sha512-8awtpHXCx/bNpFt4mt2xdkgtgVvKqty8VbjHI/WWWQuEw+KLzFot3f4+LkQY9YmOtq7A5GdOnqoIC8Pdygjk2g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@pinojs/redact": { + "version": "0.4.0", + "resolved": 
"https://registry.npmjs.org/@pinojs/redact/-/redact-0.4.0.tgz", + "integrity": "sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==", + "license": "MIT" + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.0.tgz", + "integrity": "sha512-WOhNW9K8bR3kf4zLxbfg6Pxu2ybOUbB2AjMDHSQx86LIF4rH4Ft7vmMwNt0loO0eonglSNy4cpD3MKXXKQu0/A==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.0.tgz", + "integrity": "sha512-u6JHLll5QKRvjciE78bQXDmqRqNs5M/3GVqZeMwvmjaNODJih/WIrJlFVEihvV0MiYFmd+ZyPr9wxOVbPAG2Iw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.0.tgz", + "integrity": "sha512-qEF7CsKKzSRc20Ciu2Zw1wRrBz4g56F7r/vRwY430UPp/nt1x21Q/fpJ9N5l47WWvJlkNCPJz3QRVw008fi7yA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.0.tgz", + "integrity": 
"sha512-WADYozJ4QCnXCH4wPB+3FuGmDPoFseVCUrANmA5LWwGmC6FL14BWC7pcq+FstOZv3baGX65tZ378uT6WG8ynTw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.0.tgz", + "integrity": "sha512-6b8wGHJlDrGeSE3aH5mGNHBjA0TTkxdoNHik5EkvPHCt351XnigA4pS7Wsj/Eo9Y8RBU6f35cjN9SYmCFBtzxw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.0.tgz", + "integrity": "sha512-h25Ga0t4jaylMB8M/JKAyrvvfxGRjnPQIR8lnCayyzEjEOx2EJIlIiMbhpWxDRKGKF8jbNH01NnN663dH638mA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.0.tgz", + "integrity": "sha512-RzeBwv0B3qtVBWtcuABtSuCzToo2IEAIQrcyB/b2zMvBWVbjo8bZDjACUpnaafaxhTw2W+imQbP2BD1usasK4g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.0.tgz", + "integrity": "sha512-Sf7zusNI2CIU1HLzuu9Tc5YGAHEZs5Lu7N1ssJG4Tkw6e0MEsN7NdjUDDfGNHy2IU+ENyWT+L2obgWiguWibWQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.60.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.0.tgz", + "integrity": "sha512-DX2x7CMcrJzsE91q7/O02IJQ5/aLkVtYFryqCjduJhUfGKG6yJV8hxaw8pZa93lLEpPTP/ohdN4wFz7yp/ry9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.0.tgz", + "integrity": "sha512-09EL+yFVbJZlhcQfShpswwRZ0Rg+z/CsSELFCnPt3iK+iqwGsI4zht3secj5vLEs957QvFFXnzAT0FFPIxSrkQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.0.tgz", + "integrity": "sha512-i9IcCMPr3EXm8EQg5jnja0Zyc1iFxJjZWlb4wr7U2Wx/GrddOuEafxRdMPRYVaXjgbhvqalp6np07hN1w9kAKw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.0.tgz", + "integrity": "sha512-DGzdJK9kyJ+B78MCkWeGnpXJ91tK/iKA6HwHxF4TAlPIY7GXEvMe8hBFRgdrR9Ly4qebR/7gfUs9y2IoaVEyog==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.0.tgz", + "integrity": "sha512-RwpnLsqC8qbS8z1H1AxBA1H6qknR4YpPR9w2XX0vo2Sz10miu57PkNcnHVaZkbqyw/kUWfKMI73jhmfi9BRMUQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.0.tgz", + "integrity": "sha512-Z8pPf54Ly3aqtdWC3G4rFigZgNvd+qJlOE52fmko3KST9SoGfAdSRCwyoyG05q1HrrAblLbk1/PSIV+80/pxLg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.0.tgz", + "integrity": "sha512-3a3qQustp3COCGvnP4SvrMHnPQ9d1vzCakQVRTliaz8cIp/wULGjiGpbcqrkv0WrHTEp8bQD/B3HBjzujVWLOA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.0.tgz", + "integrity": "sha512-pjZDsVH/1VsghMJ2/kAaxt6dL0psT6ZexQVrijczOf+PeP2BUqTHYejk3l6TlPRydggINOeNRhvpLa0AYpCWSQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.0.tgz", + "integrity": "sha512-3ObQs0BhvPgiUVZrN7gqCSvmFuMWvWvsjG5ayJ3Lraqv+2KhOsp+pUbigqbeWqueGIsnn+09HBw27rJ+gYK4VQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.0.tgz", + "integrity": "sha512-EtylprDtQPdS5rXvAayrNDYoJhIz1/vzN2fEubo3yLE7tfAw+948dO0g4M0vkTVFhKojnF+n6C8bDNe+gDRdTg==", + "cpu": [ + "x64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.0.tgz", + "integrity": "sha512-k09oiRCi/bHU9UVFqD17r3eJR9bn03TyKraCrlz5ULFJGdJGi7VOmm9jl44vOJvRJ6P7WuBi/s2A97LxxHGIdw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.0.tgz", + "integrity": "sha512-1o/0/pIhozoSaDJoDcec+IVLbnRtQmHwPV730+AOD29lHEEo4F5BEUB24H0OBdhbBBDwIOSuf7vgg0Ywxdfiiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.0.tgz", + "integrity": "sha512-pESDkos/PDzYwtyzB5p/UoNU/8fJo68vcXM9ZW2V0kjYayj1KaaUfi1NmTUTUpMn4UhU4gTuK8gIaFO4UGuMbA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.0.tgz", + "integrity": "sha512-hj1wFStD7B1YBeYmvY+lWXZ7ey73YGPcViMShYikqKT1GtstIKQAtfUI6yrzPjAy/O7pO0VLXGmUVWXQMaYgTQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.0.tgz", + "integrity": "sha512-SyaIPFoxmUPlNDq5EHkTbiKzmSEmq/gOYFI/3HHJ8iS/v1mbugVa7dXUzcJGQfoytp9DJFLhHH4U3/eTy2Bq4w==", 
+ "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.0.tgz", + "integrity": "sha512-RdcryEfzZr+lAr5kRm2ucN9aVlCCa2QNq4hXelZxb8GG0NJSazq44Z3PCCc8wISRuCVnGs0lQJVX5Vp6fKA+IA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.0.tgz", + "integrity": "sha512-PrsWNQ8BuE00O3Xsx3ALh2Df8fAj9+cvvX9AIA6o4KpATR98c9mud4XtDWVvsEuyia5U4tVSTKygawyJkjm60w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@tanstack/query-core": { + "version": "5.95.0", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.95.0.tgz", + "integrity": "sha512-H1/CWCe8tGL3YIVeo770Z6kPbt0B3M1d/iQXIIK1qlFiFt6G2neYdkHgLapOC8uMYNt9DmHjmGukEKgdMk1P+A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/react-query": { + "version": "5.95.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.95.0.tgz", + "integrity": "sha512-EMP8B+BK9zvnAemT8M/y3z/WO0NjZ7fIUY3T3wnHYK6AA3qK/k33i7tPgCXCejhX0cd4I6bJIXN2GmjrHjDBzg==", + "license": "MIT", + "dependencies": { + "@tanstack/query-core": "5.95.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^18 || ^19" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": 
"sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/chai": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": 
"sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.19.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.15.tgz", + "integrity": "sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==", + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/react": { + "version": "19.2.14", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz", + "integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^19.2.0" + } + }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": 
"sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/@vitest/expect": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "magic-string": "^0.30.17", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^4.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/abstract-logging": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/abstract-logging/-/abstract-logging-2.0.1.tgz", + "integrity": "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA==", + "license": "MIT" + }, + "node_modules/ajv": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz", + "integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/avvio": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/avvio/-/avvio-9.2.0.tgz", + "integrity": 
"sha512-2t/sy01ArdHHE0vRH5Hsay+RtCZt3dLPji7W7/MMOCEgze5b7SNDC4j5H6FnVgPkI1MTNFGzHdHrVXDDl7QSSQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@fastify/error": "^4.0.0", + "fastq": "^1.17.1" + } + }, + "node_modules/balanced-match": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", + "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", + "license": "MIT", + "engines": { + "node": "18 || 20 || >=22" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.10", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.10.tgz", + "integrity": "sha512-sUoJ3IMxx4AyRqO4MLeHlnGDkyXRoUG0/AI9fjK+vS72ekpV0yWVY7O0BVjmBcRtkNcsAO2QDZ4tdKKGoI6YaQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/brace-expansion": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz", + "integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "18 || 20 || >=22" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + 
{ + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001781", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001781.tgz", + "integrity": "sha512-RdwNCyMsNBftLjW6w01z8bKEvT6e/5tpPVEgtn22TiLGlstHOVecsX2KHFkD5e/vRnIE4EGzpuIODb3mtswtkw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chai": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", + "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": 
"^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/check-error": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.3.tgz", + "integrity": "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } + }, + "node_modules/content-disposition": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", + "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/croner": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/croner/-/croner-10.0.1.tgz", + "integrity": "sha512-ixNtAJndqh173VQ4KodSdJEI6nuioBWI0V1ITNKhZZsO0pEMoDxz539T4FTTbSZ/xIOSuDnzxLVRqBVSvPNE2g==", + "funding": [ + { + "type": "other", + "url": "https://paypal.me/hexagonpp" + }, + { + "type": "github", + "url": "https://github.com/sponsors/hexagon" + } + ], + "license": "MIT", + "engines": { + "node": ">=18.0" + } + }, + "node_modules/csstype": { + 
"version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/detect-libc": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.2.tgz", + "integrity": "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/drizzle-kit": { + "version": "0.30.6", + "resolved": "https://registry.npmjs.org/drizzle-kit/-/drizzle-kit-0.30.6.tgz", + "integrity": "sha512-U4wWit0fyZuGuP7iNmRleQyK2V8wCuv57vf5l3MnG4z4fzNTjY/U13M8owyQ5RavqvqxBifWORaR3wIUzlN64g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@drizzle-team/brocli": "^0.10.2", + "@esbuild-kit/esm-loader": "^2.5.5", + "esbuild": "^0.19.7", + "esbuild-register": "^3.5.0", + "gel": "^2.0.0" + }, + "bin": { + "drizzle-kit": "bin.cjs" + } + }, + "node_modules/drizzle-orm": { + "version": "0.38.4", + "resolved": "https://registry.npmjs.org/drizzle-orm/-/drizzle-orm-0.38.4.tgz", + "integrity": "sha512-s7/5BpLKO+WJRHspvpqTydxFob8i1vo2rEx4pY6TGY7QSMuUfWUuzaY0DIpXCkgHOo37BaFC+SJQb99dDUXT3Q==", + "license": "Apache-2.0", + "peerDependencies": { + "@aws-sdk/client-rds-data": ">=3", + "@cloudflare/workers-types": ">=4", + "@electric-sql/pglite": ">=0.2.0", + "@libsql/client": ">=0.10.0", + "@libsql/client-wasm": ">=0.10.0", + "@neondatabase/serverless": ">=0.10.0", + "@op-engineering/op-sqlite": ">=2", + "@opentelemetry/api": "^1.4.1", + "@planetscale/database": ">=1", + "@prisma/client": "*", + "@tidbcloud/serverless": "*", + "@types/better-sqlite3": "*", + "@types/pg": "*", + "@types/react": ">=18", + "@types/sql.js": "*", + "@vercel/postgres": ">=0.8.0", + "@xata.io/client": "*", + "better-sqlite3": ">=7", + "bun-types": 
"*", + "expo-sqlite": ">=14.0.0", + "knex": "*", + "kysely": "*", + "mysql2": ">=2", + "pg": ">=8", + "postgres": ">=3", + "react": ">=18", + "sql.js": ">=1", + "sqlite3": ">=5" + }, + "peerDependenciesMeta": { + "@aws-sdk/client-rds-data": { + "optional": true + }, + "@cloudflare/workers-types": { + "optional": true + }, + "@electric-sql/pglite": { + "optional": true + }, + "@libsql/client": { + "optional": true + }, + "@libsql/client-wasm": { + "optional": true + }, + "@neondatabase/serverless": { + "optional": true + }, + "@op-engineering/op-sqlite": { + "optional": true + }, + "@opentelemetry/api": { + "optional": true + }, + "@planetscale/database": { + "optional": true + }, + "@prisma/client": { + "optional": true + }, + "@tidbcloud/serverless": { + "optional": true + }, + "@types/better-sqlite3": { + "optional": true + }, + "@types/pg": { + "optional": true + }, + "@types/react": { + "optional": true + }, + "@types/sql.js": { + "optional": true + }, + "@vercel/postgres": { + "optional": true + }, + "@xata.io/client": { + "optional": true + }, + "better-sqlite3": { + "optional": true + }, + "bun-types": { + "optional": true + }, + "expo-sqlite": { + "optional": true + }, + "knex": { + "optional": true + }, + "kysely": { + "optional": true + }, + "mysql2": { + "optional": true + }, + "pg": { + "optional": true + }, + "postgres": { + "optional": true + }, + "prisma": { + "optional": true + }, + "react": { + "optional": true + }, + "sql.js": { + "optional": true + }, + "sqlite3": { + "optional": true + } + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.321", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.321.tgz", + "integrity": "sha512-L2C7Q279W2D/J4PLZLk7sebOILDSWos7bMsMNN06rK482umHUrh/3lM8G7IlHFOYip2oAg5nha1rCMxr/rs6ZQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/env-paths": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", + 
"integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz", + "integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.19.12", + "@esbuild/android-arm": "0.19.12", + "@esbuild/android-arm64": "0.19.12", + "@esbuild/android-x64": "0.19.12", + "@esbuild/darwin-arm64": "0.19.12", + "@esbuild/darwin-x64": "0.19.12", + "@esbuild/freebsd-arm64": "0.19.12", + "@esbuild/freebsd-x64": "0.19.12", + "@esbuild/linux-arm": "0.19.12", + "@esbuild/linux-arm64": "0.19.12", + "@esbuild/linux-ia32": "0.19.12", + "@esbuild/linux-loong64": "0.19.12", + "@esbuild/linux-mips64el": "0.19.12", + "@esbuild/linux-ppc64": "0.19.12", + "@esbuild/linux-riscv64": "0.19.12", + "@esbuild/linux-s390x": "0.19.12", + "@esbuild/linux-x64": "0.19.12", + "@esbuild/netbsd-x64": "0.19.12", + "@esbuild/openbsd-x64": "0.19.12", + "@esbuild/sunos-x64": "0.19.12", + "@esbuild/win32-arm64": "0.19.12", + "@esbuild/win32-ia32": "0.19.12", + "@esbuild/win32-x64": "0.19.12" + } + }, + "node_modules/esbuild-register": { + "version": "3.6.0", + "resolved": 
"https://registry.npmjs.org/esbuild-register/-/esbuild-register-3.6.0.tgz", + "integrity": "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.3.4" + }, + "peerDependencies": { + "esbuild": ">=0.12 <1" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/expect-type": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fast-decode-uri-component": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz", + "integrity": "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg==", + "license": "MIT" + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": 
"https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stringify": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-6.3.0.tgz", + "integrity": "sha512-oRCntNDY/329HJPlmdNLIdogNtt6Vyjb1WuT01Soss3slIdyUp8kAcDU3saQTOquEK8KFVfwIIF7FebxUAu+yA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@fastify/merge-json-schemas": "^0.2.0", + "ajv": "^8.12.0", + "ajv-formats": "^3.0.1", + "fast-uri": "^3.0.0", + "json-schema-ref-resolver": "^3.0.0", + "rfdc": "^1.2.0" + } + }, + "node_modules/fast-querystring": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz", + "integrity": "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg==", + "license": "MIT", + "dependencies": { + "fast-decode-uri-component": "^1.0.1" + } + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/fastify": { + "version": "5.8.2", + "resolved": "https://registry.npmjs.org/fastify/-/fastify-5.8.2.tgz", + "integrity": "sha512-lZmt3navvZG915IE+f7/TIVamxIwmBd+OMB+O9WBzcpIwOo6F0LTh0sluoMFk5VkrKTvvrwIaoJPkir4Z+jtAg==", + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@fastify/ajv-compiler": "^4.0.5", + "@fastify/error": "^4.0.0", + "@fastify/fast-json-stringify-compiler": "^5.0.0", + "@fastify/proxy-addr": "^5.0.0", + "abstract-logging": "^2.0.1", + "avvio": "^9.0.0", + "fast-json-stringify": "^6.0.0", + "find-my-way": "^9.0.0", + "light-my-request": "^6.0.0", + "pino": "^9.14.0 || ^10.1.0", + "process-warning": "^5.0.0", + "rfdc": "^1.3.1", + "secure-json-parse": "^4.0.0", + "semver": "^7.6.0", + "toad-cache": "^3.7.0" + } + }, + "node_modules/fastify-plugin": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.1.0.tgz", + "integrity": "sha512-FAIDA8eovSt5qcDgcBvDuX/v0Cjz0ohGhENZ/wpc3y+oZCY2afZ9Baqql3g/lC+OHRnciQol4ww7tuthOb9idw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": 
"sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, + "node_modules/find-my-way": { + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/find-my-way/-/find-my-way-9.5.0.tgz", + "integrity": "sha512-VW2RfnmscZO5KgBY5XVyKREMW5nMZcxDy+buTOsL+zIPnBlbKm+00sgzoQzq1EVh4aALZLfKdwv6atBGcjvjrQ==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-querystring": "^1.0.0", + "safe-regex2": "^5.0.0" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "license": "MIT", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/gel": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/gel/-/gel-2.2.0.tgz", + "integrity": "sha512-q0ma7z2swmoamHQusey8ayo8+ilVdzDt4WTxSPzq/yRqvucWRfymRVMvNgmSC0XK7eNjjEZEcplxpgaNojKdmQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@petamoriken/float16": "^3.8.7", + "debug": 
"^4.3.4", + "env-paths": "^3.0.0", + "semver": "^7.6.2", + "shell-quote": "^1.8.1", + "which": "^4.0.0" + }, + "bin": { + "gel": "dist/cli.mjs" + }, + "engines": { + "node": ">= 18.0.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-tsconfig": { + "version": "4.13.7", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.7.tgz", + "integrity": "sha512-7tN6rFgBlMgpBML5j8typ92BKFi2sFQvIdpAqLA2beia5avZDrMs0FLZiM5etShWq5irVyGcGMEA1jcDaK7A/Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "13.0.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-13.0.6.tgz", + "integrity": "sha512-Wjlyrolmm8uDpm/ogGyXZXb1Z+Ca2B8NbJwqBVg0axK9GbBeoS7yGV6vjXnYdGm6X53iehEuxxbyiKp8QmN4Vw==", + "license": "BlueOak-1.0.0", + "dependencies": { + "minimatch": "^10.2.2", + "minipass": "^7.1.3", + "path-scurry": "^2.0.2" + }, + "engines": { + "node": "18 || 20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "license": "MIT", + "dependencies": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + 
"node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ipaddr.js": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.3.0.tgz", + "integrity": "sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg==", + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/isexe": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.5.tgz", + "integrity": "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/js-base64": { + "version": "3.7.8", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-3.7.8.tgz", + "integrity": "sha512-hNngCeKxIUQiEUN3GPJOkz4wF/YvdUdbNL9hsBcMQTkKzboD7T/q3OYOuuPZLUE6dBxSGpwhk5mwuDud7JVAow==", + "license": "BSD-3-Clause" + }, + "node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-schema-ref-resolver": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-schema-ref-resolver/-/json-schema-ref-resolver-3.0.0.tgz", + "integrity": 
"sha512-hOrZIVL5jyYFjzk7+y7n5JDzGlU8rfWDuYyHwGa2WA8/pcmMHezp2xsVwxrebD/Q9t8Nc5DboieySDpCp4WG4A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/libsql": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/libsql/-/libsql-0.4.7.tgz", + "integrity": "sha512-T9eIRCs6b0J1SHKYIvD8+KCJMcWZ900iZyxdnSCdqxN12Z1ijzT+jY5nrk72Jw4B0HGzms2NgpryArlJqvc3Lw==", + "cpu": [ + "x64", + "arm64", + "wasm32" + ], + "license": "MIT", + "os": [ + "darwin", + "linux", + "win32" + ], + "dependencies": { + "@neon-rs/load": "^0.0.4", + "detect-libc": "2.0.2" + }, + "optionalDependencies": { + "@libsql/darwin-arm64": "0.4.7", + "@libsql/darwin-x64": "0.4.7", + "@libsql/linux-arm64-gnu": "0.4.7", + "@libsql/linux-arm64-musl": "0.4.7", + "@libsql/linux-x64-gnu": "0.4.7", + "@libsql/linux-x64-musl": "0.4.7", + "@libsql/win32-x64-msvc": "0.4.7" + } + }, + "node_modules/light-my-request": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/light-my-request/-/light-my-request-6.6.0.tgz", + "integrity": "sha512-CHYbu8RtboSIoVsHZ6Ye4cj4Aw/yg2oAFimlF7mNvfDV192LR7nDiKtSIfCuLT7KokPSTn/9kfVLm5OGN0A28A==", + "funding": [ + { + 
"type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause", + "dependencies": { + "cookie": "^1.0.1", + "process-warning": "^4.0.0", + "set-cookie-parser": "^2.6.0" + } + }, + "node_modules/light-my-request/node_modules/process-warning": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-4.0.1.tgz", + "integrity": "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/loupe": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", + "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "11.2.7", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.7.tgz", + "integrity": "sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA==", + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/lucide-react": { + "version": "0.577.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.577.0.tgz", + "integrity": "sha512-4LjoFv2eEPwYDPg/CUdBJQSDfPyzXCRrVW1X7jrx/trgxnxkHFjnVZINbzvzxjN70dxychOfg+FTYwBiS3pQ5A==", + "license": "ISC", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", 
+ "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/minimatch": { + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", + "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", + "license": "BlueOak-1.0.0", + "dependencies": { + "brace-expansion": "^5.0.2" + }, + "engines": { + "node": "18 || 20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minipass": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", + "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-domexception": { + "version": 
"1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "deprecated": "Use your platform's native DOMException instead", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "license": "MIT", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/node-releases": { + "version": "2.0.36", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.36.tgz", + "integrity": "sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/path-scurry": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.2.tgz", + "integrity": "sha512-3O/iVVsJAPsOnpwWIeD+d6z/7PmqApyQePUtCndjatj/9I5LylHvt5qluFaBT3I5h3r1ejfR056c+FCv+NnNXg==", + "license": "BlueOak-1.0.0", + "dependencies": { + 
"lru-cache": "^11.0.0", + "minipass": "^7.1.2" + }, + "engines": { + "node": "18 || 20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathval": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.16" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pino": { + "version": "10.3.1", + "resolved": "https://registry.npmjs.org/pino/-/pino-10.3.1.tgz", + "integrity": "sha512-r34yH/GlQpKZbU1BvFFqOjhISRo1MNx1tWYsYvmj6KIRHSPMT2+yHOEb1SG6NMvRoHRF0a07kCOox/9yakl1vg==", + "license": "MIT", + "dependencies": { + "@pinojs/redact": "^0.4.0", + "atomic-sleep": "^1.0.0", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^3.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^5.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", 
+ "sonic-boom": "^4.0.1", + "thread-stream": "^4.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz", + "integrity": "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-std-serializers": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.1.0.tgz", + "integrity": "sha512-BndPH67/JxGExRgiX1dX0w1FvZck5Wa4aal9198SrRhZjH3GxKQUKIBnYJTdj2HDN3UQAS06HlfcSbQj2OHmaw==", + "license": "MIT" + }, + "node_modules/postcss": { + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/process-warning": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz", + "integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/promise-limit": { + "version": "2.7.0", + "resolved": 
"https://registry.npmjs.org/promise-limit/-/promise-limit-2.7.0.tgz", + "integrity": "sha512-7nJ6v5lnJsXwGprnGXga4wx6d1POjvi5Qmf1ivTRxTjH4Z/9Czja/UCMLVmB9N93GeWOU93XaFaEt6jbuoagNw==", + "license": "ISC" + }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", + "license": "MIT" + }, + "node_modules/react": { + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz", + "integrity": "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.4.tgz", + "integrity": "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==", + "license": "MIT", + "dependencies": { + "scheduler": "^0.27.0" + }, + "peerDependencies": { + "react": "^19.2.4" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-router": { + "version": "7.13.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.1.tgz", + "integrity": "sha512-td+xP4X2/6BJvZoX6xw++A2DdEi++YypA69bJUV5oVvqf6/9/9nNlD70YO1e9d3MyamJEBQFEzk6mbfDYbqrSA==", + "license": "MIT", + "dependencies": { + "cookie": "^1.0.1", + "set-cookie-parser": "^2.6.0" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "react": ">=18", + "react-dom": ">=18" + }, + 
"peerDependenciesMeta": { + "react-dom": { + "optional": true + } + } + }, + "node_modules/react-router-dom": { + "version": "7.13.1", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.13.1.tgz", + "integrity": "sha512-UJnV3Rxc5TgUPJt2KJpo1Jpy0OKQr0AjgbZzBFjaPJcFOb2Y8jA5H3LT8HUJAiRLlWrEXWHbF1Z4SCZaQjWDHw==", + "license": "MIT", + "dependencies": { + "react-router": "7.13.1" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "react": ">=18", + "react-dom": ">=18" + } + }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "license": "MIT", + "engines": { + "node": ">= 12.13.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/ret": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.5.0.tgz", + "integrity": "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": 
"sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", + "license": "MIT" + }, + "node_modules/rollup": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.0.tgz", + "integrity": "sha512-yqjxruMGBQJ2gG4HtjZtAfXArHomazDHoFwFFmZZl0r7Pdo7qCIXKqKHZc8yeoMgzJJ+pO6pEEHa+V7uzWlrAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.60.0", + "@rollup/rollup-android-arm64": "4.60.0", + "@rollup/rollup-darwin-arm64": "4.60.0", + "@rollup/rollup-darwin-x64": "4.60.0", + "@rollup/rollup-freebsd-arm64": "4.60.0", + "@rollup/rollup-freebsd-x64": "4.60.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.0", + "@rollup/rollup-linux-arm-musleabihf": "4.60.0", + "@rollup/rollup-linux-arm64-gnu": "4.60.0", + "@rollup/rollup-linux-arm64-musl": "4.60.0", + "@rollup/rollup-linux-loong64-gnu": "4.60.0", + "@rollup/rollup-linux-loong64-musl": "4.60.0", + "@rollup/rollup-linux-ppc64-gnu": "4.60.0", + "@rollup/rollup-linux-ppc64-musl": "4.60.0", + "@rollup/rollup-linux-riscv64-gnu": "4.60.0", + "@rollup/rollup-linux-riscv64-musl": "4.60.0", + "@rollup/rollup-linux-s390x-gnu": "4.60.0", + "@rollup/rollup-linux-x64-gnu": "4.60.0", + "@rollup/rollup-linux-x64-musl": "4.60.0", + "@rollup/rollup-openbsd-x64": "4.60.0", + "@rollup/rollup-openharmony-arm64": "4.60.0", + "@rollup/rollup-win32-arm64-msvc": "4.60.0", + "@rollup/rollup-win32-ia32-msvc": "4.60.0", + "@rollup/rollup-win32-x64-gnu": "4.60.0", + 
"@rollup/rollup-win32-x64-msvc": "4.60.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/safe-regex2": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/safe-regex2/-/safe-regex2-5.1.0.tgz", + "integrity": "sha512-pNHAuBW7TrcleFHsxBr5QMi/Iyp0ENjUKz7GCcX1UO7cMh+NmVK6HxQckNL1tJp1XAJVjG6B8OKIPqodqj9rtw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "ret": "~0.5.0" + }, + "bin": { + "safe-regex2": "bin/safe-regex2.js" + } + }, + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/scheduler": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", + "license": "MIT" + }, + "node_modules/secure-json-parse": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.1.0.tgz", + "integrity": "sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + 
}, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-cookie-parser": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", + "license": "MIT" + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shell-quote": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", + "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/sonic-boom": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.1.tgz", + "integrity": "sha512-w6AxtubXa2wTXAUsZMMWERrsIRAdrK0Sc+FUytWvYAhBJLyuI4llrMIC1DtlNSdI99EI86KZum2MMq3EAZlF9Q==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + 
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/std-env": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "dev": true, + "license": "MIT" + }, + "node_modules/strip-literal": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.1.0.tgz", + "integrity": 
"sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^9.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/thread-stream": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-4.0.0.tgz", + "integrity": "sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA==", + "license": "MIT", + "dependencies": { + "real-require": "^0.2.0" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinypool": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, + 
"node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.4.tgz", + "integrity": "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/toad-cache": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.7.0.tgz", + "integrity": "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==", + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tsx": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz", + "integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.27.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/tsx/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.4.tgz", + "integrity": 
"sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.4.tgz", + "integrity": "sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.4.tgz", + "integrity": "sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.4.tgz", + "integrity": "sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.4.tgz", + "integrity": "sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/darwin-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.4.tgz", + "integrity": "sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.4.tgz", + "integrity": "sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.4.tgz", + "integrity": "sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.4.tgz", + "integrity": "sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.4.tgz", + "integrity": 
"sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ia32": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.4.tgz", + "integrity": "sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-loong64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.4.tgz", + "integrity": "sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.4.tgz", + "integrity": "sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.4.tgz", + "integrity": "sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.4.tgz", + "integrity": "sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-s390x": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.4.tgz", + "integrity": "sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.4.tgz", + "integrity": "sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.4.tgz", + "integrity": "sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.4.tgz", + "integrity": 
"sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/sunos-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.4.tgz", + "integrity": "sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.4.tgz", + "integrity": "sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-ia32": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.4.tgz", + "integrity": "sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.4.tgz", + "integrity": "sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/esbuild": { 
+ "version": "0.27.4", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.4.tgz", + "integrity": "sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.4", + "@esbuild/android-arm": "0.27.4", + "@esbuild/android-arm64": "0.27.4", + "@esbuild/android-x64": "0.27.4", + "@esbuild/darwin-arm64": "0.27.4", + "@esbuild/darwin-x64": "0.27.4", + "@esbuild/freebsd-arm64": "0.27.4", + "@esbuild/freebsd-x64": "0.27.4", + "@esbuild/linux-arm": "0.27.4", + "@esbuild/linux-arm64": "0.27.4", + "@esbuild/linux-ia32": "0.27.4", + "@esbuild/linux-loong64": "0.27.4", + "@esbuild/linux-mips64el": "0.27.4", + "@esbuild/linux-ppc64": "0.27.4", + "@esbuild/linux-riscv64": "0.27.4", + "@esbuild/linux-s390x": "0.27.4", + "@esbuild/linux-x64": "0.27.4", + "@esbuild/netbsd-arm64": "0.27.4", + "@esbuild/netbsd-x64": "0.27.4", + "@esbuild/openbsd-arm64": "0.27.4", + "@esbuild/openbsd-x64": "0.27.4", + "@esbuild/openharmony-arm64": "0.27.4", + "@esbuild/sunos-x64": "0.27.4", + "@esbuild/win32-arm64": "0.27.4", + "@esbuild/win32-ia32": "0.27.4", + "@esbuild/win32-x64": "0.27.4" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "license": "MIT" 
+ }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + 
}, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vite/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.4.tgz", + "integrity": "sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.4.tgz", + "integrity": "sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.4.tgz", + "integrity": "sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==", + "cpu": [ + "arm64" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.4.tgz", + "integrity": "sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.4.tgz", + "integrity": "sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.4.tgz", + "integrity": "sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.4.tgz", + "integrity": "sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.4", + "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.4.tgz", + "integrity": "sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.4.tgz", + "integrity": "sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.4.tgz", + "integrity": "sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.4.tgz", + "integrity": "sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.4.tgz", + "integrity": "sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, 
+ "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.4.tgz", + "integrity": "sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.4.tgz", + "integrity": "sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.4.tgz", + "integrity": "sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.4.tgz", + "integrity": "sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.4.tgz", + "integrity": 
"sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.4.tgz", + "integrity": "sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.4.tgz", + "integrity": "sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.4.tgz", + "integrity": "sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.4.tgz", + "integrity": "sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.4.tgz", + "integrity": "sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.4.tgz", + "integrity": "sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/esbuild": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.4.tgz", + "integrity": "sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.4", + "@esbuild/android-arm": "0.27.4", + "@esbuild/android-arm64": "0.27.4", + "@esbuild/android-x64": "0.27.4", + "@esbuild/darwin-arm64": "0.27.4", + "@esbuild/darwin-x64": "0.27.4", + "@esbuild/freebsd-arm64": "0.27.4", + "@esbuild/freebsd-x64": "0.27.4", + "@esbuild/linux-arm": "0.27.4", + "@esbuild/linux-arm64": "0.27.4", + "@esbuild/linux-ia32": "0.27.4", + "@esbuild/linux-loong64": "0.27.4", + "@esbuild/linux-mips64el": "0.27.4", + "@esbuild/linux-ppc64": "0.27.4", + "@esbuild/linux-riscv64": "0.27.4", + "@esbuild/linux-s390x": "0.27.4", + "@esbuild/linux-x64": "0.27.4", + "@esbuild/netbsd-arm64": "0.27.4", + "@esbuild/netbsd-x64": "0.27.4", + "@esbuild/openbsd-arm64": 
"0.27.4", + "@esbuild/openbsd-x64": "0.27.4", + "@esbuild/openharmony-arm64": "0.27.4", + "@esbuild/sunos-x64": "0.27.4", + "@esbuild/win32-arm64": "0.27.4", + "@esbuild/win32-ia32": "0.27.4", + "@esbuild/win32-x64": "0.27.4" + } + }, + "node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": 
"https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ws": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz", + "integrity": "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..36d0bd7 
--- /dev/null +++ b/package.json @@ -0,0 +1,47 @@ +{ + "name": "tubearr", + "version": "0.1.0", + "description": "Self-hosted content archival and monitoring application in the *arr family style", + "private": true, + "type": "module", + "scripts": { + "dev": "tsx watch src/index.ts", + "dev:frontend": "vite --config src/frontend/vite.config.ts", + "build": "tsc", + "build:frontend": "vite build --config src/frontend/vite.config.ts", + "start": "node dist/index.js", + "test": "vitest run", + "db:generate": "drizzle-kit generate", + "db:migrate": "tsx src/db/migrate.ts" + }, + "engines": { + "node": ">=18.0.0" + }, + "dependencies": { + "@fastify/cors": "^11.0.0", + "@fastify/rate-limit": "^10.2.1", + "@fastify/static": "^9.0.0", + "@libsql/client": "^0.14.0", + "@tanstack/react-query": "^5.95.0", + "croner": "^10.0.1", + "dotenv": "^16.4.7", + "drizzle-orm": "^0.38.4", + "fastify": "^5.2.1", + "fastify-plugin": "^5.1.0", + "lucide-react": "^0.577.0", + "react": "^19.2.4", + "react-dom": "^19.2.4", + "react-router": "^7.13.1", + "react-router-dom": "^7.13.1" + }, + "devDependencies": { + "@types/node": "^22.12.0", + "@types/react": "^19.2.14", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^4.7.0", + "drizzle-kit": "^0.30.4", + "tsx": "^4.19.2", + "typescript": "^5.7.3", + "vitest": "^3.0.5" + } +} diff --git a/scripts/docker-smoke-test.sh b/scripts/docker-smoke-test.sh new file mode 100644 index 0000000..b14a03b --- /dev/null +++ b/scripts/docker-smoke-test.sh @@ -0,0 +1,239 @@ +#!/usr/bin/env bash +# ============================================================ +# Docker Smoke Test — Tubearr +# +# Builds the Docker image, starts a container, and verifies +# core endpoints work end-to-end. Tests restart persistence. 
+# +# Usage: bash scripts/docker-smoke-test.sh +# ============================================================ + +set -euo pipefail + +# ── Configuration ── + +IMAGE_NAME="tubearr" +# Container name must match docker-compose.yml container_name +CONTAINER_NAME="tubearr" +PORT=8989 +HEALTH_TIMEOUT=90 # seconds to wait for healthy status +COMPOSE_FILE="docker-compose.yml" + +# ── Color output helpers ── + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +pass() { echo -e "${GREEN}✓ $1${NC}"; } +fail() { echo -e "${RED}✗ $1${NC}"; } +info() { echo -e "${YELLOW}→ $1${NC}"; } + +# ── Cleanup trap ── + +cleanup() { + info "Cleaning up..." + docker-compose -f "$COMPOSE_FILE" down --volumes --remove-orphans 2>/dev/null || true + # Remove any leftover config/media dirs created by compose bind mounts + rm -rf ./config ./media 2>/dev/null || true +} +trap cleanup EXIT + +# ── Pre-check: Ensure port is not already in use ── + +info "Checking port $PORT availability" +if curl -sf "http://localhost:${PORT}/ping" >/dev/null 2>&1; then + fail "Port $PORT is already in use — another service is running. Stop it before running this test." + exit 1 +fi +pass "Port $PORT is available" + +# ── Step 1: Build Docker image ── + +info "Building Docker image: $IMAGE_NAME" +docker build -t "$IMAGE_NAME" . 
|| { + fail "Docker build failed" + exit 1 +} +pass "Docker image built successfully" + +# ── Step 2: Start container via docker-compose ── + +info "Starting container via docker-compose" +docker-compose -f "$COMPOSE_FILE" up -d || { + fail "docker-compose up failed" + exit 1 +} +pass "Container started" + +# ── Step 3: Wait for healthy ── + +info "Waiting for container to become healthy (timeout: ${HEALTH_TIMEOUT}s)" +elapsed=0 +while [ $elapsed -lt $HEALTH_TIMEOUT ]; do + status=$(docker inspect --format='{{.State.Health.Status}}' "$CONTAINER_NAME" 2>/dev/null || echo "unknown") + if [ "$status" = "healthy" ]; then + break + fi + if [ "$status" = "unhealthy" ]; then + fail "Container became unhealthy" + echo "Container logs:" + docker logs "$CONTAINER_NAME" 2>&1 | tail -30 + exit 1 + fi + sleep 2 + elapsed=$((elapsed + 2)) +done + +if [ "$status" != "healthy" ]; then + fail "Container did not become healthy within ${HEALTH_TIMEOUT}s (status: $status)" + echo "Container logs:" + docker logs "$CONTAINER_NAME" 2>&1 | tail -30 + exit 1 +fi +pass "Container is healthy" + +# ── Step 4: Verify /ping (unauthenticated) ── + +info "Testing GET /ping" +PING_RESPONSE=$(curl -sf "http://localhost:${PORT}/ping" 2>&1) || { + fail "GET /ping failed" + exit 1 +} + +if echo "$PING_RESPONSE" | grep -q '"status":"ok"'; then + pass "GET /ping returns {\"status\":\"ok\"}" +else + fail "GET /ping unexpected response: $PING_RESPONSE" + exit 1 +fi + +# ── Step 5: Extract API key from container logs ── + +info "Extracting API key from container logs" +# The auth plugin logs the generated key in a banner like: +# API Key generated (save this — it will not be shown again): +# +# We look for a UUID-like string on a line by itself after the banner text. +# On restart with persisted state, the key won't be in logs — use TUBEARR_API_KEY env var as fallback. 
+API_KEY=$(docker logs "$CONTAINER_NAME" 2>&1 | grep -oE '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}' | head -1 || true) + +if [ -z "$API_KEY" ]; then + fail "Could not extract API key from container logs" + echo "Container logs:" + docker logs "$CONTAINER_NAME" 2>&1 | tail -20 + exit 1 +fi +pass "API key extracted: ${API_KEY:0:8}...${API_KEY: -4}" + +# ── Step 6: Verify /api/v1/health (authenticated) ── + +info "Testing GET /api/v1/health" +HEALTH_RESPONSE=$(curl -sf -H "X-Api-Key: $API_KEY" "http://localhost:${PORT}/api/v1/health" 2>&1) || { + fail "GET /api/v1/health failed" + exit 1 +} + +if echo "$HEALTH_RESPONSE" | grep -q '"status"'; then + pass "GET /api/v1/health returns health status" +else + fail "GET /api/v1/health unexpected response: $HEALTH_RESPONSE" + exit 1 +fi + +# ── Step 7: Verify /api/v1/system/status (authenticated) ── + +info "Testing GET /api/v1/system/status" +STATUS_RESPONSE=$(curl -sf -H "X-Api-Key: $API_KEY" "http://localhost:${PORT}/api/v1/system/status" 2>&1) || { + fail "GET /api/v1/system/status failed" + exit 1 +} + +if echo "$STATUS_RESPONSE" | grep -q '"appName":"Tubearr"'; then + pass "GET /api/v1/system/status returns appName=Tubearr" +else + fail "GET /api/v1/system/status unexpected response: $STATUS_RESPONSE" + exit 1 +fi + +# ── Step 8: Verify auth rejection ── + +info "Testing auth rejection (no API key)" +AUTH_CODE=$(curl -s -o /dev/null -w '%{http_code}' "http://localhost:${PORT}/api/v1/system/status" 2>&1) + +if [ "$AUTH_CODE" = "401" ]; then + pass "Unauthenticated request correctly returns 401" +else + fail "Expected 401, got $AUTH_CODE" + exit 1 +fi + +# ── Step 9: Test restart persistence ── + +info "Testing container restart persistence" + +# Record the API key before restart +PRE_RESTART_KEY="$API_KEY" + +# Restart the container +docker-compose -f "$COMPOSE_FILE" restart || { + fail "docker-compose restart failed" + exit 1 +} + +# Wait for healthy again +info "Waiting for container to become 
healthy after restart" +elapsed=0 +while [ $elapsed -lt $HEALTH_TIMEOUT ]; do + status=$(docker inspect --format='{{.State.Health.Status}}' "$CONTAINER_NAME" 2>/dev/null || echo "unknown") + if [ "$status" = "healthy" ]; then + break + fi + if [ "$status" = "unhealthy" ]; then + fail "Container became unhealthy after restart" + docker logs "$CONTAINER_NAME" 2>&1 | tail -20 + exit 1 + fi + sleep 2 + elapsed=$((elapsed + 2)) +done + +if [ "$status" != "healthy" ]; then + fail "Container did not become healthy after restart within ${HEALTH_TIMEOUT}s" + docker logs "$CONTAINER_NAME" 2>&1 | tail -20 + exit 1 +fi +pass "Container healthy after restart" + +# Verify /ping still works +PING_AFTER=$(curl -sf "http://localhost:${PORT}/ping" 2>&1) || { + fail "GET /ping failed after restart" + exit 1 +} +if echo "$PING_AFTER" | grep -q '"status":"ok"'; then + pass "GET /ping works after restart" +else + fail "GET /ping unexpected response after restart: $PING_AFTER" + exit 1 +fi + +# Verify the same API key works (state persisted via volume) +HEALTH_AFTER=$(curl -sf -H "X-Api-Key: $PRE_RESTART_KEY" "http://localhost:${PORT}/api/v1/health" 2>&1) || { + fail "GET /api/v1/health failed after restart with pre-restart API key" + exit 1 +} +if echo "$HEALTH_AFTER" | grep -q '"status"'; then + pass "Pre-restart API key still works — state persisted" +else + fail "API key state not preserved across restart" + exit 1 +fi + +# ── Done ── + +echo "" +echo -e "${GREEN}═══════════════════════════════════════════${NC}" +echo -e "${GREEN} SMOKE TEST PASSED${NC}" +echo -e "${GREEN}═══════════════════════════════════════════${NC}" +echo "" diff --git a/skills-lock.json b/skills-lock.json new file mode 100644 index 0000000..6cf8914 --- /dev/null +++ b/skills-lock.json @@ -0,0 +1,25 @@ +{ + "version": 1, + "skills": { + "drizzle-migrations": { + "source": "bobmatnyc/claude-mpm-skills", + "sourceType": "github", + "computedHash": "b5e3d1249589aebf83c8308f8feacb6da6e38a21995946ea5a6a6522da898507" 
+ }, + "drizzle-orm": { + "source": "bobmatnyc/claude-mpm-skills", + "sourceType": "github", + "computedHash": "c5132317134698624d023cbbbb612b99d2b20f532f6d1417639607cf401f03cd" + }, + "fastify-best-practices": { + "source": "mcollina/skills", + "sourceType": "github", + "computedHash": "b3a771fa66bc5d8dac0af14e99e51fca2ff4a9add56f09d986778528bdf72c4c" + }, + "fastify-typescript": { + "source": "mindrally/skills", + "sourceType": "github", + "computedHash": "d133948c2f5af7fbed1a559cd21a1cbb6abfb5ef25b90c7e3fd3064a422af993" + } + } +} diff --git a/src/__tests__/auth-model.test.ts b/src/__tests__/auth-model.test.ts new file mode 100644 index 0000000..7c6c970 --- /dev/null +++ b/src/__tests__/auth-model.test.ts @@ -0,0 +1,252 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; + +/** + * Auth model tests: verify dual-mode authentication. 
+ * + * The Tubearr auth model (matching Sonarr/Radarr): + * - Same-origin browser requests (Origin/Referer matching server host) are trusted + * - External requests require a valid API key via header or query param + * - API key management endpoints allow reading and regenerating the key + */ + +describe('Auth model — dual-mode authentication', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-auth-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read the generated API key from the database + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? ''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Same-origin bypass ── + + describe('Same-origin bypass (trusted browser requests)', () => { + it('allows request with matching Origin header — no API key needed', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { + origin: 'http://localhost:3000', + }, + }); + + expect(res.statusCode).toBe(200); + expect(res.json()).toHaveProperty('appName', 'Tubearr'); + }); + + it('allows request with matching Referer header — no API key needed', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { + referer: 'http://localhost:8989/settings', + }, + }); + + expect(res.statusCode).toBe(200); + expect(res.json()).toHaveProperty('appName', 'Tubearr'); + }); + + it('rejects 
cross-origin request (different hostname) without API key', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { + origin: 'http://evil.example.com:8989', + }, + }); + + expect(res.statusCode).toBe(401); + expect(res.json().message).toContain('API key'); + }); + }); + + // ── External API key authentication ── + + describe('External API key authentication', () => { + it('rejects external request without API key (no Origin/Referer)', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + }); + + expect(res.statusCode).toBe(401); + const body = res.json(); + expect(body.error).toBe('Unauthorized'); + expect(body.message).toContain('API key'); + }); + + it('allows external request with valid API key via X-Api-Key header', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + expect(res.json()).toHaveProperty('appName', 'Tubearr'); + }); + + it('allows external request with valid API key via apikey query param', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/system/status?apikey=${apiKey}`, + }); + + expect(res.statusCode).toBe(200); + expect(res.json()).toHaveProperty('appName', 'Tubearr'); + }); + + it('rejects external request with invalid API key', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { 'x-api-key': 'totally-wrong-key' }, + }); + + expect(res.statusCode).toBe(401); + expect(res.json().message).toBe('Invalid API key'); + }); + }); + + // ── API key management endpoints ── + + describe('GET /api/v1/system/apikey', () => { + it('returns the current API key for same-origin requests', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/apikey', + headers: { + origin: 'http://localhost:8989', + }, + 
}); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body).toHaveProperty('apiKey'); + expect(body.apiKey).toBe(apiKey); + }); + + it('returns the current API key for API-key-authenticated requests', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/apikey', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + expect(res.json().apiKey).toBe(apiKey); + }); + + it('rejects unauthenticated external requests', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/apikey', + }); + + expect(res.statusCode).toBe(401); + }); + }); + + describe('POST /api/v1/system/apikey/regenerate', () => { + it('regenerates the API key and returns the new one', async () => { + const oldKey = apiKey; + + // Regenerate using same-origin auth + const res = await server.inject({ + method: 'POST', + url: '/api/v1/system/apikey/regenerate', + headers: { + origin: 'http://localhost:8989', + }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body).toHaveProperty('apiKey'); + expect(body.apiKey).not.toBe(oldKey); + expect(body.apiKey).toMatch( + /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/ + ); + + // Update our local reference for subsequent tests + apiKey = body.apiKey; + }); + + it('old API key no longer works for external requests after regeneration', async () => { + // The previous test regenerated the key, so the original key should be invalid + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { 'x-api-key': 'the-original-key-is-gone' }, + }); + + expect(res.statusCode).toBe(401); + }); + + it('new API key works for external requests after regeneration', async () => { + // Read the current key from the DB to be sure we have the right one + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + const currentKey 
= rows[0]?.value ?? ''; + + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { 'x-api-key': currentKey }, + }); + + expect(res.statusCode).toBe(200); + }); + + it('rejects unauthenticated external regeneration requests', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/system/apikey/regenerate', + }); + + expect(res.statusCode).toBe(401); + }); + }); +}); diff --git a/src/__tests__/back-catalog-import.test.ts b/src/__tests__/back-catalog-import.test.ts new file mode 100644 index 0000000..109f791 --- /dev/null +++ b/src/__tests__/back-catalog-import.test.ts @@ -0,0 +1,452 @@ +import { describe, it, expect, vi, beforeAll, afterAll, beforeEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { + createContentItem, + getContentByChannelId, +} from '../db/repositories/content-repository'; +import { + getQueueItemByContentItemId, + getPendingQueueItems, +} from '../db/repositories/queue-repository'; +import { BackCatalogImportService } from '../services/back-catalog-import'; +import { PlatformRegistry } from '../sources/platform-source'; +import { YouTubeSource } from '../sources/youtube'; +import { SoundCloudSource } from '../sources/soundcloud'; +import { QueueService } from '../services/queue'; +import type { Channel, PlatformContentMetadata } from '../types/index'; + +// ── Mock yt-dlp ── + +const { execYtDlpMock 
} = vi.hoisted(() => ({ + execYtDlpMock: vi.fn(), +})); + +vi.mock('../sources/yt-dlp', async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + execYtDlp: execYtDlpMock, + }; +}); + +// ── Canned data ── + +function makeYtDlpEntry(id: string, title: string) { + return { + id, + title, + url: `https://www.youtube.com/watch?v=${id}`, + webpage_url: `https://www.youtube.com/watch?v=${id}`, + duration: 600, + thumbnail: `https://i.ytimg.com/vi/${id}/maxresdefault.jpg`, + live_status: 'not_live', + }; +} + +/** Generate yt-dlp NDJSON output for N entries */ +function makeNdjsonOutput(count: number, prefix = 'vid'): string { + return Array.from({ length: count }, (_, i) => + JSON.stringify(makeYtDlpEntry(`${prefix}_${i + 1}`, `Video ${i + 1}`)) + ).join('\n'); +} + +const YOUTUBE_CHANNEL_JSON = JSON.stringify({ + channel: 'Import Channel', + channel_id: 'UC_IMPORT_TEST', + channel_url: 'https://www.youtube.com/@ImportChannel', + uploader: 'Import Channel', + uploader_url: 'https://www.youtube.com/@ImportChannel', + thumbnails: [{ url: 'https://yt.com/thumb.jpg' }], +}); + +// ── Test setup ── + +describe('BackCatalogImportService', () => { + let db: LibSQLDatabase; + let tmpDir: string; + let testChannel: Channel; + let registry: PlatformRegistry; + let mockDownloadService: { downloadItem: ReturnType }; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-import-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + }); + + afterAll(() => { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort + } + }); + + beforeEach(() => { + execYtDlpMock.mockReset(); + mockDownloadService = { downloadItem: vi.fn().mockResolvedValue(undefined) }; + }); + + async function createTestChannel(suffix = ''): Promise { + return createChannel(db, { + name: 
`Import Test Channel${suffix}`, + platform: 'youtube', + platformId: `UC_IMPORT_TEST${suffix}`, + url: `https://www.youtube.com/@ImportChannel${suffix}`, + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + } + + function createImportService(concurrency = 0): { + importService: BackCatalogImportService; + queueService: QueueService; + } { + const queueService = new QueueService(db, mockDownloadService as any, concurrency); + queueService.stop(); // Prevent auto-processing during tests + registry = new PlatformRegistry(); + registry.register('youtube' as any, new YouTubeSource()); + registry.register('soundcloud' as any, new SoundCloudSource()); + const importService = new BackCatalogImportService(db, registry, queueService); + return { importService, queueService }; + } + + // ── Import fetches and inserts ── + + describe('importChannel', () => { + it('fetches all content and inserts new items', async () => { + const channel = await createTestChannel('_fetch'); + + // Mock fetchAllContent → yt-dlp returns 5 entries + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(5, 'fetch'), + stderr: '', + exitCode: 0, + }); + + const { importService } = createImportService(); + const result = await importService.importChannel(channel.id, 'newest'); + + expect(result.found).toBe(5); + expect(result.imported).toBe(5); + expect(result.skipped).toBe(0); + + // Verify content items were created in DB + const content = await getContentByChannelId(db, channel.id); + expect(content.length).toBe(5); + }); + + it('deduplicates — second import inserts 0 new items', async () => { + const channel = await createTestChannel('_dedup'); + + // First import + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(3, 'dedup'), + stderr: '', + exitCode: 0, + }); + + const { importService } = createImportService(); + const result1 = await importService.importChannel(channel.id, 'newest'); + 
expect(result1.imported).toBe(3); + expect(result1.skipped).toBe(0); + + // Second import — same content IDs + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(3, 'dedup'), + stderr: '', + exitCode: 0, + }); + + const result2 = await importService.importChannel(channel.id, 'newest'); + expect(result2.found).toBe(3); + expect(result2.imported).toBe(0); + expect(result2.skipped).toBe(3); + }); + + it('enqueues imported items at priority -10', async () => { + const channel = await createTestChannel('_priority'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(2, 'prio'), + stderr: '', + exitCode: 0, + }); + + const { importService } = createImportService(); + await importService.importChannel(channel.id, 'newest'); + + // Verify content was created and enqueued + const content = await getContentByChannelId(db, channel.id); + expect(content.length).toBe(2); + + for (const item of content) { + const queueItem = await getQueueItemByContentItemId(db, item.id); + expect(queueItem).not.toBeNull(); + expect(queueItem!.priority).toBe(-10); + expect(queueItem!.status).toBe('pending'); + } + }); + + it("'oldest' order reverses insertion order", async () => { + const channel = await createTestChannel('_order'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(4, 'ord'), + stderr: '', + exitCode: 0, + }); + + const { importService } = createImportService(); + await importService.importChannel(channel.id, 'oldest'); + + // Content items should be inserted in reversed order. + // yt-dlp returns [ord_1, ord_2, ord_3, ord_4]. With 'oldest' order, + // the array is reversed to [ord_4, ord_3, ord_2, ord_1], so ord_4 + // is inserted first (lowest id) and ord_1 last (highest id). + const content = await getContentByChannelId(db, channel.id); + expect(content.length).toBe(4); + + // getContentByChannelId returns newest first (ORDER BY createdAt DESC, id DESC implied). 
+ // Since all items have the same createdAt (same second), we verify via ID ordering. + // ord_4 was inserted first → lowest DB id; ord_1 was inserted last → highest DB id + const sortedById = [...content].sort((a, b) => a.id - b.id); + expect(sortedById[0].platformContentId).toBe('ord_4'); // First inserted (lowest id) + expect(sortedById[sortedById.length - 1].platformContentId).toBe('ord_1'); // Last inserted (highest id) + }); + + it('handles missing channel gracefully', async () => { + const { importService } = createImportService(); + + await expect(importService.importChannel(99999, 'newest')).rejects.toThrow( + /Channel 99999 not found/ + ); + }); + + it('handles platform source fetch errors gracefully', async () => { + const channel = await createTestChannel('_fetcherr'); + + // Mock yt-dlp to fail + const { YtDlpError } = await import('../sources/yt-dlp'); + execYtDlpMock.mockRejectedValueOnce( + new YtDlpError('yt-dlp exited with code 1', 'network error', 1) + ); + + const { importService } = createImportService(); + + await expect(importService.importChannel(channel.id, 'newest')).rejects.toThrow( + /yt-dlp exited/ + ); + }); + + it('individual enqueue failures do not abort the import', async () => { + const channel = await createTestChannel('_enqfail'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(3, 'enqfail'), + stderr: '', + exitCode: 0, + }); + + // Create a service where the queue throws on the second enqueue + const queueService = new QueueService(db, mockDownloadService as any, 0); + queueService.stop(); + + // Spy on enqueue to make it fail on the second call + let enqueueCallCount = 0; + const originalEnqueue = queueService.enqueue.bind(queueService); + vi.spyOn(queueService, 'enqueue').mockImplementation(async (contentItemId, priority) => { + enqueueCallCount++; + if (enqueueCallCount === 2) { + throw new Error('Simulated enqueue failure'); + } + return originalEnqueue(contentItemId, priority); + }); + + registry = 
new PlatformRegistry(); + registry.register('youtube' as any, new YouTubeSource()); + const importService = new BackCatalogImportService(db, registry, queueService); + + const result = await importService.importChannel(channel.id, 'newest'); + + // All 3 items should be imported (content created), even though one enqueue failed + expect(result.found).toBe(3); + expect(result.imported).toBe(3); + expect(result.skipped).toBe(0); + }); + + // ── monitoringMode-aware import tests ── + + it("imports items with monitored=false when channel monitoringMode is 'future'", async () => { + const channel = await createTestChannel('_mode_future'); + + // Update channel's monitoringMode to 'future' via direct DB creation + // (createTestChannel defaults to 'all', so we need a custom one) + const futureChannel = await createChannel(db, { + name: 'Future Mode Channel', + platform: 'youtube', + platformId: `UC_FUTURE_MODE_${Date.now()}`, + url: 'https://www.youtube.com/@FutureMode', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + monitoringMode: 'future', + }); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(3, `mode_future_${futureChannel.id}`), + stderr: '', + exitCode: 0, + }); + + const { importService } = createImportService(); + const result = await importService.importChannel(futureChannel.id, 'newest'); + + expect(result.found).toBe(3); + expect(result.imported).toBe(3); + + // Back-catalog is *existing* content, mode 'future' → monitored=false + const content = await getContentByChannelId(db, futureChannel.id); + expect(content.length).toBe(3); + for (const item of content) { + expect(item.monitored).toBe(false); + } + }); + + it("imports items with monitored=true when channel monitoringMode is 'existing'", async () => { + const existingChannel = await createChannel(db, { + name: 'Existing Mode Channel', + platform: 'youtube', + platformId: `UC_EXISTING_MODE_${Date.now()}`, + url: 
'https://www.youtube.com/@ExistingMode', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + monitoringMode: 'existing', + }); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(3, `mode_existing_${existingChannel.id}`), + stderr: '', + exitCode: 0, + }); + + const { importService } = createImportService(); + const result = await importService.importChannel(existingChannel.id, 'newest'); + + expect(result.found).toBe(3); + expect(result.imported).toBe(3); + + // Back-catalog is *existing* content, mode 'existing' → monitored=true + const content = await getContentByChannelId(db, existingChannel.id); + expect(content.length).toBe(3); + for (const item of content) { + expect(item.monitored).toBe(true); + } + }); + }); + + // ── Integration via channel POST endpoint ── + + describe('Channel POST endpoint with grabAll', () => { + let server: FastifyInstance; + let apiKey: string; + + beforeAll(async () => { + server = await buildServer({ db }); + await server.ready(); + + // Read auto-generated API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + await server.close(); + }); + + it('returns 201 and triggers import when grabAll is true', async () => { + // Mock resolveChannel + execYtDlpMock.mockResolvedValueOnce({ + stdout: JSON.stringify({ + channel: 'Grab All Channel', + channel_id: 'UC_GRABALL_TEST', + channel_url: 'https://www.youtube.com/@GrabAll', + uploader: 'Grab All Channel', + thumbnails: [{ url: 'https://yt.com/thumb.jpg' }], + }), + stderr: '', + exitCode: 0, + }); + + // Mock fetchAllContent (will be called async by the fire-and-forget import) + execYtDlpMock.mockResolvedValueOnce({ + stdout: makeNdjsonOutput(3, 'graball'), + stderr: '', + exitCode: 0, + }); + + // Need queueService on the server for import to run + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + server.queueService = qs; + + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + payload: { + url: 'https://www.youtube.com/@GrabAll', + grabAll: true, + grabAllOrder: 'newest', + }, + }); + + expect(res.statusCode).toBe(201); + const body = res.json(); + expect(body.name).toBe('Grab All Channel'); + expect(body.platform).toBe('youtube'); + + // Give the fire-and-forget import a moment to complete + await new Promise((resolve) => setTimeout(resolve, 300)); + + // Verify content items were created by the async import + const content = await getContentByChannelId(db, body.id); + expect(content.length).toBe(3); + + // Verify queue items exist at priority -10 + for (const item of content) { + const queueItem = await getQueueItemByContentItemId(db, item.id); + expect(queueItem).not.toBeNull(); + expect(queueItem!.priority).toBe(-10); + } + }); + }); +}); diff --git a/src/__tests__/channel-counts.test.ts b/src/__tests__/channel-counts.test.ts new file mode 100644 index 0000000..de40b10 --- /dev/null +++ b/src/__tests__/channel-counts.test.ts @@ -0,0 +1,299 @@ +import { describe, 
it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import type { Channel, ContentItem } from '../types/index'; +import type { ContentCounts } from '../types/api'; + +/** + * Integration tests for GET /api/v1/channel with contentCounts. + * + * Verifies that the channel list endpoint returns per-channel aggregated + * content counts (total, monitored, downloaded) as part of each channel object. + */ +describe('Channel contentCounts API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + let channelA: Channel; + let channelB: Channel; + let channelEmpty: Channel; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-channel-counts-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read auto-generated API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + + // ── Seed channels ── + + // Channel A: will have mixed content + channelA = await createChannel(db, { + name: 'Channel Alpha', + platform: 'youtube', + platformId: 'UC_ALPHA_COUNTS', + url: 'https://www.youtube.com/channel/UC_ALPHA_COUNTS', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + // Channel B: will have independent content + channelB = await createChannel(db, { + name: 'Channel Beta', + platform: 'youtube', + platformId: 'UC_BETA_COUNTS', + url: 'https://www.youtube.com/channel/UC_BETA_COUNTS', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + // Channel Empty: no content items + channelEmpty = await createChannel(db, { + name: 'Channel Empty', + platform: 'soundcloud', + platformId: 'sc_empty_counts', + url: 'https://soundcloud.com/empty-counts', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + // ── Seed content for Channel A (5 items: 3 monitored, 2 not; 2 downloaded) ── + + await createContentItem(db, { + channelId: channelA.id, + title: 'Alpha Vid 1', + platformContentId: 'cnt_a1', + url: 'https://youtube.com/watch?v=cnt_a1', + contentType: 'video', + duration: 300, + monitored: true, + status: 'downloaded', + }); + await createContentItem(db, { + channelId: channelA.id, + title: 'Alpha Vid 2', + platformContentId: 'cnt_a2', + url: 'https://youtube.com/watch?v=cnt_a2', + contentType: 'video', + duration: 300, + monitored: true, + status: 'downloaded', + }); + await createContentItem(db, { + channelId: channelA.id, + title: 'Alpha Vid 3', + platformContentId: 'cnt_a3', + url: 'https://youtube.com/watch?v=cnt_a3', + contentType: 'video', + duration: 300, + monitored: true, + status: 'monitored', + }); + await createContentItem(db, { + channelId: channelA.id, + title: 'Alpha Vid 4', + platformContentId: 
'cnt_a4', + url: 'https://youtube.com/watch?v=cnt_a4', + contentType: 'video', + duration: 300, + monitored: false, + status: 'ignored', + }); + await createContentItem(db, { + channelId: channelA.id, + title: 'Alpha Vid 5', + platformContentId: 'cnt_a5', + url: 'https://youtube.com/watch?v=cnt_a5', + contentType: 'video', + duration: 300, + monitored: false, + status: 'monitored', + }); + + // ── Seed content for Channel B (2 items: 1 monitored, 1 downloaded) ── + + await createContentItem(db, { + channelId: channelB.id, + title: 'Beta Vid 1', + platformContentId: 'cnt_b1', + url: 'https://youtube.com/watch?v=cnt_b1', + contentType: 'video', + duration: 600, + monitored: true, + status: 'monitored', + }); + await createContentItem(db, { + channelId: channelB.id, + title: 'Beta Vid 2', + platformContentId: 'cnt_b2', + url: 'https://youtube.com/watch?v=cnt_b2', + contentType: 'video', + duration: 600, + monitored: true, + status: 'downloaded', + }); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows (K004) + } + }); + + // ── Helpers ── + + function getChannelCounts(body: Array<{ id: number; contentCounts: ContentCounts }>, id: number) { + const channel = body.find((c) => c.id === id); + expect(channel).toBeDefined(); + return channel!.contentCounts; + } + + // ── Tests ── + + describe('GET /api/v1/channel with contentCounts', () => { + it('returns contentCounts on every channel object', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(Array.isArray(body)).toBe(true); + expect(body.length).toBeGreaterThanOrEqual(3); + + // Every channel must have contentCounts + for (const channel of body) { + 
expect(channel).toHaveProperty('contentCounts'); + expect(channel.contentCounts).toHaveProperty('total'); + expect(channel.contentCounts).toHaveProperty('monitored'); + expect(channel.contentCounts).toHaveProperty('downloaded'); + expect(typeof channel.contentCounts.total).toBe('number'); + expect(typeof channel.contentCounts.monitored).toBe('number'); + expect(typeof channel.contentCounts.downloaded).toBe('number'); + } + }); + + it('channel with no content has zero counts', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + + const counts = getChannelCounts(res.json(), channelEmpty.id); + expect(counts).toEqual({ total: 0, monitored: 0, downloaded: 0 }); + }); + + it('channel with mixed content has correct counts', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + + // Channel A: 5 total, 3 monitored, 2 downloaded + const counts = getChannelCounts(res.json(), channelA.id); + expect(counts.total).toBe(5); + expect(counts.monitored).toBe(3); + expect(counts.downloaded).toBe(2); + }); + + it('multiple channels have independent counts', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + + const body = res.json(); + const countsA = getChannelCounts(body, channelA.id); + const countsB = getChannelCounts(body, channelB.id); + + // Channel A: 5 total, 3 monitored, 2 downloaded + expect(countsA).toEqual({ total: 5, monitored: 3, downloaded: 2 }); + + // Channel B: 2 total, 2 monitored, 1 downloaded + expect(countsB).toEqual({ total: 2, monitored: 2, downloaded: 1 }); + }); + + it('counts update after toggling monitored state', async () => { + // Toggle monitored on Channel A's first content item (cnt_a1) from true to false + // Need to find the item ID first + const contentRes = await server.inject({ + method: 'GET', 
+ url: '/api/v1/content', + headers: { 'x-api-key': apiKey }, + query: { channelId: String(channelA.id) }, + }); + + const contentBody = contentRes.json(); + const items = contentBody.data ?? contentBody; + const targetItem = (Array.isArray(items) ? items : []).find( + (i: ContentItem) => i.platformContentId === 'cnt_a1' + ); + expect(targetItem).toBeDefined(); + + // Toggle monitored off + const toggleRes = await server.inject({ + method: 'PATCH', + url: `/api/v1/content/${targetItem.id}/monitored`, + headers: { 'x-api-key': apiKey }, + payload: { monitored: false }, + }); + expect(toggleRes.statusCode).toBe(200); + + // Re-fetch channel list and check updated counts + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + + const counts = getChannelCounts(res.json(), channelA.id); + // Was: 5 total, 3 monitored, 2 downloaded + // Now: 5 total, 2 monitored, 2 downloaded (one less monitored) + expect(counts.total).toBe(5); + expect(counts.monitored).toBe(2); + expect(counts.downloaded).toBe(2); + }); + }); +}); diff --git a/src/__tests__/channel.test.ts b/src/__tests__/channel.test.ts new file mode 100644 index 0000000..33d77af --- /dev/null +++ b/src/__tests__/channel.test.ts @@ -0,0 +1,451 @@ +import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; + +// ── Mock yt-dlp to avoid real subprocess calls ── + +// Mock the yt-dlp module so resolveChannel 
doesn't invoke the real binary. +// We intercept at the yt-dlp wrapper level — platform sources call execYtDlp +// which we replace with a function returning canned JSON. +const { execYtDlpMock } = vi.hoisted(() => ({ + execYtDlpMock: vi.fn(), +})); + +vi.mock('../sources/yt-dlp', async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + execYtDlp: execYtDlpMock, + }; +}); + +// ── Canned yt-dlp Responses ── + +const YOUTUBE_CHANNEL_JSON = JSON.stringify({ + channel: 'Tech Channel', + channel_id: 'UC_YOUTUBE_123', + channel_url: 'https://www.youtube.com/@TechChannel', + uploader: 'Tech Channel', + uploader_url: 'https://www.youtube.com/@TechChannel', + thumbnails: [ + { url: 'https://yt.com/thumb_small.jpg' }, + { url: 'https://yt.com/thumb_large.jpg' }, + ], +}); + +const SOUNDCLOUD_ARTIST_JSON = JSON.stringify({ + uploader: 'Beat Artist', + uploader_id: 'beat-artist', + uploader_url: 'https://soundcloud.com/beat-artist', + channel: null, + channel_url: null, + thumbnails: [{ url: 'https://sc.com/avatar.jpg' }], +}); + +/** + * Integration tests for channel CRUD API. + * + * Pattern follows server.integration.test.ts: temp DB, migrations, + * buildServer, inject(). yt-dlp is mocked to avoid subprocess dependency. + */ +describe('Channel API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-channel-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read auto-generated API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows (K004) + } + }); + + // ── Auth ── + + describe('Authentication', () => { + it('returns 401 on POST without API key', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + payload: { url: 'https://www.youtube.com/@Test' }, + }); + expect(res.statusCode).toBe(401); + }); + + it('returns 401 on GET without API key', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + }); + expect(res.statusCode).toBe(401); + }); + + it('returns 401 on GET /:id without API key', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel/1', + }); + expect(res.statusCode).toBe(401); + }); + + it('returns 401 on PUT without API key', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/channel/1', + payload: { name: 'Updated' }, + }); + expect(res.statusCode).toBe(401); + }); + + it('returns 401 on DELETE without API key', async () => { + const res = await server.inject({ + method: 'DELETE', + url: '/api/v1/channel/1', + }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── POST /api/v1/channel ── + + describe('POST /api/v1/channel', () => { + it('creates a YouTube channel and returns 201 with resolved metadata', async () => { + execYtDlpMock.mockResolvedValueOnce({ + stdout: YOUTUBE_CHANNEL_JSON, + stderr: '', + exitCode: 0, + }); + + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + payload: { + url: 'https://www.youtube.com/@TechChannel', + checkInterval: 120, + monitoringEnabled: true, + }, + }); + + expect(res.statusCode).toBe(201); + const body = res.json(); + expect(body.name).toBe('Tech 
Channel'); + expect(body.platform).toBe('youtube'); + expect(body.platformId).toBe('UC_YOUTUBE_123'); + expect(body.url).toBe('https://www.youtube.com/@TechChannel'); + expect(body.checkInterval).toBe(120); + expect(body.monitoringEnabled).toBe(true); + expect(body.imageUrl).toBe('https://yt.com/thumb_large.jpg'); + expect(body.id).toBeTypeOf('number'); + expect(body.lastCheckedAt).toBeNull(); + expect(body.lastCheckStatus).toBeNull(); + }); + + it('creates a SoundCloud channel with correct platform field', async () => { + execYtDlpMock.mockResolvedValueOnce({ + stdout: SOUNDCLOUD_ARTIST_JSON, + stderr: '', + exitCode: 0, + }); + + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + payload: { url: 'https://soundcloud.com/beat-artist' }, + }); + + expect(res.statusCode).toBe(201); + const body = res.json(); + expect(body.name).toBe('Beat Artist'); + expect(body.platform).toBe('soundcloud'); + expect(body.platformId).toBe('beat-artist'); + expect(body.monitoringEnabled).toBe(true); // default + expect(body.checkInterval).toBe(360); // default + }); + + it('returns 422 for unsupported URL', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + payload: { url: 'https://www.example.com/not-a-platform' }, + }); + + expect(res.statusCode).toBe(422); + const body = res.json(); + expect(body.message).toContain('Unsupported URL'); + }); + + it('returns 409 Conflict for duplicate channel (same platformId)', async () => { + // The YouTube channel from the first test already exists with platformId UC_YOUTUBE_123 + execYtDlpMock.mockResolvedValueOnce({ + stdout: YOUTUBE_CHANNEL_JSON, + stderr: '', + exitCode: 0, + }); + + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + payload: { url: 'https://www.youtube.com/@TechChannel' }, + }); + + expect(res.statusCode).toBe(409); + 
const body = res.json(); + expect(body.error).toBe('Conflict'); + expect(body.message).toContain('already exists'); + }); + + it('returns 502 when yt-dlp fails', async () => { + // Import YtDlpError to throw from mock + const { YtDlpError } = await import('../sources/yt-dlp'); + execYtDlpMock.mockRejectedValueOnce( + new YtDlpError('yt-dlp exited with code 1: network error', 'network error', 1) + ); + + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + payload: { url: 'https://www.youtube.com/@BrokenChannel' }, + }); + + expect(res.statusCode).toBe(502); + const body = res.json(); + expect(body.error).toBe('Bad Gateway'); + }); + + it('returns 400 when body is missing url field', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + payload: {}, + }); + + expect(res.statusCode).toBe(400); + }); + }); + + // ── GET /api/v1/channel ── + + describe('GET /api/v1/channel', () => { + it('returns all created channels', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(Array.isArray(body)).toBe(true); + // We created YouTube + SoundCloud channels earlier + expect(body.length).toBeGreaterThanOrEqual(2); + + // Verify ordering by name (Beat Artist before Tech Channel) + const names = body.map((c: { name: string }) => c.name); + expect(names).toContain('Tech Channel'); + expect(names).toContain('Beat Artist'); + }); + }); + + // ── GET /api/v1/channel/:id ── + + describe('GET /api/v1/channel/:id', () => { + it('returns 200 with the correct channel', async () => { + // Get the list first to find a valid ID + const listRes = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + const channels = listRes.json(); + const 
channelId = channels[0].id; + + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channelId}`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.id).toBe(channelId); + expect(body.name).toBeTruthy(); + expect(body.platform).toBeTruthy(); + }); + + it('returns 404 for non-existent ID', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel/99999', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + expect(res.json().error).toBe('Not Found'); + }); + + it('returns 400 for non-numeric ID', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel/abc', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(400); + }); + }); + + // ── PUT /api/v1/channel/:id ── + + describe('PUT /api/v1/channel/:id', () => { + it('updates checkInterval and returns the updated channel', async () => { + // Find the YouTube channel + const listRes = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + const ytChannel = listRes.json().find((c: { platform: string }) => c.platform === 'youtube'); + + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${ytChannel.id}`, + headers: { 'x-api-key': apiKey }, + payload: { checkInterval: 60 }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.checkInterval).toBe(60); + expect(body.id).toBe(ytChannel.id); + }); + + it('updates monitoringEnabled', async () => { + const listRes = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + const channel = listRes.json()[0]; + + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${channel.id}`, + headers: { 'x-api-key': apiKey }, + payload: { monitoringEnabled: false }, + }); + + 
expect(res.statusCode).toBe(200); + expect(res.json().monitoringEnabled).toBe(false); + }); + + it('returns 404 for non-existent ID', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/channel/99999', + headers: { 'x-api-key': apiKey }, + payload: { name: 'Ghost' }, + }); + + expect(res.statusCode).toBe(404); + }); + }); + + // ── DELETE /api/v1/channel/:id ── + + describe('DELETE /api/v1/channel/:id', () => { + let deletableId: number; + + beforeAll(async () => { + // Create a channel specifically for deletion testing + execYtDlpMock.mockResolvedValueOnce({ + stdout: JSON.stringify({ + channel: 'Delete Me', + channel_id: 'UC_DELETE_ME', + channel_url: 'https://www.youtube.com/@DeleteMe', + uploader: 'Delete Me', + thumbnails: [], + }), + stderr: '', + exitCode: 0, + }); + + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + payload: { url: 'https://www.youtube.com/@DeleteMe' }, + }); + + deletableId = res.json().id; + }); + + it('returns 204 on successful delete', async () => { + const res = await server.inject({ + method: 'DELETE', + url: `/api/v1/channel/${deletableId}`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(204); + }); + + it('returns 404 when trying to GET the deleted channel', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${deletableId}`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + }); + + it('returns 404 when deleting non-existent ID', async () => { + const res = await server.inject({ + method: 'DELETE', + url: '/api/v1/channel/99999', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + }); + }); +}); diff --git a/src/__tests__/content-api.test.ts b/src/__tests__/content-api.test.ts new file mode 100644 index 0000000..a61670b --- /dev/null +++ b/src/__tests__/content-api.test.ts @@ -0,0 +1,309 @@ +import { describe, 
it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import type { Channel, ContentItem } from '../types/index'; + +/** + * Integration tests for content listing API endpoints. + */ + +describe('content-api', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + let channelA: Channel; + let channelB: Channel; + const contentItems: ContentItem[] = []; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-content-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + + // Create two channels for filtering tests + channelA = await createChannel(db, { + name: 'Channel Alpha', + platform: 'youtube', + platformId: 'UC_alpha', + url: 'https://www.youtube.com/channel/UC_alpha', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + channelB = await createChannel(db, { + name: 'Channel Beta', + platform: 'soundcloud', + platformId: 'beta_artist', + url: 'https://soundcloud.com/beta_artist', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + // Create varied content items for filter/search/pagination tests + const items = [ + { channelId: channelA.id, title: 'Alpha Video One', platformContentId: 'a_v1', url: 'https://youtube.com/watch?v=a_v1', contentType: 'video' as const, duration: 600, status: 'monitored' as const }, + { channelId: channelA.id, title: 'Alpha Video Two', platformContentId: 'a_v2', url: 'https://youtube.com/watch?v=a_v2', contentType: 'video' as const, duration: 300, status: 'downloaded' as const }, + { channelId: channelA.id, title: 'Alpha Livestream Special', platformContentId: 'a_ls1', url: 'https://youtube.com/watch?v=a_ls1', contentType: 'livestream' as const, duration: 7200, status: 'monitored' as const }, + { channelId: channelB.id, title: 'Beta Audio Track', platformContentId: 'b_a1', url: 'https://soundcloud.com/beta/track1', contentType: 'audio' as const, duration: 240, status: 'monitored' as const }, + { channelId: channelB.id, title: 'Beta Audio Mix', platformContentId: 'b_a2', url: 'https://soundcloud.com/beta/mix1', contentType: 'audio' as const, duration: 3600, status: 'failed' as const }, + ]; + + for (const item of items) { + const created = await createContentItem(db, item); + if (created) contentItems.push(created); + } + + expect(contentItems.length).toBe(5); + }); + + afterAll(async () => { + await server.close(); + 
closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows (K004) + } + }); + + // ── GET /api/v1/content ── + + describe('GET /api/v1/content', () => { + it('returns paginated results with all items', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/content', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data).toHaveLength(5); + expect(body.pagination).toBeDefined(); + expect(body.pagination.page).toBe(1); + expect(body.pagination.pageSize).toBe(20); + expect(body.pagination.totalItems).toBe(5); + expect(body.pagination.totalPages).toBe(1); + }); + + it('respects page and pageSize parameters', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/content?page=1&pageSize=2', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data).toHaveLength(2); + expect(body.pagination.page).toBe(1); + expect(body.pagination.pageSize).toBe(2); + expect(body.pagination.totalItems).toBe(5); + expect(body.pagination.totalPages).toBe(3); + }); + + it('returns second page correctly', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/content?page=2&pageSize=2', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data).toHaveLength(2); + expect(body.pagination.page).toBe(2); + }); + + it('returns empty data for page beyond range', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/content?page=100&pageSize=20', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data).toHaveLength(0); + expect(body.pagination.totalItems).toBe(5); 
+ }); + + it('filters by status', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/content?status=downloaded', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data).toHaveLength(1); + expect(body.data[0].status).toBe('downloaded'); + expect(body.data[0].title).toBe('Alpha Video Two'); + expect(body.pagination.totalItems).toBe(1); + }); + + it('filters by contentType', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/content?contentType=audio', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data).toHaveLength(2); + expect(body.data.every((item: { contentType: string }) => item.contentType === 'audio')).toBe(true); + expect(body.pagination.totalItems).toBe(2); + }); + + it('filters by channelId', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/content?channelId=${channelA.id}`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data).toHaveLength(3); + expect(body.data.every((item: { channelId: number }) => item.channelId === channelA.id)).toBe(true); + expect(body.pagination.totalItems).toBe(3); + }); + + it('searches by title substring', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/content?search=Livestream', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data).toHaveLength(1); + expect(body.data[0].title).toContain('Livestream'); + expect(body.pagination.totalItems).toBe(1); + }); + + it('combines multiple filters', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/content?channelId=${channelB.id}&status=failed`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = 
res.json(); + expect(body.data).toHaveLength(1); + expect(body.data[0].title).toBe('Beta Audio Mix'); + expect(body.data[0].status).toBe('failed'); + expect(body.pagination.totalItems).toBe(1); + }); + + it('returns 401 without API key', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/content', + }); + + expect(res.statusCode).toBe(401); + }); + }); + + // ── GET /api/v1/channel/:id/content ── + + describe('GET /api/v1/channel/:id/content', () => { + it('returns content for a specific channel', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channelA.id}/content`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data).toHaveLength(3); + expect(body.data.every((item: { channelId: number }) => item.channelId === channelA.id)).toBe(true); + }); + + it('returns empty array for channel with no content', async () => { + const noContentChannel = await createChannel(db, { + name: 'Empty Channel', + platform: 'youtube', + platformId: 'UC_empty', + url: 'https://www.youtube.com/channel/UC_empty', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${noContentChannel.id}/content`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data).toHaveLength(0); + }); + + it('returns 400 for invalid channel ID', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel/notanumber/content', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(400); + const body = res.json(); + expect(body.error).toBe('Bad Request'); + }); + + it('returns 401 without API key', async () => { + const res = await 
server.inject({ + method: 'GET', + url: `/api/v1/channel/${channelA.id}/content`, + }); + + expect(res.statusCode).toBe(401); + }); + }); +}); diff --git a/src/__tests__/cookie-manager.test.ts b/src/__tests__/cookie-manager.test.ts new file mode 100644 index 0000000..f547fcc --- /dev/null +++ b/src/__tests__/cookie-manager.test.ts @@ -0,0 +1,229 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync, writeFileSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { CookieManager } from '../services/cookie-manager'; +import { Platform } from '../types/index'; + +const VALID_COOKIE_CONTENT = `# Netscape HTTP Cookie File +# https://curl.se/docs/http-cookies.html +.youtube.com\tTRUE\t/\tTRUE\t0\tSID\tabc123 +.youtube.com\tTRUE\t/\tTRUE\t0\tHSID\txyz789 +`; + +const ALT_VALID_HEADER = `# HTTP Cookie File +.soundcloud.com\tTRUE\t/\tFALSE\t0\tsc_token\tdef456 +`; + +let tmpDir: string; +let sourceDir: string; + +function makeTmpDirs(): { cookiePath: string; sourcePath: string } { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-cm-test-')); + sourceDir = mkdtempSync(join(tmpdir(), 'tubearr-cm-source-')); + const cookiePath = join(tmpDir, 'cookies'); + return { cookiePath, sourcePath: sourceDir }; +} + +afterEach(() => { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + if (sourceDir && existsSync(sourceDir)) { + rmSync(sourceDir, { recursive: true, force: true }); + } +}); + +describe('CookieManager', () => { + describe('importCookieFile', () => { + it('imports a valid Netscape cookie file to the expected path', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const sourceFile = join(sourcePath, 'cookies.txt'); + writeFileSync(sourceFile, VALID_COOKIE_CONTENT); + + await cm.importCookieFile(Platform.YouTube, sourceFile); + + const expectedPath = join(cookiePath, 
'youtube_cookies.txt'); + expect(existsSync(expectedPath)).toBe(true); + }); + + it('accepts the alternative "# HTTP Cookie File" header', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const sourceFile = join(sourcePath, 'cookies.txt'); + writeFileSync(sourceFile, ALT_VALID_HEADER); + + await cm.importCookieFile(Platform.SoundCloud, sourceFile); + + const expectedPath = join(cookiePath, 'soundcloud_cookies.txt'); + expect(existsSync(expectedPath)).toBe(true); + }); + + it('throws on file without Netscape header', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const sourceFile = join(sourcePath, 'bad.txt'); + writeFileSync(sourceFile, 'not a cookie file\njust random text\n'); + + await expect( + cm.importCookieFile(Platform.YouTube, sourceFile) + ).rejects.toThrow('Invalid cookie file format'); + }); + + it('throws on empty cookie file', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const sourceFile = join(sourcePath, 'empty.txt'); + writeFileSync(sourceFile, ''); + + await expect( + cm.importCookieFile(Platform.YouTube, sourceFile) + ).rejects.toThrow('Cookie file is empty'); + }); + + it('throws when source file does not exist', async () => { + const { cookiePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + await expect( + cm.importCookieFile(Platform.YouTube, '/nonexistent/path.txt') + ).rejects.toThrow('Source cookie file not found'); + }); + + it('creates cookie directory if it does not exist', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const nestedCookiePath = join(cookiePath, 'nested', 'deep'); + const cm = new CookieManager(nestedCookiePath); + + const sourceFile = join(sourcePath, 'cookies.txt'); + writeFileSync(sourceFile, VALID_COOKIE_CONTENT); + + await cm.importCookieFile(Platform.YouTube, sourceFile); + + 
expect(existsSync(nestedCookiePath)).toBe(true); + }); + + it('accepts files with leading blank lines before header', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const sourceFile = join(sourcePath, 'cookies.txt'); + writeFileSync(sourceFile, '\n\n# Netscape HTTP Cookie File\n.example.com\tTRUE\t/\tFALSE\t0\ttest\tval\n'); + + await cm.importCookieFile(Platform.YouTube, sourceFile); + expect(existsSync(join(cookiePath, 'youtube_cookies.txt'))).toBe(true); + }); + }); + + describe('hasCookies', () => { + it('returns false when no cookie file exists', () => { + const { cookiePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + expect(cm.hasCookies(Platform.YouTube)).toBe(false); + }); + + it('returns true after importing a cookie file', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const sourceFile = join(sourcePath, 'cookies.txt'); + writeFileSync(sourceFile, VALID_COOKIE_CONTENT); + await cm.importCookieFile(Platform.YouTube, sourceFile); + + expect(cm.hasCookies(Platform.YouTube)).toBe(true); + }); + }); + + describe('getCookieFilePath', () => { + it('returns null when no cookie file exists', () => { + const { cookiePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + expect(cm.getCookieFilePath(Platform.YouTube)).toBeNull(); + }); + + it('returns path when cookie file exists', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const sourceFile = join(sourcePath, 'cookies.txt'); + writeFileSync(sourceFile, VALID_COOKIE_CONTENT); + await cm.importCookieFile(Platform.YouTube, sourceFile); + + const result = cm.getCookieFilePath(Platform.YouTube); + expect(result).toBe(join(cookiePath, 'youtube_cookies.txt')); + }); + }); + + describe('deleteCookieFile', () => { + it('removes the cookie file for a platform', async () => { + const { 
cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const sourceFile = join(sourcePath, 'cookies.txt'); + writeFileSync(sourceFile, VALID_COOKIE_CONTENT); + await cm.importCookieFile(Platform.YouTube, sourceFile); + + expect(cm.hasCookies(Platform.YouTube)).toBe(true); + await cm.deleteCookieFile(Platform.YouTube); + expect(cm.hasCookies(Platform.YouTube)).toBe(false); + }); + + it('does not throw when cookie file does not exist', async () => { + const { cookiePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + // Should not throw + await cm.deleteCookieFile(Platform.YouTube); + }); + }); + + describe('multiple platforms', () => { + it('stores independent cookie files per platform', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const ytSource = join(sourcePath, 'yt.txt'); + const scSource = join(sourcePath, 'sc.txt'); + writeFileSync(ytSource, VALID_COOKIE_CONTENT); + writeFileSync(scSource, ALT_VALID_HEADER); + + await cm.importCookieFile(Platform.YouTube, ytSource); + await cm.importCookieFile(Platform.SoundCloud, scSource); + + expect(cm.hasCookies(Platform.YouTube)).toBe(true); + expect(cm.hasCookies(Platform.SoundCloud)).toBe(true); + + // Deleting one doesn't affect the other + await cm.deleteCookieFile(Platform.YouTube); + expect(cm.hasCookies(Platform.YouTube)).toBe(false); + expect(cm.hasCookies(Platform.SoundCloud)).toBe(true); + }); + + it('getCookieFilePath returns different paths per platform', async () => { + const { cookiePath, sourcePath } = makeTmpDirs(); + const cm = new CookieManager(cookiePath); + + const ytSource = join(sourcePath, 'yt.txt'); + const scSource = join(sourcePath, 'sc.txt'); + writeFileSync(ytSource, VALID_COOKIE_CONTENT); + writeFileSync(scSource, ALT_VALID_HEADER); + + await cm.importCookieFile(Platform.YouTube, ytSource); + await cm.importCookieFile(Platform.SoundCloud, scSource); + + const ytPath = 
cm.getCookieFilePath(Platform.YouTube); + const scPath = cm.getCookieFilePath(Platform.SoundCloud); + + expect(ytPath).not.toBe(scPath); + expect(ytPath).toContain('youtube_cookies.txt'); + expect(scPath).toContain('soundcloud_cookies.txt'); + }); + }); +}); diff --git a/src/__tests__/database.test.ts b/src/__tests__/database.test.ts new file mode 100644 index 0000000..ba0c3da --- /dev/null +++ b/src/__tests__/database.test.ts @@ -0,0 +1,157 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase, getRawClient } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; + +/** + * Integration tests for the database layer. + * + * Each test creates a temporary directory + database file, initializes + * the connection, runs migrations, then tears down. The database module + * uses a module-level singleton, so closeDatabase() must be called in + * afterEach to reset state for the next test. + */ + +const EXPECTED_TABLES = [ + 'system_config', + 'channels', + 'content_items', + 'format_profiles', + 'queue_items', + 'download_history', + 'notification_settings', + 'platform_settings', + 'playlists', + 'content_playlist', +]; + +let tmpDir: string; + +function freshDbPath(): string { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-test-')); + return join(tmpDir, 'test.db'); +} + +function cleanup(): void { + closeDatabase(); + // On Windows, SQLite WAL/SHM files may still be locked briefly after + // closeDatabase(). Use try/catch to avoid EPERM failures in cleanup — + // the OS temp directory is cleaned automatically. 
+ try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } +} + +describe('Database initialization', () => { + afterEach(cleanup); + + it('creates the database file when initializing with a new path', async () => { + const dbPath = freshDbPath(); + expect(existsSync(dbPath)).toBe(false); + + await initDatabaseAsync(dbPath); + + // libsql creates the file on first connection + expect(existsSync(dbPath)).toBe(true); + }); + + it('enables WAL journal mode', async () => { + const dbPath = freshDbPath(); + await initDatabaseAsync(dbPath); + + const client = getRawClient(); + const result = await client.execute('PRAGMA journal_mode'); + const mode = result.rows[0]?.journal_mode; + + expect(mode).toBe('wal'); + }); +}); + +describe('Database migrations', () => { + afterEach(cleanup); + + it('creates all expected tables after migration', async () => { + const dbPath = freshDbPath(); + await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const client = getRawClient(); + const result = await client.execute( + "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" + ); + const tableNames = result.rows.map((r) => r.name as string); + + for (const table of EXPECTED_TABLES) { + expect(tableNames, `expected table "${table}" to exist`).toContain(table); + } + }); + + it('is idempotent — running migrations twice does not error', async () => { + const dbPath = freshDbPath(); + await initDatabaseAsync(dbPath); + + // First run + await runMigrations(dbPath); + // Second run — should not throw + await runMigrations(dbPath); + + // Verify tables still exist + const client = getRawClient(); + const result = await client.execute( + "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" + ); + const tableNames = result.rows.map((r) => r.name as string); + expect(tableNames).toContain('system_config'); + }); +}); + +describe('System config CRUD', () 
=> { + afterEach(cleanup); + + it('supports insert and read of key/value pairs', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + // Insert a key-value pair + await db.insert(systemConfig).values({ + key: 'test_key', + value: 'test_value', + }); + + // Read it back + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'test_key')) + .limit(1); + + expect(rows).toHaveLength(1); + expect(rows[0].key).toBe('test_key'); + expect(rows[0].value).toBe('test_value'); + expect(rows[0].createdAt).toBeTruthy(); + expect(rows[0].updatedAt).toBeTruthy(); + }); + + it('returns empty array for non-existent keys', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'nonexistent')) + .limit(1); + + expect(rows).toHaveLength(0); + }); +}); diff --git a/src/__tests__/download-api.test.ts b/src/__tests__/download-api.test.ts new file mode 100644 index 0000000..1c8501c --- /dev/null +++ b/src/__tests__/download-api.test.ts @@ -0,0 +1,227 @@ +import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem, updateContentItem } from '../db/repositories/content-repository'; +import { 
QueueService } from '../services/queue'; +import type { DownloadService } from '../services/download'; +import type { ContentItem, Channel } from '../types/index'; + +/** + * Integration tests for the download trigger API endpoint. + * + * The download route now enqueues via QueueService instead of calling + * DownloadService directly. It returns 202 Accepted with the queue item. + */ + +describe('Download API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + let testChannel: Channel; + let queueService: QueueService; + let mockDownloadService: { + downloadItem: ReturnType; + }; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-dl-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + + // Create mock download service and queue service + mockDownloadService = { + downloadItem: vi.fn().mockResolvedValue(undefined), + }; + queueService = new QueueService( + db, + mockDownloadService as unknown as DownloadService, + 2 + ); + // Stop auto-processing so tests stay deterministic + queueService.stop(); + + (server as { downloadService: DownloadService | null }).downloadService = + mockDownloadService as unknown as DownloadService; + (server as { queueService: QueueService | null }).queueService = queueService; + + await server.ready(); + + // Read API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + + // Create a test channel + testChannel = await createChannel(db, { + name: 'Download Test Channel', + platform: 'youtube', + platformId: 'UC_dl_test', + url: 'https://www.youtube.com/channel/UC_dl_test', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + }); + + afterAll(async () => { + queueService.stop(); + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Helpers ── + + function authed(opts: Record) { + return { + ...opts, + headers: { 'x-api-key': apiKey, ...(opts.headers as Record | undefined) }, + }; + } + + let contentCounter = 0; + async function createTestContentItem( + overrides: { status?: string; platformContentId?: string } = {} + ): Promise { + contentCounter++; + const item = await createContentItem(db, { + channelId: testChannel.id, + title: 'Test Download Video', + platformContentId: overrides.platformContentId ?? `vid_dl_${Date.now()}_${contentCounter}`, + url: 'https://www.youtube.com/watch?v=test123', + contentType: 'video', + duration: 300, + status: (overrides.status ?? 
'monitored') as 'monitored', + }); + return item!; + } + + // ── Auth gating ── + + describe('Authentication', () => { + it('returns 401 when no API key is provided', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/download/1', + }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── 404 handling ── + + describe('Not found', () => { + it('returns 404 for non-existent content item', async () => { + const res = await server.inject( + authed({ method: 'POST', url: '/api/v1/download/99999' }) + ); + expect(res.statusCode).toBe(404); + expect(res.json().message).toContain('99999'); + }); + + it('returns 400 for non-numeric content item ID', async () => { + const res = await server.inject( + authed({ method: 'POST', url: '/api/v1/download/abc' }) + ); + expect(res.statusCode).toBe(400); + }); + }); + + // ── 409 Conflict ── + + describe('Conflict handling', () => { + it('returns 409 when content item is already downloading', async () => { + const item = await createTestContentItem(); + await updateContentItem(db, item.id, { status: 'downloading' }); + + const res = await server.inject( + authed({ method: 'POST', url: `/api/v1/download/${item.id}` }) + ); + expect(res.statusCode).toBe(409); + expect(res.json().message).toContain('downloading'); + }); + + it('returns 409 when content item is already downloaded', async () => { + const item = await createTestContentItem(); + await updateContentItem(db, item.id, { status: 'downloaded' }); + + const res = await server.inject( + authed({ method: 'POST', url: `/api/v1/download/${item.id}` }) + ); + expect(res.statusCode).toBe(409); + expect(res.json().message).toContain('downloaded'); + }); + + it('returns 409 when content item is already queued', async () => { + const item = await createTestContentItem(); + // Enqueue once + await queueService.enqueue(item.id); + + // Try to enqueue again via the download endpoint + const res = await server.inject( + authed({ method: 'POST', url: 
`/api/v1/download/${item.id}` }) + ); + expect(res.statusCode).toBe(409); + expect(res.json().message).toContain('already in the queue'); + }); + }); + + // ── Successful enqueue ── + + describe('Successful enqueue via download endpoint', () => { + it('returns 202 Accepted with queue item', async () => { + const item = await createTestContentItem(); + + const res = await server.inject( + authed({ method: 'POST', url: `/api/v1/download/${item.id}` }) + ); + + expect(res.statusCode).toBe(202); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.contentItemId).toBe(item.id); + expect(body.data.status).toBe('pending'); + expect(body.data.id).toBeDefined(); + }); + + it('re-allows enqueue of failed items', async () => { + const item = await createTestContentItem(); + await updateContentItem(db, item.id, { status: 'failed' }); + + const res = await server.inject( + authed({ method: 'POST', url: `/api/v1/download/${item.id}` }) + ); + + // Failed items can be re-enqueued + expect(res.statusCode).toBe(202); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.status).toBe('pending'); + }); + }); +}); diff --git a/src/__tests__/download.test.ts b/src/__tests__/download.test.ts new file mode 100644 index 0000000..a961bd9 --- /dev/null +++ b/src/__tests__/download.test.ts @@ -0,0 +1,764 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync, writeFileSync, mkdirSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { createChannel } from '../db/repositories/channel-repository'; +import { + createContentItem, + getContentItemById, +} from '../db/repositories/content-repository'; +import { DownloadService } from '../services/download'; +import { QualityAnalyzer } from '../services/quality-analyzer'; 
+import { FileOrganizer } from '../services/file-organizer'; +import { CookieManager } from '../services/cookie-manager'; +import { RateLimiter } from '../services/rate-limiter'; +import { YtDlpError } from '../sources/yt-dlp'; +import type { ContentItem, Channel, FormatProfile, QualityInfo } from '../types/index'; + +// ── Mocks ── + +// Mock execYtDlp from yt-dlp module +const execYtDlpMock = vi.fn(); +vi.mock('../sources/yt-dlp', async (importOriginal) => { + const actual = await importOriginal() as Record; + return { + ...actual, + execYtDlp: (...args: unknown[]) => execYtDlpMock(...args), + }; +}); + +// Mock fs.stat for file size +const statMock = vi.fn(); +vi.mock('node:fs/promises', async (importOriginal) => { + const actual = await importOriginal() as Record; + return { + ...actual, + stat: (...args: unknown[]) => statMock(...args), + }; +}); + +// ── Test Helpers ── + +let tmpDir: string; +let db: Awaited>; +let testChannel: Channel; +let testContentItem: ContentItem; + +async function setupDb(): Promise { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-dl-test-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + // Create a test channel + testChannel = await createChannel(db, { + name: 'Test Channel', + platform: 'youtube', + platformId: 'UC_test123', + url: 'https://www.youtube.com/channel/UC_test123', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + + // Create a test content item in 'monitored' status + testContentItem = (await createContentItem(db, { + channelId: testChannel.id, + title: 'Test Video Title', + platformContentId: 'vid_abc123', + url: 'https://www.youtube.com/watch?v=abc123', + contentType: 'video', + duration: 600, + status: 'monitored', + }))!; +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } 
catch { + // Windows cleanup best-effort + } +} + +function createMockDeps() { + const mediaPath = join(tmpDir, 'media'); + const cookiePath = join(tmpDir, 'cookies'); + mkdirSync(mediaPath, { recursive: true }); + mkdirSync(cookiePath, { recursive: true }); + + const rateLimiter = new RateLimiter({ + youtube: { minIntervalMs: 0 }, + soundcloud: { minIntervalMs: 0 }, + }); + const fileOrganizer = new FileOrganizer(mediaPath); + const qualityAnalyzer = new QualityAnalyzer(); + const cookieManager = new CookieManager(cookiePath); + + // Spy on rate limiter methods + vi.spyOn(rateLimiter, 'acquire'); + vi.spyOn(rateLimiter, 'reportSuccess'); + vi.spyOn(rateLimiter, 'reportError'); + + // Spy on quality analyzer + vi.spyOn(qualityAnalyzer, 'analyze').mockResolvedValue({ + actualResolution: '1920x1080', + actualCodec: 'h264', + actualBitrate: '5.0 Mbps', + containerFormat: 'mp4', + qualityWarnings: [], + }); + + return { rateLimiter, fileOrganizer, qualityAnalyzer, cookieManager }; +} + +// ── Tests ── + +describe('DownloadService', () => { + beforeEach(async () => { + vi.clearAllMocks(); + await setupDb(); + }); + + afterEach(cleanup); + + describe('downloadItem — successful download', () => { + it('transitions content item from monitored → downloading → downloaded', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + // Mock yt-dlp returning a filepath on stdout + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'fake video data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + + statMock.mockResolvedValueOnce({ size: 15_000_000 }); + + const result = await service.downloadItem(testContentItem, testChannel); + + 
expect(result.status).toBe('downloaded'); + expect(result.filePath).toBe(outputPath); + expect(result.fileSize).toBe(15_000_000); + expect(result.format).toBe('mp4'); + expect(result.qualityMetadata).toBeDefined(); + expect(result.qualityMetadata?.actualResolution).toBe('1920x1080'); + + // Verify DB state + const dbItem = await getContentItemById(db, testContentItem.id); + expect(dbItem?.status).toBe('downloaded'); + expect(dbItem?.filePath).toBe(outputPath); + }); + + it('populates filePath, fileSize, format, and qualityMetadata', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.webm'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'fake data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + + statMock.mockResolvedValueOnce({ size: 8_500_000 }); + + const qualityInfo: QualityInfo = { + actualResolution: '1280x720', + actualCodec: 'vp9', + actualBitrate: '2.5 Mbps', + containerFormat: 'webm', + qualityWarnings: [], + }; + (deps.qualityAnalyzer.analyze as ReturnType).mockResolvedValueOnce(qualityInfo); + + const result = await service.downloadItem(testContentItem, testChannel); + + expect(result.filePath).toBe(outputPath); + expect(result.fileSize).toBe(8_500_000); + expect(result.format).toBe('webm'); + expect(result.qualityMetadata).toEqual(qualityInfo); + }); + + it('sets downloadedAt on successful download', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test 
Channel'), { recursive: true }); + writeFileSync(outputPath, 'fake data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + + statMock.mockResolvedValueOnce({ size: 10_000_000 }); + + const before = new Date().toISOString(); + const result = await service.downloadItem(testContentItem, testChannel); + const after = new Date().toISOString(); + + // downloadedAt should be set to a valid ISO datetime + expect(result.downloadedAt).toBeTruthy(); + expect(result.downloadedAt).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/); + // Should be between before and after timestamps + expect(result.downloadedAt! >= before).toBe(true); + expect(result.downloadedAt! <= after).toBe(true); + + // Verify in DB as well + const dbItem = await getContentItemById(db, testContentItem.id); + expect(dbItem?.downloadedAt).toBe(result.downloadedAt); + }); + }); + + describe('downloadItem — failed download', () => { + it('transitions content item to failed on yt-dlp error', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + execYtDlpMock.mockRejectedValueOnce( + new YtDlpError('yt-dlp exited with code 1: ERROR: Video not found', 'ERROR: Video not found', 1) + ); + + await expect( + service.downloadItem(testContentItem, testChannel) + ).rejects.toThrow(YtDlpError); + + // Verify status is 'failed' in DB + const dbItem = await getContentItemById(db, testContentItem.id); + expect(dbItem?.status).toBe('failed'); + }); + }); + + describe('downloadItem — rate limiter integration', () => { + it('calls acquire() before download and reportSuccess() after', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video 
Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + await service.downloadItem(testContentItem, testChannel); + + expect(deps.rateLimiter.acquire).toHaveBeenCalledWith('youtube'); + expect(deps.rateLimiter.reportSuccess).toHaveBeenCalledWith('youtube'); + expect(deps.rateLimiter.reportError).not.toHaveBeenCalled(); + }); + + it('calls reportError() on download failure', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + execYtDlpMock.mockRejectedValueOnce( + new YtDlpError('download error', 'stderr content', 1) + ); + + await expect( + service.downloadItem(testContentItem, testChannel) + ).rejects.toThrow(); + + expect(deps.rateLimiter.acquire).toHaveBeenCalledWith('youtube'); + expect(deps.rateLimiter.reportError).toHaveBeenCalledWith('youtube'); + expect(deps.rateLimiter.reportSuccess).not.toHaveBeenCalled(); + }); + }); + + describe('downloadItem — format profile', () => { + it('applies video resolution format profile with correct -f flag', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mkv'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + const profile: FormatProfile = { + id: 1, + name: 'High Quality', + videoResolution: '1080p', + audioCodec: null, + 
audioBitrate: null, + containerFormat: 'mkv', + isDefault: false, + subtitleLanguages: null, + embedSubtitles: false, + createdAt: '', + updatedAt: '', + }; + + await service.downloadItem(testContentItem, testChannel, profile); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + expect(args).toContain('-f'); + const fIdx = args.indexOf('-f'); + expect(args[fIdx + 1]).toBe('bestvideo[height<=1080]+bestaudio/best[height<=1080]'); + expect(args).toContain('--merge-output-format'); + const moIdx = args.indexOf('--merge-output-format'); + expect(args[moIdx + 1]).toBe('mkv'); + }); + + it('applies audio codec/bitrate format profile for audio content', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + // Create an audio content item + const audioItem = (await createContentItem(db, { + channelId: testChannel.id, + title: 'Test Audio Track', + platformContentId: 'audio_xyz', + url: 'https://soundcloud.com/test/track', + contentType: 'audio', + duration: 300, + status: 'monitored', + }))!; + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Audio Track.opus'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 5_000_000 }); + + const profile: FormatProfile = { + id: 2, + name: 'Audio HQ', + videoResolution: null, + audioCodec: 'opus', + audioBitrate: '320k', + containerFormat: null, + isDefault: false, + subtitleLanguages: null, + embedSubtitles: false, + createdAt: '', + updatedAt: '', + }; + + await service.downloadItem(audioItem, testChannel, profile); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + expect(args).toContain('-f'); + const fIdx = args.indexOf('-f'); + expect(args[fIdx + 
1]).toBe('bestaudio'); + expect(args).toContain('--extract-audio'); + expect(args).toContain('--audio-format'); + const afIdx = args.indexOf('--audio-format'); + expect(args[afIdx + 1]).toBe('opus'); + expect(args).toContain('--audio-quality'); + const aqIdx = args.indexOf('--audio-quality'); + expect(args[aqIdx + 1]).toBe('320k'); + }); + + it('falls back to -f "best" for video when no format profile', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + await service.downloadItem(testContentItem, testChannel); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + expect(args).toContain('-f'); + const fIdx = args.indexOf('-f'); + expect(args[fIdx + 1]).toBe('best'); + }); + + it('falls back to -f "bestaudio" for audio when no format profile', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const audioItem = (await createContentItem(db, { + channelId: testChannel.id, + title: 'No Profile Audio', + platformContentId: 'audio_np', + url: 'https://soundcloud.com/test/no-profile', + contentType: 'audio', + duration: 200, + status: 'monitored', + }))!; + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'No Profile Audio.mp3'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + 
exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 3_000_000 }); + + await service.downloadItem(audioItem, testChannel); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + expect(args).toContain('-f'); + const fIdx = args.indexOf('-f'); + expect(args[fIdx + 1]).toBe('bestaudio'); + // No --extract-audio when no profile + expect(args).not.toContain('--extract-audio'); + }); + }); + + describe('downloadItem — cookie support', () => { + it('includes --cookies flag when cookies exist for platform', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + // Import a cookie file for youtube + const cookieSource = join(tmpDir, 'source_cookies.txt'); + writeFileSync(cookieSource, '# Netscape HTTP Cookie File\n.youtube.com\tTRUE\t/\tFALSE\t0\tSID\tabc123\n'); + await deps.cookieManager.importCookieFile('youtube', cookieSource); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + await service.downloadItem(testContentItem, testChannel); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + expect(args).toContain('--cookies'); + const cookieIdx = args.indexOf('--cookies'); + expect(args[cookieIdx + 1]).toContain('youtube_cookies.txt'); + }); + + it('does not include --cookies flag when no cookies exist', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 
'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + await service.downloadItem(testContentItem, testChannel); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + expect(args).not.toContain('--cookies'); + }); + }); + + describe('downloadItem — timeout', () => { + it('uses 30-minute timeout for yt-dlp download calls', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + await service.downloadItem(testContentItem, testChannel); + + // Check that execYtDlp was called with 30-minute timeout + expect(execYtDlpMock).toHaveBeenCalledWith( + expect.any(Array), + { timeout: 1_800_000 } + ); + }); + }); + + describe('downloadItem — common args', () => { + it('always includes --no-playlist and --print after_move:filepath', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + await 
service.downloadItem(testContentItem, testChannel); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + expect(args).toContain('--no-playlist'); + expect(args).toContain('--print'); + const printIdx = args.indexOf('--print'); + expect(args[printIdx + 1]).toBe('after_move:filepath'); + }); + + it('includes -o output template and URL', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + await service.downloadItem(testContentItem, testChannel); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + expect(args).toContain('-o'); + // URL should be the last arg + expect(args[args.length - 1]).toBe('https://www.youtube.com/watch?v=abc123'); + }); + }); + + describe('Best format option', () => { + it('emits bestvideo+bestaudio/best for video with videoResolution "Best"', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + const profile: FormatProfile = { + id: 10, + name: 'Best Quality', + videoResolution: 'Best', + audioCodec: null, + audioBitrate: null, + containerFormat: 
null, + isDefault: false, + subtitleLanguages: null, + embedSubtitles: false, + createdAt: '', + updatedAt: '', + }; + + await service.downloadItem(testContentItem, testChannel, profile); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + const fIdx = args.indexOf('-f'); + expect(fIdx).toBeGreaterThanOrEqual(0); + expect(args[fIdx + 1]).toBe('bestvideo+bestaudio/best'); + // Should default to mp4 merge format when containerFormat is null + expect(args).toContain('--merge-output-format'); + const moIdx = args.indexOf('--merge-output-format'); + expect(args[moIdx + 1]).toBe('mp4'); + }); + + it('uses specified container format with "Best" resolution', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Test Video Title.mkv'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 1000 }); + + const profile: FormatProfile = { + id: 11, + name: 'Best MKV', + videoResolution: 'Best', + audioCodec: null, + audioBitrate: null, + containerFormat: 'mkv', + isDefault: false, + subtitleLanguages: null, + embedSubtitles: false, + createdAt: '', + updatedAt: '', + }; + + await service.downloadItem(testContentItem, testChannel, profile); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + const fIdx = args.indexOf('-f'); + expect(args[fIdx + 1]).toBe('bestvideo+bestaudio/best'); + const moIdx = args.indexOf('--merge-output-format'); + expect(args[moIdx + 1]).toBe('mkv'); + }); + + it('omits --audio-quality when audioBitrate is "Best"', async () => { + const deps = createMockDeps(); + const service = new DownloadService( + db, deps.rateLimiter, 
deps.fileOrganizer, + deps.qualityAnalyzer, deps.cookieManager + ); + + // Create an audio content item + const audioItem = (await createContentItem(db, { + channelId: testChannel.id, + title: 'Best Audio Track', + platformContentId: 'audio_best', + url: 'https://soundcloud.com/test/best-track', + contentType: 'audio', + duration: 240, + status: 'monitored', + }))!; + + const outputPath = join(tmpDir, 'media', 'youtube', 'Test Channel', 'Best Audio Track.mp3'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 2_000_000 }); + + const profile: FormatProfile = { + id: 12, + name: 'Best Audio', + videoResolution: null, + audioCodec: 'mp3', + audioBitrate: 'Best', + containerFormat: null, + isDefault: false, + subtitleLanguages: null, + embedSubtitles: false, + createdAt: '', + updatedAt: '', + }; + + await service.downloadItem(audioItem, testChannel, profile); + + const args = execYtDlpMock.mock.calls[0][0] as string[]; + + // Should have -f bestaudio + const fIdx = args.indexOf('-f'); + expect(args[fIdx + 1]).toBe('bestaudio'); + + // Should have --extract-audio and --audio-format + expect(args).toContain('--extract-audio'); + expect(args).toContain('--audio-format'); + const afIdx = args.indexOf('--audio-format'); + expect(args[afIdx + 1]).toBe('mp3'); + + // Must NOT have --audio-quality when bitrate is "Best" + expect(args).not.toContain('--audio-quality'); + }); + }); +}); diff --git a/src/__tests__/e2e-flow.test.ts b/src/__tests__/e2e-flow.test.ts new file mode 100644 index 0000000..9a5136d --- /dev/null +++ b/src/__tests__/e2e-flow.test.ts @@ -0,0 +1,407 @@ +import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 
'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import { updateQueueItemStatus } from '../db/repositories/queue-repository'; +import { QueueService } from '../services/queue'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; + +/** + * End-to-end integration test exercising the full application flow: + * channel CRUD → content listing → download enqueue → queue state → + * history records → health check → system status. + * + * Uses a real SQLite database with migrations and Fastify inject() for + * fast HTTP testing without binding ports. The download service is mocked + * so yt-dlp is not required. 
+ */ + +describe('End-to-end flow', () => { + let server: FastifyInstance; + let db: LibSQLDatabase<typeof schema>; + let apiKey: string; + let tmpDir: string; + + // IDs populated during test flow + let channelId: number; + let contentItemId: number; + let queueItemId: number; + + // Mock download service — simulates successful downloads + const mockDownloadService = { + downloadItem: vi.fn().mockResolvedValue(undefined), + }; + + beforeAll(async () => { + // Create isolated temp database + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-e2e-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + // Build server with real database + server = await buildServer({ db }); + + // Attach a QueueService with mock download service so enqueue works + const queueService = new QueueService(db, mockDownloadService as any, { + concurrency: 1, + }); + // Stop auto-processing so we control when downloads run + queueService.stop(); + server.queueService = queueService; + + await server.ready(); + + // Read auto-generated API key from database + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + server.queueService?.stop(); + await server.close(); + closeDatabase(); + // Windows: SQLite WAL/SHM files may be locked briefly (K004) + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Step 1: Create a channel (via repository — bypasses yt-dlp resolution) ── + + describe('Step 1: Channel creation and retrieval', () => { + it('creates a channel in the database', async () => { + const channel = await createChannel(db, { + name: 'E2E Test Channel', + platform: 'youtube', + platformId: 'UC_e2e_test_channel', + url: 'https://www.youtube.com/channel/UC_e2e_test_channel', + imageUrl: 'https://example.com/thumb.jpg', + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + + expect(channel.id).toBeGreaterThan(0); + channelId = channel.id; + }); + + it('GET /api/v1/channel/:id returns the channel', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channelId}`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.name).toBe('E2E Test Channel'); + expect(body.platform).toBe('youtube'); + expect(body.platformId).toBe('UC_e2e_test_channel'); + }); + + it('GET /api/v1/channel lists channels including ours', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const channels = res.json(); + expect(Array.isArray(channels)).toBe(true); + expect(channels.some((c: { id: number }) => c.id === channelId)).toBe(true); + }); + }); + + // ── Step 2: Create content item (via repository — simulates scheduler detection) ── + + describe('Step 2: Content creation and listing', () => { + it('creates a content item for the 
channel', async () => { + const item = await createContentItem(db, { + channelId, + title: 'E2E Test Video — How to Build a Media Server', + platformContentId: 'e2e_test_video_001', + url: 'https://www.youtube.com/watch?v=e2e_test_001', + contentType: 'video', + duration: 600, + status: 'monitored', + }); + + expect(item).not.toBeNull(); + contentItemId = item!.id; + }); + + it('GET /api/v1/content?channelId=:id shows the content item', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/content?channelId=${channelId}`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.length).toBeGreaterThanOrEqual(1); + expect(body.data.some((c: { id: number }) => c.id === contentItemId)).toBe(true); + }); + + it('GET /api/v1/channel/:id/content returns channel-specific content', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channelId}/content`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.length).toBeGreaterThanOrEqual(1); + const item = body.data.find((c: { id: number }) => c.id === contentItemId); + expect(item).toBeDefined(); + expect(item.title).toBe('E2E Test Video — How to Build a Media Server'); + }); + }); + + // ── Step 3: Enqueue download and check queue state ── + + describe('Step 3: Download enqueue and queue management', () => { + it('POST /api/v1/download/:contentItemId enqueues the item', async () => { + const res = await server.inject({ + method: 'POST', + url: `/api/v1/download/${contentItemId}`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(202); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data).toHaveProperty('id'); + expect(body.data.contentItemId).toBe(contentItemId); + 
expect(body.data.status).toBe('pending'); + queueItemId = body.data.id; + }); + + it('GET /api/v1/queue shows the queued item', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/queue', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.length).toBeGreaterThanOrEqual(1); + const item = body.data.find((q: { id: number }) => q.id === queueItemId); + expect(item).toBeDefined(); + expect(item.status).toBe('pending'); + }); + + it('GET /api/v1/queue?status=pending filters correctly', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/queue?status=pending', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.every((q: { status: string }) => q.status === 'pending')).toBe(true); + }); + + it('POST /api/v1/download/:contentItemId rejects duplicate enqueue', async () => { + const res = await server.inject({ + method: 'POST', + url: `/api/v1/download/${contentItemId}`, + headers: { 'x-api-key': apiKey }, + }); + + // Content status is now 'queued', so this should return 409 + expect(res.statusCode).toBe(409); + }); + }); + + // ── Step 4: Simulate download completion and verify history ── + + describe('Step 4: Download completion and history', () => { + it('simulating download completion creates history records', async () => { + // Manually transition the queue item to completed to simulate + // what the QueueService would do after a successful download + await updateQueueItemStatus(db, queueItemId, 'completed', { + completedAt: new Date().toISOString(), + }); + + // Verify queue item is now completed + const queueRes = await server.inject({ + method: 'GET', + url: `/api/v1/queue/${queueItemId}`, + headers: { 'x-api-key': apiKey }, + }); + + expect(queueRes.statusCode).toBe(200); + 
expect(queueRes.json().data.status).toBe('completed'); + }); + + it('GET /api/v1/history shows history events', async () => { + // The enqueue operation created a 'grabbed' history event + const res = await server.inject({ + method: 'GET', + url: '/api/v1/history', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.length).toBeGreaterThanOrEqual(1); + + // At minimum, we should have a 'grabbed' event from enqueue + const grabbedEvent = body.data.find( + (e: { eventType: string; contentItemId: number | null }) => + e.eventType === 'grabbed' && e.contentItemId === contentItemId + ); + expect(grabbedEvent).toBeDefined(); + }); + + it('GET /api/v1/history?eventType=grabbed filters by event type', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/history?eventType=grabbed', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.every((e: { eventType: string }) => e.eventType === 'grabbed')).toBe(true); + }); + + it('GET /api/v1/activity returns recent activity', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/activity', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.length).toBeGreaterThanOrEqual(1); + }); + }); + + // ── Step 5: Health and System Status ── + + describe('Step 5: Health and system status', () => { + it('GET /ping returns ok (unauthenticated)', async () => { + const res = await server.inject({ + method: 'GET', + url: '/ping', + }); + + expect(res.statusCode).toBe(200); + expect(res.json()).toEqual({ status: 'ok' }); + }); + + it('GET /api/v1/health returns healthy status', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/health', + headers: { 'x-api-key': apiKey }, 
+ }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.status).toBeDefined(); + expect(body.components).toBeDefined(); + expect(Array.isArray(body.components)).toBe(true); + + // Database component should be healthy + const dbComponent = body.components.find( + (c: { name: string }) => c.name === 'database' + ); + expect(dbComponent).toBeDefined(); + expect(dbComponent.status).toBe('healthy'); + }); + + it('GET /api/v1/system/status returns system information', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body).toHaveProperty('appName'); + expect(body.appName).toBe('Tubearr'); + expect(body).toHaveProperty('version'); + expect(body).toHaveProperty('uptime'); + expect(body).toHaveProperty('platform'); + expect(body).toHaveProperty('nodeVersion'); + expect(typeof body.uptime).toBe('number'); + }); + }); + + // ── Step 6: Error handling and edge cases ── + + describe('Step 6: Error handling', () => { + it('returns 401 for missing API key on protected routes', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + }); + + expect(res.statusCode).toBe(401); + }); + + it('returns 404 for unknown API routes', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/nonexistent-route', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + }); + + it('returns 404 for non-existent channel', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel/99999', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + }); + + it('returns 404 for non-existent content item download', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/download/99999', + headers: { 'x-api-key': apiKey }, + }); + + 
expect(res.statusCode).toBe(404); + }); + }); +}); diff --git a/src/__tests__/file-organizer.test.ts b/src/__tests__/file-organizer.test.ts new file mode 100644 index 0000000..c7313d2 --- /dev/null +++ b/src/__tests__/file-organizer.test.ts @@ -0,0 +1,216 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, writeFileSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { FileOrganizer } from '../services/file-organizer'; + +let tmpDir: string; + +function makeTmpDir(): string { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-fo-test-')); + return tmpDir; +} + +afterEach(() => { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } +}); + +describe('FileOrganizer', () => { + describe('buildOutputPath', () => { + it('produces {mediaPath}/{platform}/{channel}/{title}.{ext} paths', () => { + const mediaPath = join('media', 'downloads'); + const fo = new FileOrganizer(mediaPath); + + const result = fo.buildOutputPath('youtube', 'TechChannel', 'My Video', 'mp4'); + // Use path.join behavior — just verify the segments are present + expect(result).toContain('youtube'); + expect(result).toContain('TechChannel'); + expect(result).toContain('My Video.mp4'); + expect(result.startsWith(mediaPath)).toBe(true); + }); + + it('sanitizes channelName and title in the path', () => { + const fo = new FileOrganizer('/media'); + + const result = fo.buildOutputPath( + 'youtube', + 'Bad:Channel*Name', + 'Title "With" <Special> Chars', + 'mkv' + ); + + expect(result).not.toContain(':'); + expect(result).not.toContain('*'); + expect(result).not.toContain('"'); + expect(result).not.toContain('<'); + expect(result).not.toContain('>'); + expect(result).toContain('BadChannelName'); + expect(result).toContain('Title With Special Chars.mkv'); + }); + + it('handles extension with or without leading dot', () => { + const fo = new FileOrganizer('/media'); + + const withDot = 
fo.buildOutputPath('youtube', 'Ch', 'Vid', '.mp4'); + const withoutDot = fo.buildOutputPath('youtube', 'Ch', 'Vid', 'mp4'); + + // Both should produce the same filename + expect(withDot).toContain('Vid.mp4'); + expect(withoutDot).toContain('Vid.mp4'); + }); + }); + + describe('sanitizeFilename', () => { + it('strips forbidden characters / \\ : * ? " < > |', () => { + const fo = new FileOrganizer('/media'); + + const result = fo.sanitizeFilename('a/b\\c:d*e?f"g<h>i|j'); + expect(result).toBe('abcdefghij'); + }); + + it('handles Unicode characters intact', () => { + const fo = new FileOrganizer('/media'); + + expect(fo.sanitizeFilename('日本語テスト')).toBe('日本語テスト'); + expect(fo.sanitizeFilename('Ünîcödé Fïlé')).toBe('Ünîcödé Fïlé'); + expect(fo.sanitizeFilename('🎵 Music 🎶')).toBe('🎵 Music 🎶'); + }); + + it('collapses multiple spaces and underscores', () => { + const fo = new FileOrganizer('/media'); + + expect(fo.sanitizeFilename('too   many   spaces')).toBe('too many spaces'); + expect(fo.sanitizeFilename('too___many___underscores')).toBe('too_many_underscores'); + }); + + it('handles empty and dot-only names', () => { + const fo = new FileOrganizer('/media'); + + expect(fo.sanitizeFilename('')).toBe('_unnamed'); + expect(fo.sanitizeFilename('...')).toBe('_unnamed'); + expect(fo.sanitizeFilename(' ')).toBe('_unnamed'); + expect(fo.sanitizeFilename('***')).toBe('_unnamed'); + }); + + it('trims leading and trailing dots and spaces', () => { + const fo = new FileOrganizer('/media'); + + expect(fo.sanitizeFilename(' hello ')).toBe('hello'); + expect(fo.sanitizeFilename('..hello..')).toBe('hello'); + expect(fo.sanitizeFilename('. .hello. 
.')).toBe('hello'); + }); + + it('replaces control characters', () => { + const fo = new FileOrganizer('/media'); + + const withControls = 'hello\x00world\x1f!'; + expect(fo.sanitizeFilename(withControls)).toBe('helloworld!'); + }); + + it('respects max filename length of 200 characters', () => { + const fo = new FileOrganizer('/media'); + + const longName = 'A'.repeat(300); + const result = fo.sanitizeFilename(longName); + expect(result.length).toBeLessThanOrEqual(200); + expect(result.length).toBe(200); + }); + + it('truncates without breaking multi-byte codepoints', () => { + const fo = new FileOrganizer('/media'); + + // Each emoji is 2 code units but 1 codepoint — use 201 single-codepoint chars + const emojiName = '🎵'.repeat(201); + const result = fo.sanitizeFilename(emojiName); + expect(result.length).toBeLessThanOrEqual(200 * 2); // String.length counts UTF-16 code units + expect([...result].length).toBeLessThanOrEqual(200); // Spread counts codepoints + }); + }); + + describe('ensureDirectory', () => { + it('creates parent directories recursively', async () => { + const base = makeTmpDir(); + const fo = new FileOrganizer(base); + + const filePath = join(base, 'youtube', 'channel', 'video.mp4'); + await fo.ensureDirectory(filePath); + + expect(existsSync(join(base, 'youtube', 'channel'))).toBe(true); + }); + + it('succeeds when directory already exists', async () => { + const base = makeTmpDir(); + const fo = new FileOrganizer(base); + + const filePath = join(base, 'youtube', 'channel', 'video.mp4'); + await fo.ensureDirectory(filePath); + // Second call should not throw + await fo.ensureDirectory(filePath); + + expect(existsSync(join(base, 'youtube', 'channel'))).toBe(true); + }); + }); + + describe('resolveUniquePath', () => { + it('returns original path when file does not exist', async () => { + const base = makeTmpDir(); + const fo = new FileOrganizer(base); + + const filePath = join(base, 'nonexistent.mp4'); + const result = await 
fo.resolveUniquePath(filePath); + expect(result).toBe(filePath); + }); + + it('appends (2) suffix when original file exists', async () => { + const base = makeTmpDir(); + const fo = new FileOrganizer(base); + + const filePath = join(base, 'video.mp4'); + writeFileSync(filePath, 'data'); + + const result = await fo.resolveUniquePath(filePath); + expect(result).toBe(join(base, 'video (2).mp4')); + }); + + it('increments suffix until a free name is found', async () => { + const base = makeTmpDir(); + const fo = new FileOrganizer(base); + + const filePath = join(base, 'video.mp4'); + writeFileSync(filePath, 'data'); + writeFileSync(join(base, 'video (2).mp4'), 'data'); + writeFileSync(join(base, 'video (3).mp4'), 'data'); + + const result = await fo.resolveUniquePath(filePath); + expect(result).toBe(join(base, 'video (4).mp4')); + }); + + it('preserves extension when adding suffix', async () => { + const base = makeTmpDir(); + const fo = new FileOrganizer(base); + + const filePath = join(base, 'song.flac'); + writeFileSync(filePath, 'data'); + + const result = await fo.resolveUniquePath(filePath); + expect(result).toContain('.flac'); + expect(result).toContain('(2)'); + }); + }); + + describe('cross-platform paths', () => { + it('uses path.join (no hardcoded separators)', () => { + const fo = new FileOrganizer('/base/media'); + + const result = fo.buildOutputPath('soundcloud', 'Artist', 'Track', 'mp3'); + + // The path should be well-formed for the current OS + // On Windows path.join uses \, on Unix it uses / + // Just verify it doesn't contain double separators + expect(result).not.toContain('//'); + expect(result).not.toMatch(/\\{2,}/); + }); + }); +}); diff --git a/src/__tests__/format-profile-api.test.ts b/src/__tests__/format-profile-api.test.ts new file mode 100644 index 0000000..42ff61f --- /dev/null +++ b/src/__tests__/format-profile-api.test.ts @@ -0,0 +1,357 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, 
rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; + +/** + * Integration tests for format profile CRUD API endpoints. + * Uses Fastify inject — no real HTTP ports. + */ + +describe('Format Profile API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase<typeof schema>; + let apiKey: string; + let tmpDir: string; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-fp-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read API key from database (generated by auth plugin) + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Helpers ── + + function authed(opts: Record<string, unknown>) { + return { + ...opts, + headers: { 'x-api-key': apiKey, ...(opts.headers as Record<string, string> | undefined) }, + }; + } + + // ── Auth gating ── + + describe('Authentication', () => { + it('returns 401 when no API key is provided', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/format-profile', + }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── CRUD lifecycle ── + + describe('CRUD lifecycle', () => { + let profileId: number; + + it('POST creates a format profile', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/format-profile', + payload: { + name: 'HD Video', + videoResolution: '1080p', + audioCodec: 'aac', + audioBitrate: '192k', + containerFormat: 'mp4', + isDefault: false, + }, + }) + ); + + expect(res.statusCode).toBe(201); + const body = res.json(); + expect(body.name).toBe('HD Video'); + expect(body.videoResolution).toBe('1080p'); + expect(body.audioCodec).toBe('aac'); + expect(body.audioBitrate).toBe('192k'); + expect(body.containerFormat).toBe('mp4'); + expect(body.isDefault).toBe(false); + expect(body.id).toBeDefined(); + profileId = body.id; + }); + + it('GET / lists all profiles', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/format-profile' }) + ); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(Array.isArray(body)).toBe(true); + expect(body.length).toBeGreaterThanOrEqual(1); + expect(body.some((p: { id: number }) => p.id === profileId)).toBe(true); + }); + + it('GET /:id returns a single profile', async () => { + const res = await server.inject( + authed({ method: 
'GET', url: `/api/v1/format-profile/${profileId}` }) + ); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.id).toBe(profileId); + expect(body.name).toBe('HD Video'); + }); + + it('PUT /:id updates profile fields', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: `/api/v1/format-profile/${profileId}`, + payload: { name: 'Full HD', videoResolution: '1080p' }, + }) + ); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.id).toBe(profileId); + expect(body.name).toBe('Full HD'); + }); + + it('DELETE /:id removes the profile', async () => { + const res = await server.inject( + authed({ method: 'DELETE', url: `/api/v1/format-profile/${profileId}` }) + ); + + expect(res.statusCode).toBe(204); + + // Verify it's gone + const getRes = await server.inject( + authed({ method: 'GET', url: `/api/v1/format-profile/${profileId}` }) + ); + expect(getRes.statusCode).toBe(404); + }); + }); + + // ── 404 handling ── + + describe('Not found handling', () => { + it('GET /:id returns 404 for non-existent profile', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/format-profile/99999' }) + ); + expect(res.statusCode).toBe(404); + expect(res.json().error).toBe('Not Found'); + }); + + it('PUT /:id returns 404 for non-existent profile', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: '/api/v1/format-profile/99999', + payload: { name: 'Nope' }, + }) + ); + expect(res.statusCode).toBe(404); + }); + + it('DELETE /:id returns 404 for non-existent profile', async () => { + const res = await server.inject( + authed({ method: 'DELETE', url: '/api/v1/format-profile/99999' }) + ); + expect(res.statusCode).toBe(404); + }); + }); + + // ── Validation errors ── + + describe('Validation', () => { + it('POST rejects body missing required name', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: 
'/api/v1/format-profile', + payload: { videoResolution: '720p' }, + }) + ); + expect(res.statusCode).toBe(400); + }); + + it('POST rejects body with empty name', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/format-profile', + payload: { name: '' }, + }) + ); + expect(res.statusCode).toBe(400); + }); + + it('GET /:id returns 400 for non-numeric ID', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/format-profile/abc' }) + ); + expect(res.statusCode).toBe(400); + }); + }); + + // ── Default profile management ── + + describe('Default profile', () => { + it('setting isDefault on one profile clears it from others', async () => { + // Create first profile as default + const res1 = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/format-profile', + payload: { name: 'Default A', isDefault: true }, + }) + ); + expect(res1.statusCode).toBe(201); + const profileA = res1.json(); + expect(profileA.isDefault).toBe(true); + + // Create second profile as default — should clear the first + const res2 = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/format-profile', + payload: { name: 'Default B', isDefault: true }, + }) + ); + expect(res2.statusCode).toBe(201); + const profileB = res2.json(); + expect(profileB.isDefault).toBe(true); + + // Verify first profile is no longer default + const resA = await server.inject( + authed({ method: 'GET', url: `/api/v1/format-profile/${profileA.id}` }) + ); + expect(resA.json().isDefault).toBe(false); + + // Clean up — profileA is not default so it's deletable. + // profileB is default and protected — leave it (shared test DB, no conflict). 
+ await server.inject( + authed({ method: 'DELETE', url: `/api/v1/format-profile/${profileA.id}` }) + ); + }); + }); + + // ── Default profile protection ── + + describe('Default profile protection', () => { + it('DELETE default profile returns 403', async () => { + const createRes = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/format-profile', + payload: { name: 'Protected Default', isDefault: true }, + }) + ); + expect(createRes.statusCode).toBe(201); + const profile = createRes.json(); + + const deleteRes = await server.inject( + authed({ method: 'DELETE', url: `/api/v1/format-profile/${profile.id}` }) + ); + expect(deleteRes.statusCode).toBe(403); + expect(deleteRes.json().message).toBe('Cannot delete the default format profile'); + + // Profile remains in DB (default, protected) — no cleanup needed for test isolation + }); + + it('DELETE non-default profile still works', async () => { + const createRes = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/format-profile', + payload: { name: 'Deletable Profile', isDefault: false }, + }) + ); + expect(createRes.statusCode).toBe(201); + const profile = createRes.json(); + + const deleteRes = await server.inject( + authed({ method: 'DELETE', url: `/api/v1/format-profile/${profile.id}` }) + ); + expect(deleteRes.statusCode).toBe(204); + }); + + it('PUT default profile with isDefault: false returns 400', async () => { + const createRes = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/format-profile', + payload: { name: 'Default No Unset', isDefault: true }, + }) + ); + expect(createRes.statusCode).toBe(201); + const profile = createRes.json(); + + const putRes = await server.inject( + authed({ + method: 'PUT', + url: `/api/v1/format-profile/${profile.id}`, + payload: { isDefault: false }, + }) + ); + expect(putRes.statusCode).toBe(400); + expect(putRes.json().message).toBe('Cannot unset isDefault on the default format profile'); + + // Clean up — force unset via 
direct DB or just leave (fresh DB per suite) + // We can't unset via API (that's what we're testing), so just leave it + }); + + it('PUT default profile with other fields works', async () => { + const createRes = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/format-profile', + payload: { name: 'Renameable Default', isDefault: true }, + }) + ); + expect(createRes.statusCode).toBe(201); + const profile = createRes.json(); + + const putRes = await server.inject( + authed({ + method: 'PUT', + url: `/api/v1/format-profile/${profile.id}`, + payload: { name: 'Renamed Default', videoResolution: '1080p' }, + }) + ); + expect(putRes.statusCode).toBe(200); + const updated = putRes.json(); + expect(updated.name).toBe('Renamed Default'); + expect(updated.videoResolution).toBe('1080p'); + expect(updated.isDefault).toBe(true); + }); + }); +}); diff --git a/src/__tests__/format-profile.test.ts b/src/__tests__/format-profile.test.ts new file mode 100644 index 0000000..e127057 --- /dev/null +++ b/src/__tests__/format-profile.test.ts @@ -0,0 +1,544 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { + createFormatProfile, + getFormatProfileById, + getAllFormatProfiles, + getDefaultFormatProfile, + updateFormatProfile, + deleteFormatProfile, + ensureDefaultFormatProfile, +} from '../db/repositories/format-profile-repository'; +import { + createChannel, + getChannelById, +} from '../db/repositories/channel-repository'; +import { + createContentItem, + getContentItemById, + updateContentItem, + getContentItemsByStatus, +} from '../db/repositories/content-repository'; +import type { Platform } from '../types/index'; + +let tmpDir: string; + +function freshDbPath(): string { + tmpDir = 
mkdtempSync(join(tmpdir(), 'tubearr-fp-test-')); + return join(tmpDir, 'test.db'); +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort (K004) + } +} + +// ── Format Profile CRUD ── + +describe('Format Profile Repository', () => { + afterEach(cleanup); + + it('creates a format profile and reads it back', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const profile = await createFormatProfile(db, { + name: 'High Quality', + videoResolution: '1080p', + audioCodec: 'opus', + audioBitrate: '320k', + containerFormat: 'mkv', + isDefault: false, + }); + + expect(profile.id).toBeGreaterThan(0); + expect(profile.name).toBe('High Quality'); + expect(profile.videoResolution).toBe('1080p'); + expect(profile.audioCodec).toBe('opus'); + expect(profile.audioBitrate).toBe('320k'); + expect(profile.containerFormat).toBe('mkv'); + expect(profile.isDefault).toBe(false); + expect(profile.createdAt).toBeTruthy(); + expect(profile.updatedAt).toBeTruthy(); + + const fetched = await getFormatProfileById(db, profile.id); + expect(fetched).toEqual(profile); + }); + + it('returns null for non-existent profile ID', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const result = await getFormatProfileById(db, 999); + expect(result).toBeNull(); + }); + + it('lists all profiles ordered by name', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + await createFormatProfile(db, { name: 'Zebra' }); + await createFormatProfile(db, { name: 'Alpha' }); + await createFormatProfile(db, { name: 'Middle' }); + + const all = await getAllFormatProfiles(db); + expect(all).toHaveLength(3); + expect(all[0].name).toBe('Alpha'); + 
expect(all[1].name).toBe('Middle'); + expect(all[2].name).toBe('Zebra'); + }); + + it('updates a format profile', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const profile = await createFormatProfile(db, { + name: 'Original', + videoResolution: '720p', + }); + + const updated = await updateFormatProfile(db, profile.id, { + name: 'Updated', + videoResolution: '1080p', + audioCodec: 'aac', + }); + + expect(updated).not.toBeNull(); + expect(updated!.name).toBe('Updated'); + expect(updated!.videoResolution).toBe('1080p'); + expect(updated!.audioCodec).toBe('aac'); + // Verify updatedAt is set (may match createdAt if within same second) + expect(updated!.updatedAt).toBeTruthy(); + }); + + it('returns null when updating non-existent profile', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const result = await updateFormatProfile(db, 999, { name: 'Nope' }); + expect(result).toBeNull(); + }); + + it('deletes a format profile', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const profile = await createFormatProfile(db, { name: 'Delete Me' }); + const deleted = await deleteFormatProfile(db, profile.id); + expect(deleted).toBe(true); + + const after = await getFormatProfileById(db, profile.id); + expect(after).toBeNull(); + }); + + it('returns false when deleting non-existent profile', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const deleted = await deleteFormatProfile(db, 999); + expect(deleted).toBe(false); + }); +}); + +// ── Default Profile Logic ── + +describe('Default Format Profile', () => { + afterEach(cleanup); + + it('returns null when no default profile exists', async () => { + const dbPath = freshDbPath(); + const db = await 
initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const result = await getDefaultFormatProfile(db); + expect(result).toBeNull(); + }); + + it('marks a profile as default on create', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const profile = await createFormatProfile(db, { + name: 'Default Profile', + isDefault: true, + }); + + expect(profile.isDefault).toBe(true); + + const fetched = await getDefaultFormatProfile(db); + expect(fetched).not.toBeNull(); + expect(fetched!.id).toBe(profile.id); + }); + + it('clears previous default when creating a new default profile', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const first = await createFormatProfile(db, { + name: 'First Default', + isDefault: true, + }); + + const second = await createFormatProfile(db, { + name: 'Second Default', + isDefault: true, + }); + + // Second should be default + const defaultProfile = await getDefaultFormatProfile(db); + expect(defaultProfile!.id).toBe(second.id); + + // First should no longer be default + const firstUpdated = await getFormatProfileById(db, first.id); + expect(firstUpdated!.isDefault).toBe(false); + }); + + it('clears previous default when updating a profile to be default', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const first = await createFormatProfile(db, { + name: 'Current Default', + isDefault: true, + }); + + const second = await createFormatProfile(db, { + name: 'Will Become Default', + isDefault: false, + }); + + await updateFormatProfile(db, second.id, { isDefault: true }); + + const defaultProfile = await getDefaultFormatProfile(db); + expect(defaultProfile!.id).toBe(second.id); + + const firstUpdated = await getFormatProfileById(db, first.id); + expect(firstUpdated!.isDefault).toBe(false); + }); +}); + 
+// ── Ensure Default Profile (Seed) ── + +describe('ensureDefaultFormatProfile', () => { + afterEach(cleanup); + + it('creates a default profile when none exists', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const profile = await ensureDefaultFormatProfile(db); + + expect(profile.name).toBe('Default'); + expect(profile.isDefault).toBe(true); + expect(profile.videoResolution).toBeNull(); + expect(profile.audioCodec).toBeNull(); + expect(profile.audioBitrate).toBeNull(); + expect(profile.containerFormat).toBeNull(); + + // Verify it's retrievable + const fetched = await getDefaultFormatProfile(db); + expect(fetched).not.toBeNull(); + expect(fetched!.id).toBe(profile.id); + }); + + it('is idempotent — does not create a duplicate', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const first = await ensureDefaultFormatProfile(db); + const second = await ensureDefaultFormatProfile(db); + + expect(first.id).toBe(second.id); + + const all = await getAllFormatProfiles(db); + expect(all).toHaveLength(1); + }); + + it('does not overwrite an existing default profile', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + // Create a custom default profile first + const custom = await createFormatProfile(db, { + name: 'Custom Default', + isDefault: true, + videoResolution: '1080p', + }); + + const result = await ensureDefaultFormatProfile(db); + + // Should return the existing one, not create a new one + expect(result.id).toBe(custom.id); + expect(result.name).toBe('Custom Default'); + expect(result.videoResolution).toBe('1080p'); + + const all = await getAllFormatProfiles(db); + expect(all).toHaveLength(1); + }); +}); + +// ── Channel ↔ Format Profile FK ── + +describe('Channel-FormatProfile FK relationship', () => { + afterEach(cleanup); + + 
it('creates a channel with a format profile ID', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const profile = await createFormatProfile(db, { name: 'Test Profile' }); + + const channel = await createChannel(db, { + name: 'Test Channel', + platform: 'youtube' as Platform, + platformId: 'UC123', + url: 'https://www.youtube.com/@Test', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: profile.id, + }); + + expect(channel.formatProfileId).toBe(profile.id); + }); + + it('creates a channel with null format profile ID', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const channel = await createChannel(db, { + name: 'No Profile Channel', + platform: 'youtube' as Platform, + platformId: 'UC456', + url: 'https://www.youtube.com/@NoProfile', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + expect(channel.formatProfileId).toBeNull(); + }); +}); + +// ── Content Item Update Functions ── + +describe('Content Item Update & Query Functions', () => { + afterEach(cleanup); + + async function setupChannelWithItem(db: ReturnType extends Promise ? T : never) { + const channel = await createChannel(db, { + name: 'Content Channel', + platform: 'youtube' as Platform, + platformId: 'UC_CONTENT_TEST', + url: 'https://www.youtube.com/@ContentChannel', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + const item = await createContentItem(db, { + channelId: channel.id, + title: 'Test Video', + platformContentId: 'vid123', + url: 'https://www.youtube.com/watch?v=vid123', + contentType: 'video' as const, + duration: 600, + }); + + return { channel, item: item! 
}; + } + + it('gets a content item by ID', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + const { item } = await setupChannelWithItem(db); + + const fetched = await getContentItemById(db, item.id); + expect(fetched).not.toBeNull(); + expect(fetched!.id).toBe(item.id); + expect(fetched!.title).toBe('Test Video'); + expect(fetched!.status).toBe('monitored'); + }); + + it('returns null for non-existent content item ID', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const result = await getContentItemById(db, 999); + expect(result).toBeNull(); + }); + + it('updates content item with download results', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + const { item } = await setupChannelWithItem(db); + + const qualityMetadata = { + actualResolution: '1920x1080', + actualCodec: 'h264', + actualBitrate: '5000kbps', + containerFormat: 'mp4', + qualityWarnings: [], + }; + + const updated = await updateContentItem(db, item.id, { + filePath: '/media/youtube/channel/test-video.mp4', + fileSize: 52428800, + format: 'mp4', + qualityMetadata, + status: 'downloaded', + }); + + expect(updated).not.toBeNull(); + expect(updated!.filePath).toBe('/media/youtube/channel/test-video.mp4'); + expect(updated!.fileSize).toBe(52428800); + expect(updated!.format).toBe('mp4'); + expect(updated!.qualityMetadata).toEqual(qualityMetadata); + expect(updated!.status).toBe('downloaded'); + // Verify updatedAt is set (may match original if within same second) + expect(updated!.updatedAt).toBeTruthy(); + }); + + it('returns null when updating non-existent content item', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const result = await updateContentItem(db, 999, { status: 'failed' }); + 
expect(result).toBeNull(); + }); + + it('gets content items by status', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const channel = await createChannel(db, { + name: 'Multi Channel', + platform: 'youtube' as Platform, + platformId: 'UC_MULTI', + url: 'https://www.youtube.com/@Multi', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + // Create items with different statuses + await createContentItem(db, { + channelId: channel.id, + title: 'Item 1', + platformContentId: 'v1', + url: 'https://youtube.com/watch?v=v1', + contentType: 'video' as const, + duration: null, + status: 'monitored', + }); + + const item2 = await createContentItem(db, { + channelId: channel.id, + title: 'Item 2', + platformContentId: 'v2', + url: 'https://youtube.com/watch?v=v2', + contentType: 'video' as const, + duration: null, + status: 'monitored', + }); + + await createContentItem(db, { + channelId: channel.id, + title: 'Item 3', + platformContentId: 'v3', + url: 'https://youtube.com/watch?v=v3', + contentType: 'audio' as const, + duration: null, + status: 'downloaded', + }); + + const monitored = await getContentItemsByStatus(db, 'monitored'); + expect(monitored).toHaveLength(2); + + const downloaded = await getContentItemsByStatus(db, 'downloaded'); + expect(downloaded).toHaveLength(1); + expect(downloaded[0].title).toBe('Item 3'); + }); + + it('respects limit parameter on getContentItemsByStatus', async () => { + const dbPath = freshDbPath(); + const db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + const channel = await createChannel(db, { + name: 'Limit Channel', + platform: 'youtube' as Platform, + platformId: 'UC_LIMIT', + url: 'https://www.youtube.com/@Limit', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + for (let i = 0; i < 5; i++) { + 
await createContentItem(db, { + channelId: channel.id, + title: `Item ${i}`, + platformContentId: `vid_${i}`, + url: `https://youtube.com/watch?v=vid_${i}`, + contentType: 'video' as const, + duration: null, + status: 'monitored', + }); + } + + const limited = await getContentItemsByStatus(db, 'monitored', 2); + expect(limited).toHaveLength(2); + }); +}); + +// ── Config ── + +describe('Config download fields', () => { + it('has mediaPath, concurrentDownloads, and cookiePath with defaults', async () => { + // Config is loaded at import time, so just verify the fields exist with defaults + const { appConfig } = await import('../config/index'); + expect(appConfig.mediaPath).toBe('./media'); + expect(appConfig.concurrentDownloads).toBe(2); + expect(appConfig.cookiePath).toBe('./data/cookies'); + }); +}); diff --git a/src/__tests__/health-service.test.ts b/src/__tests__/health-service.test.ts new file mode 100644 index 0000000..1b82585 --- /dev/null +++ b/src/__tests__/health-service.test.ts @@ -0,0 +1,326 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { createHistoryEvent } from '../db/repositories/history-repository'; +import { HealthService } from '../services/health'; +import type { SchedulerState } from '../services/scheduler'; + +// ── Mock yt-dlp ── +vi.mock('../sources/yt-dlp', () => ({ + getYtDlpVersion: vi.fn(), +})); + +// ── Mock statfs ── +vi.mock('node:fs/promises', async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + statfs: vi.fn(), + }; +}); + +import { getYtDlpVersion } from '../sources/yt-dlp'; +import { statfs } from 'node:fs/promises'; + +const mockGetYtDlpVersion = vi.mocked(getYtDlpVersion); +const mockStatfs = 
vi.mocked(statfs); + +// ── Test Helpers ── + +let tmpDir: string; +let db: Awaited>; + +async function setupDb(): Promise { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-health-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort + } +} + +function makeSchedulerState(overrides?: Partial): SchedulerState { + return { + running: true, + channelCount: 3, + channels: [], + ...overrides, + }; +} + +function makeStatfsResult(availableRatio: number, totalBlocks = 1000000) { + // bsize=4096, total blocks = totalBlocks, available = totalBlocks * ratio + const bsize = 4096; + const bavail = Math.floor(totalBlocks * availableRatio); + return { + type: 0, + bsize: BigInt(bsize), + blocks: BigInt(totalBlocks), + bfree: BigInt(bavail), + bavail: BigInt(bavail), + files: BigInt(0), + ffree: BigInt(0), + }; +} + +// ── Tests ── + +describe('HealthService', () => { + beforeEach(async () => { + await setupDb(); + vi.clearAllMocks(); + // Default mocks + mockGetYtDlpVersion.mockResolvedValue('2024.12.23'); + mockStatfs.mockResolvedValue(makeStatfsResult(0.5) as never); // 50% free + }); + + afterEach(() => { + cleanup(); + }); + + // ── Scheduler Component ── + + describe('scheduler component', () => { + it('returns healthy with channel count when scheduler is running', async () => { + const service = new HealthService( + db, + () => makeSchedulerState({ running: true, channelCount: 5 }), + '/tmp/media' + ); + + const components = await service.getComponentHealth(); + const scheduler = components.find((c) => c.name === 'scheduler'); + + expect(scheduler).toBeDefined(); + expect(scheduler!.status).toBe('healthy'); + expect(scheduler!.message).toBe('Running — 5 channel(s) monitored'); + expect(scheduler!.details).toEqual({ 
channelCount: 5 }); + }); + + it('returns degraded when scheduler is disabled (null)', async () => { + const service = new HealthService(db, () => null, '/tmp/media'); + + const components = await service.getComponentHealth(); + const scheduler = components.find((c) => c.name === 'scheduler'); + + expect(scheduler!.status).toBe('degraded'); + expect(scheduler!.message).toBe('Scheduler disabled'); + }); + + it('returns unhealthy when scheduler is stopped', async () => { + const service = new HealthService( + db, + () => makeSchedulerState({ running: false }), + '/tmp/media' + ); + + const components = await service.getComponentHealth(); + const scheduler = components.find((c) => c.name === 'scheduler'); + + expect(scheduler!.status).toBe('unhealthy'); + expect(scheduler!.message).toBe('Scheduler stopped'); + }); + }); + + // ── yt-dlp Component ── + + describe('yt-dlp component', () => { + it('returns healthy with version when yt-dlp is available', async () => { + mockGetYtDlpVersion.mockResolvedValue('2024.12.23'); + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const ytDlp = components.find((c) => c.name === 'ytDlp'); + + expect(ytDlp!.status).toBe('healthy'); + expect(ytDlp!.message).toBe('yt-dlp 2024.12.23'); + expect(ytDlp!.details).toEqual({ version: '2024.12.23' }); + }); + + it('returns unhealthy when yt-dlp is not available', async () => { + mockGetYtDlpVersion.mockResolvedValue(null); + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const ytDlp = components.find((c) => c.name === 'ytDlp'); + + expect(ytDlp!.status).toBe('unhealthy'); + expect(ytDlp!.message).toBe('yt-dlp not found'); + }); + }); + + // ── Disk Space Component ── + + describe('disk space component', () => { + it('returns healthy when >10% free', async () => { + 
mockStatfs.mockResolvedValue(makeStatfsResult(0.5) as never); // 50% free + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const disk = components.find((c) => c.name === 'diskSpace'); + + expect(disk!.status).toBe('healthy'); + expect(disk!.message).toMatch(/GB free of/); + expect(disk!.message).toMatch(/50%/); + expect(disk!.details).toHaveProperty('availableBytes'); + expect(disk!.details).toHaveProperty('totalBytes'); + expect(disk!.details).toHaveProperty('freePercent'); + }); + + it('returns degraded when 5-10% free', async () => { + mockStatfs.mockResolvedValue(makeStatfsResult(0.07) as never); // 7% free + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const disk = components.find((c) => c.name === 'diskSpace'); + + expect(disk!.status).toBe('degraded'); + }); + + it('returns unhealthy when <5% free', async () => { + mockStatfs.mockResolvedValue(makeStatfsResult(0.03) as never); // 3% free + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const disk = components.find((c) => c.name === 'diskSpace'); + + expect(disk!.status).toBe('unhealthy'); + }); + + it('returns degraded on statfs error', async () => { + mockStatfs.mockRejectedValue(new Error('ENOENT: no such file or directory')); + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const disk = components.find((c) => c.name === 'diskSpace'); + + expect(disk!.status).toBe('degraded'); + expect(disk!.message).toMatch(/Disk check failed/); + expect(disk!.message).toMatch(/ENOENT/); + }); + }); + + // ── Recent Errors Component ── + + describe('recent errors component', () => { + it('returns healthy when no errors in 24h', async () => { + 
const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const errors = components.find((c) => c.name === 'recentErrors'); + + expect(errors!.status).toBe('healthy'); + expect(errors!.message).toBe('0 error(s) in the last 24 hours'); + }); + + it('returns degraded when 1-5 errors in 24h', async () => { + // Insert 3 failed history events + for (let i = 0; i < 3; i++) { + await createHistoryEvent(db, { + eventType: 'failed', + status: 'failed', + details: { error: `Test error ${i}` }, + }); + } + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const errors = components.find((c) => c.name === 'recentErrors'); + + expect(errors!.status).toBe('degraded'); + expect(errors!.message).toBe('3 error(s) in the last 24 hours'); + }); + + it('returns unhealthy when >5 errors in 24h', async () => { + // Insert 7 failed events + for (let i = 0; i < 7; i++) { + await createHistoryEvent(db, { + eventType: 'failed', + status: 'failed', + details: { error: `Test error ${i}` }, + }); + } + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + const errors = components.find((c) => c.name === 'recentErrors'); + + expect(errors!.status).toBe('unhealthy'); + expect(errors!.message).toBe('7 error(s) in the last 24 hours'); + expect(errors!.details).toEqual({ errorCount: 7 }); + }); + + it('does not count non-failed events', async () => { + // Insert a downloaded event — should not count + await createHistoryEvent(db, { + eventType: 'downloaded', + status: 'success', + }); + // Insert a failed event — should count + await createHistoryEvent(db, { + eventType: 'failed', + status: 'failed', + }); + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await 
service.getComponentHealth(); + const errors = components.find((c) => c.name === 'recentErrors'); + + expect(errors!.status).toBe('degraded'); + expect(errors!.message).toBe('1 error(s) in the last 24 hours'); + }); + }); + + // ── Caching ── + + describe('caching', () => { + it('caches yt-dlp version — second call does not invoke getYtDlpVersion', async () => { + mockGetYtDlpVersion.mockResolvedValue('2024.12.23'); + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + await service.getComponentHealth(); + await service.getComponentHealth(); + + // getYtDlpVersion should only be called once due to caching + expect(mockGetYtDlpVersion).toHaveBeenCalledTimes(1); + }); + + it('caches disk space — second call does not invoke statfs', async () => { + mockStatfs.mockResolvedValue(makeStatfsResult(0.5) as never); + + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + await service.getComponentHealth(); + await service.getComponentHealth(); + + // statfs should only be called once due to caching + expect(mockStatfs).toHaveBeenCalledTimes(1); + }); + }); + + // ── Full Response Shape ── + + describe('full response', () => { + it('returns all four components', async () => { + const service = new HealthService(db, () => makeSchedulerState(), '/tmp/media'); + const components = await service.getComponentHealth(); + + expect(components).toHaveLength(4); + const names = components.map((c) => c.name); + expect(names).toContain('scheduler'); + expect(names).toContain('ytDlp'); + expect(names).toContain('diskSpace'); + expect(names).toContain('recentErrors'); + }); + }); +}); diff --git a/src/__tests__/history-api.test.ts b/src/__tests__/history-api.test.ts new file mode 100644 index 0000000..70f112c --- /dev/null +++ b/src/__tests__/history-api.test.ts @@ -0,0 +1,255 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } 
from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import { createHistoryEvent } from '../db/repositories/history-repository'; +import type { Channel, ContentItem } from '../types/index'; + +/** + * Integration tests for history and activity API endpoints. + */ + +describe('History API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + let testChannel: Channel; + let testContent: ContentItem; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-history-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + + // Create test data + testChannel = await createChannel(db, { + name: 'History API Test Channel', + platform: 'youtube', + platformId: 'UC_history_api_test', + url: 'https://www.youtube.com/channel/UC_history_api_test', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + testContent = (await createContentItem(db, { + channelId: testChannel.id, + title: 'History API Test Video', + platformContentId: 'vid_hist_api_1', + url: 'https://www.youtube.com/watch?v=hist_test', + contentType: 'video', + duration: 300, + status: 'monitored', + }))!; + + // Seed some history events + await createHistoryEvent(db, { + contentItemId: testContent.id, + channelId: testChannel.id, + eventType: 'grabbed', + status: 'pending', + details: { title: testContent.title }, + }); + await createHistoryEvent(db, { + contentItemId: testContent.id, + channelId: testChannel.id, + eventType: 'downloaded', + status: 'completed', + details: { title: testContent.title }, + }); + await createHistoryEvent(db, { + contentItemId: testContent.id, + channelId: testChannel.id, + eventType: 'failed', + status: 'failed', + details: { error: 'test error' }, + }); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Helpers ── + + function authed(opts: Record) { + return { + ...opts, + headers: { + 'x-api-key': apiKey, + ...(opts.headers as Record | undefined), + }, + }; + } + + // ── Auth gating ── + + describe('Authentication', () => { + it('GET /api/v1/history returns 401 without API key', async () => { + const res = await server.inject({ method: 'GET', url: '/api/v1/history' }); + expect(res.statusCode).toBe(401); + }); + + it('GET /api/v1/activity returns 401 without API key', async () => { + const 
res = await server.inject({ method: 'GET', url: '/api/v1/activity' }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── GET /api/v1/history ── + + describe('GET /api/v1/history', () => { + it('returns paginated history events', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/history' }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(Array.isArray(body.data)).toBe(true); + expect(body.pagination).toBeDefined(); + expect(body.pagination.page).toBe(1); + expect(body.pagination.pageSize).toBe(20); + expect(body.pagination.totalItems).toBeGreaterThanOrEqual(3); + expect(body.pagination.totalPages).toBeGreaterThanOrEqual(1); + }); + + it('respects page and pageSize parameters', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/history?page=1&pageSize=2' }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.length).toBeLessThanOrEqual(2); + expect(body.pagination.pageSize).toBe(2); + }); + + it('filters by eventType', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/history?eventType=grabbed' }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.every((e: { eventType: string }) => e.eventType === 'grabbed')).toBe( + true + ); + }); + + it('filters by channelId', async () => { + const res = await server.inject( + authed({ + method: 'GET', + url: `/api/v1/history?channelId=${testChannel.id}`, + }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.length).toBeGreaterThanOrEqual(3); + expect( + body.data.every((e: { channelId: number }) => e.channelId === testChannel.id) + ).toBe(true); + }); + + it('filters by contentItemId', async () => { + const res = await server.inject( + authed({ + method: 'GET', + url: `/api/v1/history?contentItemId=${testContent.id}`, + }) + ); + 
expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.length).toBeGreaterThanOrEqual(3); + }); + + it('returns empty data for unmatched filters', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/history?eventType=nonexistent' }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data).toHaveLength(0); + expect(body.pagination.totalItems).toBe(0); + }); + }); + + // ── GET /api/v1/activity ── + + describe('GET /api/v1/activity', () => { + it('returns recent activity feed', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/activity' }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(Array.isArray(body.data)).toBe(true); + expect(body.data.length).toBeGreaterThanOrEqual(3); + }); + + it('respects limit parameter', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/activity?limit=2' }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.length).toBeLessThanOrEqual(2); + }); + + it('returns events in newest-first order', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/activity' }) + ); + const body = res.json(); + const dates = body.data.map((e: { createdAt: string; id: number }) => ({ + createdAt: e.createdAt, + id: e.id, + })); + + // Events should be ordered by createdAt DESC, then ID DESC + for (let i = 1; i < dates.length; i++) { + const prev = dates[i - 1]; + const curr = dates[i]; + const prevTime = new Date(prev.createdAt).getTime(); + const currTime = new Date(curr.createdAt).getTime(); + expect(prevTime).toBeGreaterThanOrEqual(currTime); + if (prevTime === currTime) { + expect(prev.id).toBeGreaterThan(curr.id); + } + } + }); + }); +}); diff --git a/src/__tests__/history-repository.test.ts b/src/__tests__/history-repository.test.ts new file mode 100644 
index 0000000..0df4935 --- /dev/null +++ b/src/__tests__/history-repository.test.ts @@ -0,0 +1,311 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import { + createHistoryEvent, + getHistoryEvents, + getRecentActivity, +} from '../db/repositories/history-repository'; +import type { Channel, ContentItem } from '../types/index'; + +// ── Test Helpers ── + +let tmpDir: string; +let db: Awaited>; +let testChannel: Channel; +let testChannel2: Channel; +let testContentItem: ContentItem; +let testContentItem2: ContentItem; + +async function setupDb(): Promise { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-history-test-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + testChannel = await createChannel(db, { + name: 'History Test Channel', + platform: 'youtube', + platformId: 'UC_hist_test', + url: 'https://www.youtube.com/channel/UC_hist_test', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + + testChannel2 = await createChannel(db, { + name: 'History Test Channel 2', + platform: 'soundcloud', + platformId: 'hist_test_2', + url: 'https://soundcloud.com/hist_test_2', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + + testContentItem = (await createContentItem(db, { + channelId: testChannel.id, + title: 'History Test Video', + platformContentId: 'vid_hist_1', + url: 'https://www.youtube.com/watch?v=hist1', + contentType: 'video', + duration: 600, + status: 
'monitored', + }))!; + + testContentItem2 = (await createContentItem(db, { + channelId: testChannel2.id, + title: 'History Test Audio', + platformContentId: 'audio_hist_1', + url: 'https://soundcloud.com/hist_test_2/track1', + contentType: 'audio', + duration: 300, + status: 'monitored', + }))!; +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort + } +} + +// ── Tests ── + +describe('History Repository', () => { + beforeEach(async () => { + await setupDb(); + }); + + afterEach(cleanup); + + describe('createHistoryEvent', () => { + it('creates a history event with all fields', async () => { + const event = await createHistoryEvent(db, { + contentItemId: testContentItem.id, + channelId: testChannel.id, + eventType: 'grabbed', + status: 'success', + details: { source: 'monitoring', resolution: '1080p' }, + }); + + expect(event.id).toBeGreaterThan(0); + expect(event.contentItemId).toBe(testContentItem.id); + expect(event.channelId).toBe(testChannel.id); + expect(event.eventType).toBe('grabbed'); + expect(event.status).toBe('success'); + expect(event.details).toEqual({ source: 'monitoring', resolution: '1080p' }); + expect(event.createdAt).toBeTruthy(); + }); + + it('creates a history event with null optional fields', async () => { + const event = await createHistoryEvent(db, { + eventType: 'imported', + status: 'success', + }); + + expect(event.contentItemId).toBeNull(); + expect(event.channelId).toBeNull(); + expect(event.details).toBeNull(); + }); + }); + + describe('getHistoryEvents', () => { + it('returns all events ordered by createdAt DESC', async () => { + await createHistoryEvent(db, { + contentItemId: testContentItem.id, + channelId: testChannel.id, + eventType: 'grabbed', + status: 'success', + }); + await createHistoryEvent(db, { + contentItemId: testContentItem.id, + channelId: testChannel.id, + eventType: 
'downloaded', + status: 'success', + }); + + const result = await getHistoryEvents(db); + expect(result.items).toHaveLength(2); + expect(result.total).toBe(2); + // Most recent first + expect(result.items[0].eventType).toBe('downloaded'); + expect(result.items[1].eventType).toBe('grabbed'); + }); + + it('paginates correctly with page and pageSize', async () => { + // Create 5 events + for (let i = 0; i < 5; i++) { + await createHistoryEvent(db, { + eventType: 'grabbed', + status: 'success', + details: { index: i }, + }); + } + + const page1 = await getHistoryEvents(db, undefined, 1, 2); + expect(page1.items).toHaveLength(2); + expect(page1.total).toBe(5); + + const page2 = await getHistoryEvents(db, undefined, 2, 2); + expect(page2.items).toHaveLength(2); + expect(page2.total).toBe(5); + + const page3 = await getHistoryEvents(db, undefined, 3, 2); + expect(page3.items).toHaveLength(1); + expect(page3.total).toBe(5); + + // Pages should not overlap + const allIds = [ + ...page1.items.map((i) => i.id), + ...page2.items.map((i) => i.id), + ...page3.items.map((i) => i.id), + ]; + expect(new Set(allIds).size).toBe(5); + }); + + it('filters by eventType', async () => { + await createHistoryEvent(db, { + eventType: 'grabbed', + status: 'success', + }); + await createHistoryEvent(db, { + eventType: 'downloaded', + status: 'success', + }); + await createHistoryEvent(db, { + eventType: 'failed', + status: 'error', + }); + + const result = await getHistoryEvents(db, { eventType: 'grabbed' }); + expect(result.items).toHaveLength(1); + expect(result.total).toBe(1); + expect(result.items[0].eventType).toBe('grabbed'); + }); + + it('filters by channelId', async () => { + await createHistoryEvent(db, { + channelId: testChannel.id, + eventType: 'grabbed', + status: 'success', + }); + await createHistoryEvent(db, { + channelId: testChannel2.id, + eventType: 'grabbed', + status: 'success', + }); + + const result = await getHistoryEvents(db, { channelId: testChannel.id }); + 
expect(result.items).toHaveLength(1); + expect(result.total).toBe(1); + expect(result.items[0].channelId).toBe(testChannel.id); + }); + + it('filters by contentItemId', async () => { + await createHistoryEvent(db, { + contentItemId: testContentItem.id, + eventType: 'downloaded', + status: 'success', + }); + await createHistoryEvent(db, { + contentItemId: testContentItem2.id, + eventType: 'downloaded', + status: 'success', + }); + + const result = await getHistoryEvents(db, { contentItemId: testContentItem.id }); + expect(result.items).toHaveLength(1); + expect(result.total).toBe(1); + expect(result.items[0].contentItemId).toBe(testContentItem.id); + }); + + it('combines multiple filters', async () => { + await createHistoryEvent(db, { + contentItemId: testContentItem.id, + channelId: testChannel.id, + eventType: 'grabbed', + status: 'success', + }); + await createHistoryEvent(db, { + contentItemId: testContentItem.id, + channelId: testChannel.id, + eventType: 'downloaded', + status: 'success', + }); + await createHistoryEvent(db, { + contentItemId: testContentItem2.id, + channelId: testChannel2.id, + eventType: 'grabbed', + status: 'success', + }); + + const result = await getHistoryEvents(db, { + eventType: 'grabbed', + channelId: testChannel.id, + }); + expect(result.items).toHaveLength(1); + expect(result.total).toBe(1); + expect(result.items[0].contentItemId).toBe(testContentItem.id); + }); + + it('returns empty results for no-match filters', async () => { + await createHistoryEvent(db, { + eventType: 'grabbed', + status: 'success', + }); + + const result = await getHistoryEvents(db, { eventType: 'deleted' }); + expect(result.items).toHaveLength(0); + expect(result.total).toBe(0); + }); + }); + + describe('getRecentActivity', () => { + it('returns most recent events up to default limit', async () => { + for (let i = 0; i < 3; i++) { + await createHistoryEvent(db, { + eventType: 'grabbed', + status: 'success', + details: { index: i }, + }); + } + + const recent 
= await getRecentActivity(db); + expect(recent).toHaveLength(3); + // Should be newest first + expect((recent[0].details as Record)?.index).toBe(2); + }); + + it('respects custom limit', async () => { + for (let i = 0; i < 5; i++) { + await createHistoryEvent(db, { + eventType: 'grabbed', + status: 'success', + }); + } + + const recent = await getRecentActivity(db, 2); + expect(recent).toHaveLength(2); + }); + + it('returns empty array when no events exist', async () => { + const recent = await getRecentActivity(db); + expect(recent).toHaveLength(0); + }); + }); +}); diff --git a/src/__tests__/monitoring-api.test.ts b/src/__tests__/monitoring-api.test.ts new file mode 100644 index 0000000..a50e21a --- /dev/null +++ b/src/__tests__/monitoring-api.test.ts @@ -0,0 +1,421 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import type { Channel, ContentItem } from '../types/index'; + +/** + * Integration tests for monitoring API endpoints: + * PATCH /api/v1/content/:id/monitored — single item toggle + * PATCH /api/v1/content/bulk/monitored — bulk toggle + * PUT /api/v1/channel/:id/monitoring-mode — channel monitoring mode with cascade + */ + +describe('monitoring-api', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; 
+ let channel: Channel; + const items: ContentItem[] = []; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-monitoring-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? ''; + expect(apiKey).toBeTruthy(); + + // Create a test channel + channel = await createChannel(db, { + name: 'Monitoring Test Channel', + platform: 'youtube', + platformId: 'UC_MONITORING_TEST', + url: 'https://www.youtube.com/channel/UC_MONITORING_TEST', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + // Create 4 content items with mixed monitored states + const itemDefs = [ + { platformContentId: 'mon_v1', title: 'Monitored Item One', monitored: true }, + { platformContentId: 'mon_v2', title: 'Monitored Item Two', monitored: true }, + { platformContentId: 'mon_v3', title: 'Unmonitored Item Three', monitored: false }, + { platformContentId: 'mon_v4', title: 'Unmonitored Item Four', monitored: false }, + ]; + + for (const def of itemDefs) { + const created = await createContentItem(db, { + channelId: channel.id, + title: def.title, + platformContentId: def.platformContentId, + url: `https://youtube.com/watch?v=${def.platformContentId}`, + contentType: 'video', + duration: 600, + monitored: def.monitored, + }); + if (created) items.push(created); + } + + expect(items.length).toBe(4); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows (K004) + } + }); + + // ── PATCH /api/v1/content/:id/monitored ── + + describe('PATCH 
/api/v1/content/:id/monitored', () => { + it('toggles monitored from true to false', async () => { + const item = items[0]; // monitored: true + const res = await server.inject({ + method: 'PATCH', + url: `/api/v1/content/${item.id}/monitored`, + headers: { 'x-api-key': apiKey }, + payload: { monitored: false }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.id).toBe(item.id); + expect(body.data.monitored).toBe(false); + }); + + it('toggles monitored from false to true', async () => { + const item = items[2]; // monitored: false + const res = await server.inject({ + method: 'PATCH', + url: `/api/v1/content/${item.id}/monitored`, + headers: { 'x-api-key': apiKey }, + payload: { monitored: true }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.id).toBe(item.id); + expect(body.data.monitored).toBe(true); + }); + + it('returns 404 for non-existent content item ID', async () => { + const res = await server.inject({ + method: 'PATCH', + url: '/api/v1/content/99999/monitored', + headers: { 'x-api-key': apiKey }, + payload: { monitored: true }, + }); + + expect(res.statusCode).toBe(404); + const body = res.json(); + expect(body.error).toBe('Not Found'); + }); + + it('returns 400 for invalid body (missing monitored field)', async () => { + const res = await server.inject({ + method: 'PATCH', + url: `/api/v1/content/${items[0].id}/monitored`, + headers: { 'x-api-key': apiKey }, + payload: {}, + }); + + expect(res.statusCode).toBe(400); + }); + + it('returns 401 without API key', async () => { + const res = await server.inject({ + method: 'PATCH', + url: `/api/v1/content/${items[0].id}/monitored`, + payload: { monitored: true }, + }); + + expect(res.statusCode).toBe(401); + }); + }); + + // ── PATCH /api/v1/content/bulk/monitored ── + + describe('PATCH /api/v1/content/bulk/monitored', () => { + it('bulk sets multiple items 
to false', async () => { + const ids = items.map((i) => i.id); + const res = await server.inject({ + method: 'PATCH', + url: '/api/v1/content/bulk/monitored', + headers: { 'x-api-key': apiKey }, + payload: { ids, monitored: false }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.updated).toBe(ids.length); + }); + + it('returns count of only existing items when some IDs are invalid', async () => { + const ids = [items[0].id, items[1].id, 99998, 99999]; + const res = await server.inject({ + method: 'PATCH', + url: '/api/v1/content/bulk/monitored', + headers: { 'x-api-key': apiKey }, + payload: { ids, monitored: true }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.updated).toBe(2); // Only the 2 existing items + }); + + it('verifies items actually changed by fetching them via GET', async () => { + // First set all to false + const ids = items.map((i) => i.id); + await server.inject({ + method: 'PATCH', + url: '/api/v1/content/bulk/monitored', + headers: { 'x-api-key': apiKey }, + payload: { ids, monitored: false }, + }); + + // Then set all to true + await server.inject({ + method: 'PATCH', + url: '/api/v1/content/bulk/monitored', + headers: { 'x-api-key': apiKey }, + payload: { ids, monitored: true }, + }); + + // Fetch via GET and verify + const getRes = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/content`, + headers: { 'x-api-key': apiKey }, + }); + + expect(getRes.statusCode).toBe(200); + const body = getRes.json(); + expect(body.data.length).toBe(4); + for (const item of body.data) { + expect(item.monitored).toBe(true); + } + }); + + it('returns 400 for invalid body (missing ids)', async () => { + const res = await server.inject({ + method: 'PATCH', + url: '/api/v1/content/bulk/monitored', + headers: { 'x-api-key': apiKey }, + payload: { monitored: true }, + }); + + 
expect(res.statusCode).toBe(400); + }); + + it('returns 401 without API key', async () => { + const res = await server.inject({ + method: 'PATCH', + url: '/api/v1/content/bulk/monitored', + payload: { ids: [1], monitored: true }, + }); + + expect(res.statusCode).toBe(401); + }); + }); + + // ── PUT /api/v1/channel/:id/monitoring-mode ── + + describe('PUT /api/v1/channel/:id/monitoring-mode', () => { + it("set mode to 'all': channel has monitoringMode 'all', all items monitored", async () => { + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${channel.id}/monitoring-mode`, + headers: { 'x-api-key': apiKey }, + payload: { monitoringMode: 'all' }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.monitoringMode).toBe('all'); + expect(body.data.monitoringEnabled).toBe(true); + + // Verify all content items are monitored + const contentRes = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/content`, + headers: { 'x-api-key': apiKey }, + }); + const contentBody = contentRes.json(); + expect(contentBody.data.length).toBe(4); + for (const item of contentBody.data) { + expect(item.monitored).toBe(true); + } + }); + + it("set mode to 'future': all existing items become unmonitored", async () => { + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${channel.id}/monitoring-mode`, + headers: { 'x-api-key': apiKey }, + payload: { monitoringMode: 'future' }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.monitoringMode).toBe('future'); + expect(body.data.monitoringEnabled).toBe(true); + + // Verify all content items are unmonitored (existing content, mode is 'future') + const contentRes = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/content`, + headers: { 'x-api-key': apiKey }, + }); + const contentBody = contentRes.json(); + for (const item of 
contentBody.data) { + expect(item.monitored).toBe(false); + } + }); + + it("set mode to 'existing': all existing items become monitored", async () => { + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${channel.id}/monitoring-mode`, + headers: { 'x-api-key': apiKey }, + payload: { monitoringMode: 'existing' }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.monitoringMode).toBe('existing'); + expect(body.data.monitoringEnabled).toBe(true); + + // Verify all content items are monitored + const contentRes = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/content`, + headers: { 'x-api-key': apiKey }, + }); + const contentBody = contentRes.json(); + for (const item of contentBody.data) { + expect(item.monitored).toBe(true); + } + }); + + it("set mode to 'none': all items unmonitored AND monitoringEnabled is false", async () => { + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${channel.id}/monitoring-mode`, + headers: { 'x-api-key': apiKey }, + payload: { monitoringMode: 'none' }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.monitoringMode).toBe('none'); + expect(body.data.monitoringEnabled).toBe(false); + + // Verify all content items are unmonitored + const contentRes = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/content`, + headers: { 'x-api-key': apiKey }, + }); + const contentBody = contentRes.json(); + for (const item of contentBody.data) { + expect(item.monitored).toBe(false); + } + }); + + it("set mode back to 'all' from 'none': monitoringEnabled restored to true", async () => { + // Precondition: mode is 'none' from previous test + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${channel.id}/monitoring-mode`, + headers: { 'x-api-key': apiKey }, + payload: { monitoringMode: 'all' }, + }); + + expect(res.statusCode).toBe(200); + 
const body = res.json(); + expect(body.data.monitoringMode).toBe('all'); + expect(body.data.monitoringEnabled).toBe(true); + + // Verify all items are monitored again + const contentRes = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/content`, + headers: { 'x-api-key': apiKey }, + }); + const contentBody = contentRes.json(); + for (const item of contentBody.data) { + expect(item.monitored).toBe(true); + } + }); + + it('returns 404 for non-existent channel ID', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/channel/99999/monitoring-mode', + headers: { 'x-api-key': apiKey }, + payload: { monitoringMode: 'all' }, + }); + + expect(res.statusCode).toBe(404); + const body = res.json(); + expect(body.error).toBe('Not Found'); + }); + + it('returns 400 for invalid mode value', async () => { + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${channel.id}/monitoring-mode`, + headers: { 'x-api-key': apiKey }, + payload: { monitoringMode: 'invalid_mode' }, + }); + + expect(res.statusCode).toBe(400); + }); + + it('returns 401 without API key', async () => { + const res = await server.inject({ + method: 'PUT', + url: `/api/v1/channel/${channel.id}/monitoring-mode`, + payload: { monitoringMode: 'all' }, + }); + + expect(res.statusCode).toBe(401); + }); + }); +}); diff --git a/src/__tests__/notification-api.test.ts b/src/__tests__/notification-api.test.ts new file mode 100644 index 0000000..0487323 --- /dev/null +++ b/src/__tests__/notification-api.test.ts @@ -0,0 +1,355 @@ +import { describe, it, expect, vi, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { 
systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; + +/** + * Integration tests for notification CRUD API endpoints. + * Uses Fastify inject — no real HTTP ports. + */ + +describe('Notification API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-notif-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read API key from database (generated by auth plugin) + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? ''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Helpers ── + + function authed(opts: Record) { + return { + ...opts, + headers: { 'x-api-key': apiKey, ...(opts.headers as Record | undefined) }, + }; + } + + // ── Auth gating ── + + describe('Authentication', () => { + it('returns 401 when no API key is provided', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/notification', + }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── CRUD lifecycle ── + + describe('CRUD lifecycle', () => { + let settingId: number; + + it('POST creates a notification setting', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/notification', + payload: { + type: 'discord', + name: 'My Discord Server', + config: { webhookUrl: 
'https://discord.com/api/webhooks/123456789012345678/abcdefghijklmnopqrstuvwxyz' }, + enabled: true, + onGrab: true, + onDownload: true, + onFailure: false, + }, + }) + ); + + expect(res.statusCode).toBe(201); + const body = res.json(); + expect(body.name).toBe('My Discord Server'); + expect(body.type).toBe('discord'); + expect(body.enabled).toBe(true); + expect(body.onGrab).toBe(true); + expect(body.onDownload).toBe(true); + expect(body.onFailure).toBe(false); + expect(body.id).toBeDefined(); + // Webhook URL should be redacted in response + expect(body.config.webhookUrl).toContain('...'); + expect(body.config.webhookUrl.length).toBeLessThan( + 'https://discord.com/api/webhooks/123456789012345678/abcdefghijklmnopqrstuvwxyz'.length + ); + settingId = body.id; + }); + + it('GET / lists all notification settings with redacted URLs', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/notification' }) + ); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(Array.isArray(body)).toBe(true); + expect(body.length).toBeGreaterThanOrEqual(1); + + const setting = body.find((s: { id: number }) => s.id === settingId); + expect(setting).toBeDefined(); + // Webhook URL should be redacted + expect(setting.config.webhookUrl).toContain('...'); + }); + + it('GET /:id returns a single setting with redacted URL', async () => { + const res = await server.inject( + authed({ method: 'GET', url: `/api/v1/notification/${settingId}` }) + ); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.id).toBe(settingId); + expect(body.name).toBe('My Discord Server'); + // Webhook URL should be redacted + expect(body.config.webhookUrl).toContain('...'); + }); + + it('PUT /:id updates setting fields', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: `/api/v1/notification/${settingId}`, + payload: { name: 'Updated Discord', onFailure: true }, + }) + ); + + 
expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.id).toBe(settingId); + expect(body.name).toBe('Updated Discord'); + expect(body.onFailure).toBe(true); + }); + + it('DELETE /:id removes the setting', async () => { + const res = await server.inject( + authed({ method: 'DELETE', url: `/api/v1/notification/${settingId}` }) + ); + + expect(res.statusCode).toBe(204); + + // Verify it's gone + const getRes = await server.inject( + authed({ method: 'GET', url: `/api/v1/notification/${settingId}` }) + ); + expect(getRes.statusCode).toBe(404); + }); + }); + + // ── 404 handling ── + + describe('Not found handling', () => { + it('GET /:id returns 404 for non-existent setting', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/notification/99999' }) + ); + expect(res.statusCode).toBe(404); + expect(res.json().error).toBe('Not Found'); + }); + + it('PUT /:id returns 404 for non-existent setting', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: '/api/v1/notification/99999', + payload: { name: 'Nope' }, + }) + ); + expect(res.statusCode).toBe(404); + }); + + it('DELETE /:id returns 404 for non-existent setting', async () => { + const res = await server.inject( + authed({ method: 'DELETE', url: '/api/v1/notification/99999' }) + ); + expect(res.statusCode).toBe(404); + }); + }); + + // ── Validation errors ── + + describe('Validation', () => { + it('POST rejects unsupported notification type', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/notification', + payload: { + type: 'telegram', + name: 'Bad Type', + config: { webhookUrl: 'https://example.com' }, + }, + }) + ); + expect(res.statusCode).toBe(400); + }); + + it('POST rejects body missing required fields', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/notification', + payload: { name: 'Missing type and config' }, + }) + ); + 
expect(res.statusCode).toBe(400); + }); + + it('POST rejects config without webhookUrl', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/notification', + payload: { + type: 'discord', + name: 'No URL', + config: {}, + }, + }) + ); + expect(res.statusCode).toBe(400); + }); + + it('GET /:id returns 400 for non-numeric ID', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/notification/abc' }) + ); + expect(res.statusCode).toBe(400); + }); + }); + + // ── Test endpoint ── + + describe('Test endpoint', () => { + it('POST /:id/test returns 404 for non-existent setting', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/notification/99999/test', + }) + ); + expect(res.statusCode).toBe(404); + }); + + it('POST /:id/test sends test notification and returns success', async () => { + // Create a setting first + const createRes = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/notification', + payload: { + type: 'discord', + name: 'Test Channel', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token123' }, + }, + }) + ); + const setting = createRes.json(); + + // Mock global fetch for the test notification + const mockFetch = vi.fn().mockResolvedValue({ ok: true, status: 200 }); + vi.stubGlobal('fetch', mockFetch); + + const res = await server.inject( + authed({ + method: 'POST', + url: `/api/v1/notification/${setting.id}/test`, + }) + ); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(mockFetch).toHaveBeenCalledOnce(); + + // Verify the webhook was called with correct Discord embed format + const [url, options] = mockFetch.mock.calls[0]; + expect(url).toBe('https://discord.com/api/webhooks/test/token123'); + const payload = JSON.parse(options.body); + expect(payload.embeds).toHaveLength(1); + expect(payload.embeds[0].title).toBe('🧪 Test Notification'); + + 
vi.restoreAllMocks(); + + // Clean up + await server.inject( + authed({ method: 'DELETE', url: `/api/v1/notification/${setting.id}` }) + ); + }); + + it('POST /:id/test returns success:false on fetch error', async () => { + // Create a setting first + const createRes = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/notification', + payload: { + type: 'discord', + name: 'Fail Channel', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token456' }, + }, + }) + ); + const setting = createRes.json(); + + // Mock global fetch to reject + const mockFetch = vi.fn().mockRejectedValue(new Error('connection refused')); + vi.stubGlobal('fetch', mockFetch); + + const res = await server.inject( + authed({ + method: 'POST', + url: `/api/v1/notification/${setting.id}/test`, + }) + ); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(false); + expect(body.error).toBe('connection refused'); + + vi.restoreAllMocks(); + + // Clean up + await server.inject( + authed({ method: 'DELETE', url: `/api/v1/notification/${setting.id}` }) + ); + }); + }); +}); diff --git a/src/__tests__/notification-queue-integration.test.ts b/src/__tests__/notification-queue-integration.test.ts new file mode 100644 index 0000000..928dd23 --- /dev/null +++ b/src/__tests__/notification-queue-integration.test.ts @@ -0,0 +1,228 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import { updateQueueItemStatus } from '../db/repositories/queue-repository'; +import { QueueService } from '../services/queue'; +import type { 
ContentItem, Channel } from '../types/index'; +import type { NotificationEvent } from '../services/notification'; + +// ── Test Helpers ── + +let tmpDir: string; +let db: Awaited>; +let testChannel: Channel; +let contentItems: ContentItem[]; + +function createMockDownloadService() { + return { + downloadItem: vi.fn().mockResolvedValue(undefined), + }; +} + +async function setupDb(): Promise { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-notif-queue-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + testChannel = await createChannel(db, { + name: 'Notification Test Channel', + platform: 'youtube', + platformId: 'UC_notif_test', + url: 'https://www.youtube.com/channel/UC_notif_test', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + + contentItems = []; + for (let i = 1; i <= 3; i++) { + const item = await createContentItem(db, { + channelId: testChannel.id, + title: `Notif Test Video ${i}`, + platformContentId: `vid_notif_${i}`, + url: `https://www.youtube.com/watch?v=notif${i}`, + contentType: 'video', + duration: 300, + status: 'monitored', + }); + contentItems.push(item!); + } +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort + } +} + +async function tick(ms = 50): Promise { + await new Promise((resolve) => setTimeout(resolve, ms)); +} + +// ── Tests ── + +describe('QueueService notification callbacks', () => { + let mockDownloadService: ReturnType; + + beforeEach(async () => { + await setupDb(); + mockDownloadService = createMockDownloadService(); + }); + + afterEach(() => { + cleanup(); + }); + + describe('onDownloadComplete', () => { + it('fires callback with correct event data on successful download', async () => { + const onDownloadComplete = vi.fn(); + + const qs = new 
QueueService(db, mockDownloadService as any, { + concurrency: 1, + onDownloadComplete, + }); + + await qs.enqueue(contentItems[0].id); + await tick(150); + qs.stop(); + + expect(onDownloadComplete).toHaveBeenCalledOnce(); + const event: NotificationEvent = onDownloadComplete.mock.calls[0][0]; + expect(event.contentTitle).toBe('Notif Test Video 1'); + expect(event.channelName).toBe('Notification Test Channel'); + expect(event.platform).toBe('youtube'); + expect(event.url).toBe('https://www.youtube.com/watch?v=notif1'); + }); + + it('fires callback for each completed download', async () => { + const onDownloadComplete = vi.fn(); + + const qs = new QueueService(db, mockDownloadService as any, { + concurrency: 2, + onDownloadComplete, + }); + + await qs.enqueue(contentItems[0].id); + await qs.enqueue(contentItems[1].id); + await tick(200); + qs.stop(); + + expect(onDownloadComplete).toHaveBeenCalledTimes(2); + }); + + it('does not fire callback when no callback is configured', async () => { + // No callbacks — just verify it doesn't throw + const qs = new QueueService(db, mockDownloadService as any, { + concurrency: 1, + }); + + await qs.enqueue(contentItems[0].id); + await tick(150); + qs.stop(); + + // No assertion needed — just verifying no error was thrown + }); + }); + + describe('onDownloadFailed', () => { + it('fires callback with error info when all attempts exhausted', async () => { + mockDownloadService.downloadItem.mockRejectedValue(new Error('permanent failure')); + const onDownloadFailed = vi.fn(); + + const qs = new QueueService(db, mockDownloadService as any, { + concurrency: 1, + onDownloadFailed, + }); + qs.stop(); + + // Enqueue and set attempts to maxAttempts - 1 so next failure exhausts + await qs.enqueue(contentItems[0].id); + await updateQueueItemStatus(db, 1, 'pending', { attempts: 2 }); // maxAttempts defaults to 3 + + qs.start(); + await tick(150); + qs.stop(); + + expect(onDownloadFailed).toHaveBeenCalledOnce(); + const event: 
NotificationEvent = onDownloadFailed.mock.calls[0][0]; + expect(event.contentTitle).toBe('Notif Test Video 1'); + expect(event.channelName).toBe('Notification Test Channel'); + expect(event.platform).toBe('youtube'); + expect(event.error).toBe('permanent failure'); + expect(event.attempt).toBe(3); + expect(event.maxAttempts).toBe(3); + }); + + it('does not fire callback when retry is still possible', async () => { + // Only fail once, then resolve on retry + let callCount = 0; + mockDownloadService.downloadItem.mockImplementation(() => { + callCount++; + if (callCount === 1) { + return Promise.reject(new Error('transient error')); + } + return Promise.resolve(undefined); + }); + + const onDownloadFailed = vi.fn(); + const onDownloadComplete = vi.fn(); + + const qs = new QueueService(db, mockDownloadService as any, { + concurrency: 1, + onDownloadFailed, + onDownloadComplete, + }); + + await qs.enqueue(contentItems[0].id); + await tick(300); + qs.stop(); + + // Failed callback should NOT have been called (retry succeeded) + expect(onDownloadFailed).not.toHaveBeenCalled(); + // Complete callback SHOULD have been called on the retry + expect(onDownloadComplete).toHaveBeenCalledOnce(); + }); + }); + + describe('callback error isolation', () => { + it('does not break queue processing when callback throws', async () => { + const onDownloadComplete = vi.fn().mockImplementation(() => { + throw new Error('callback exploded'); + }); + + const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); + + const qs = new QueueService(db, mockDownloadService as any, { + concurrency: 1, + onDownloadComplete, + }); + + await qs.enqueue(contentItems[0].id); + await tick(150); + + // Enqueue another to verify queue is still working + await qs.enqueue(contentItems[1].id); + await tick(150); + qs.stop(); + + // Both downloads should have completed despite callback error + expect(mockDownloadService.downloadItem).toHaveBeenCalledTimes(2); + 
expect(onDownloadComplete).toHaveBeenCalledTimes(2); + + consoleSpy.mockRestore(); + }); + }); +}); diff --git a/src/__tests__/notification-service.test.ts b/src/__tests__/notification-service.test.ts new file mode 100644 index 0000000..faa9896 --- /dev/null +++ b/src/__tests__/notification-service.test.ts @@ -0,0 +1,338 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { createNotificationSetting } from '../db/repositories/notification-repository'; +import { NotificationService } from '../services/notification'; + +// ── Test Helpers ── + +let tmpDir: string; +let db: Awaited>; + +async function setupDb(): Promise { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-notif-svc-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort + } +} + +// ── Tests ── + +describe('NotificationService', () => { + let mockFetch: ReturnType; + + beforeEach(async () => { + await setupDb(); + // Mock global fetch + mockFetch = vi.fn().mockResolvedValue({ + ok: true, + status: 200, + }); + vi.stubGlobal('fetch', mockFetch); + }); + + afterEach(() => { + vi.restoreAllMocks(); + cleanup(); + }); + + // ── notifyDownload ── + + describe('notifyDownload', () => { + it('sends correctly formatted Discord embed', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'Test Discord', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onDownload: true, + }); + + const service = new 
NotificationService(db); + await service.notifyDownload('My Video', 'Channel A', 'youtube', 'https://example.com/vid'); + + expect(mockFetch).toHaveBeenCalledOnce(); + const [url, options] = mockFetch.mock.calls[0]; + expect(url).toBe('https://discord.com/api/webhooks/test/token'); + expect(options.method).toBe('POST'); + expect(options.headers['Content-Type']).toBe('application/json'); + + const body = JSON.parse(options.body); + expect(body.embeds).toHaveLength(1); + const embed = body.embeds[0]; + expect(embed.title).toBe('✅ Download Complete'); + expect(embed.description).toContain('My Video'); + expect(embed.color).toBe(0x2ecc71); + expect(embed.fields).toEqual( + expect.arrayContaining([ + expect.objectContaining({ name: 'Channel', value: 'Channel A' }), + expect.objectContaining({ name: 'Platform', value: 'youtube' }), + expect.objectContaining({ name: 'URL', value: 'https://example.com/vid' }), + ]) + ); + expect(embed.timestamp).toBeDefined(); + }); + + it('includes filePath field when provided', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'Test Discord', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onDownload: true, + }); + + const service = new NotificationService(db); + await service.notifyDownload('My Video', 'Channel A', 'youtube', 'https://example.com/vid', '/media/video.mp4'); + + const body = JSON.parse(mockFetch.mock.calls[0][1].body); + const embed = body.embeds[0]; + expect(embed.fields).toEqual( + expect.arrayContaining([ + expect.objectContaining({ name: 'File', value: '/media/video.mp4' }), + ]) + ); + }); + }); + + // ── Event toggles ── + + describe('event toggles', () => { + it('skips settings with onDownload=false for download events', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'No Downloads', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onDownload: false, + onGrab: true, 
+ onFailure: true, + }); + + const service = new NotificationService(db); + await service.notifyDownload('My Video', 'Channel', 'youtube', 'https://example.com'); + + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it('skips settings with onGrab=false for grab events', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'No Grabs', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onGrab: false, + }); + + const service = new NotificationService(db); + await service.notifyGrab('My Video', 'Channel', 'youtube', 'https://example.com'); + + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it('skips settings with onFailure=false for failure events', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'No Failures', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onFailure: false, + }); + + const service = new NotificationService(db); + await service.notifyFailure('My Video', 'Channel', 'youtube', 'Some error', 1, 3); + + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it('dispatches to multiple matching channels', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'Channel A', + config: { webhookUrl: 'https://discord.com/api/webhooks/a/token_a' }, + enabled: true, + onDownload: true, + }); + await createNotificationSetting(db, { + type: 'discord', + name: 'Channel B', + config: { webhookUrl: 'https://discord.com/api/webhooks/b/token_b' }, + enabled: true, + onDownload: true, + }); + + const service = new NotificationService(db); + await service.notifyDownload('My Video', 'Channel', 'youtube', 'https://example.com'); + + expect(mockFetch).toHaveBeenCalledTimes(2); + }); + }); + + // ── Disabled settings ── + + describe('disabled settings', () => { + it('skips disabled settings entirely', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'Disabled Channel', + 
config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: false, + onDownload: true, + }); + + const service = new NotificationService(db); + await service.notifyDownload('My Video', 'Channel', 'youtube', 'https://example.com'); + + expect(mockFetch).not.toHaveBeenCalled(); + }); + }); + + // ── Error handling ── + + describe('error handling', () => { + it('catches and logs fetch errors without throwing', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'Bad Channel', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onDownload: true, + }); + + mockFetch.mockRejectedValue(new Error('network failure')); + const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); + + const service = new NotificationService(db); + + // Should NOT throw + await expect( + service.notifyDownload('My Video', 'Channel', 'youtube', 'https://example.com') + ).resolves.toBeUndefined(); + + // Should have logged the error + expect(consoleSpy).toHaveBeenCalledWith( + expect.stringContaining('[notification] dispatch error') + ); + expect(consoleSpy).toHaveBeenCalledWith( + expect.stringContaining('network failure') + ); + // Should log channel name, not webhook URL + expect(consoleSpy).toHaveBeenCalledWith( + expect.stringContaining('Bad Channel') + ); + + consoleSpy.mockRestore(); + }); + + it('logs non-OK HTTP responses without throwing', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'Rate Limited', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onDownload: true, + }); + + mockFetch.mockResolvedValue({ ok: false, status: 429 }); + const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); + + const service = new NotificationService(db); + await service.notifyDownload('My Video', 'Channel', 'youtube', 'https://example.com'); + + expect(consoleSpy).toHaveBeenCalledWith( + 
expect.stringContaining('dispatch failed') + ); + expect(consoleSpy).toHaveBeenCalledWith( + expect.stringContaining('httpStatus=429') + ); + + consoleSpy.mockRestore(); + }); + + it('skips settings with no webhook URL configured', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'No URL', + config: {}, + enabled: true, + onDownload: true, + }); + + const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); + + const service = new NotificationService(db); + await service.notifyDownload('My Video', 'Channel', 'youtube', 'https://example.com'); + + expect(mockFetch).not.toHaveBeenCalled(); + expect(consoleSpy).toHaveBeenCalledWith( + expect.stringContaining('no webhook URL') + ); + + consoleSpy.mockRestore(); + }); + }); + + // ── notifyGrab ── + + describe('notifyGrab', () => { + it('sends blue embed with grab title', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'Grab Channel', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onGrab: true, + }); + + const service = new NotificationService(db); + await service.notifyGrab('My Video', 'Channel', 'youtube', 'https://example.com'); + + const body = JSON.parse(mockFetch.mock.calls[0][1].body); + const embed = body.embeds[0]; + expect(embed.title).toBe('📥 Content Grabbed'); + expect(embed.color).toBe(0x3498db); + }); + }); + + // ── notifyFailure ── + + describe('notifyFailure', () => { + it('sends red embed with failure details', async () => { + await createNotificationSetting(db, { + type: 'discord', + name: 'Failure Channel', + config: { webhookUrl: 'https://discord.com/api/webhooks/test/token' }, + enabled: true, + onFailure: true, + }); + + const service = new NotificationService(db); + await service.notifyFailure('My Video', 'Channel', 'youtube', 'network timeout', 3, 3); + + const body = JSON.parse(mockFetch.mock.calls[0][1].body); + const embed = body.embeds[0]; + expect(embed.title).toBe('❌ 
Download Failed'); + expect(embed.color).toBe(0xe74c3c); + expect(embed.fields).toEqual( + expect.arrayContaining([ + expect.objectContaining({ name: 'Error', value: 'network timeout' }), + expect.objectContaining({ name: 'Attempt', value: '3 / 3' }), + ]) + ); + }); + }); +}); diff --git a/src/__tests__/platform-settings-api.test.ts b/src/__tests__/platform-settings-api.test.ts new file mode 100644 index 0000000..4fb6b30 --- /dev/null +++ b/src/__tests__/platform-settings-api.test.ts @@ -0,0 +1,328 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; + +/** + * Integration tests for platform settings CRUD API. + * + * Pattern follows channel.test.ts: temp DB, migrations, + * buildServer, inject(). + */ +describe('Platform Settings API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-platform-settings-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read auto-generated API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows (K004) + } + }); + + // ── GET /api/v1/platform-settings (empty) ── + + describe('GET /api/v1/platform-settings', () => { + it('returns empty array initially', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/platform-settings', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(Array.isArray(body)).toBe(true); + expect(body.length).toBe(0); + }); + }); + + // ── PUT /api/v1/platform-settings/:platform (create) ── + + describe('PUT /api/v1/platform-settings/:platform', () => { + it('creates a new YouTube platform setting', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/platform-settings/youtube', + headers: { 'x-api-key': apiKey }, + payload: { + checkInterval: 120, + concurrencyLimit: 3, + subtitleLanguages: 'en,es', + grabAllEnabled: true, + grabAllOrder: 'newest', + scanLimit: 200, + rateLimitDelay: 500, + }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.platform).toBe('youtube'); + expect(body.checkInterval).toBe(120); + expect(body.concurrencyLimit).toBe(3); + expect(body.subtitleLanguages).toBe('en,es'); + expect(body.grabAllEnabled).toBe(true); + expect(body.grabAllOrder).toBe('newest'); + expect(body.scanLimit).toBe(200); + expect(body.rateLimitDelay).toBe(500); + expect(body.defaultFormatProfileId).toBeNull(); + expect(body.createdAt).toBeTruthy(); + expect(body.updatedAt).toBeTruthy(); + }); + + it('upserts (updates) an existing platform setting', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/platform-settings/youtube', + headers: { 'x-api-key': apiKey }, + payload: { + 
checkInterval: 240, + concurrencyLimit: 5, + grabAllEnabled: false, + grabAllOrder: 'oldest', + }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.platform).toBe('youtube'); + expect(body.checkInterval).toBe(240); + expect(body.concurrencyLimit).toBe(5); + expect(body.grabAllEnabled).toBe(false); + expect(body.grabAllOrder).toBe('oldest'); + }); + + it('creates a SoundCloud platform setting', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/platform-settings/soundcloud', + headers: { 'x-api-key': apiKey }, + payload: { + checkInterval: 720, + concurrencyLimit: 1, + }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.platform).toBe('soundcloud'); + expect(body.checkInterval).toBe(720); + expect(body.concurrencyLimit).toBe(1); + // Defaults + expect(body.grabAllEnabled).toBe(false); + expect(body.grabAllOrder).toBe('newest'); + expect(body.scanLimit).toBe(100); + expect(body.rateLimitDelay).toBe(1000); + }); + + it('rejects invalid platform (tiktok)', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/platform-settings/tiktok', + headers: { 'x-api-key': apiKey }, + payload: { + checkInterval: 60, + }, + }); + + expect(res.statusCode).toBe(400); + const body = res.json(); + expect(body.error).toBe('Bad Request'); + expect(body.message).toContain('Invalid platform'); + }); + + it('rejects invalid body (bad grabAllOrder value)', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/platform-settings/youtube', + headers: { 'x-api-key': apiKey }, + payload: { + grabAllOrder: 'random', + }, + }); + + expect(res.statusCode).toBe(400); + }); + + it('ignores additional properties in body (stripped by schema)', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/platform-settings/youtube', + headers: { 'x-api-key': apiKey }, + payload: { + checkInterval: 300, + unknownField: 
'should be stripped', + }, + }); + + // Fastify strips additional properties rather than rejecting them + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.checkInterval).toBe(300); + expect(body).not.toHaveProperty('unknownField'); + }); + }); + + // ── GET /api/v1/platform-settings/:platform ── + + describe('GET /api/v1/platform-settings/:platform', () => { + it('returns the YouTube platform setting', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/platform-settings/youtube', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.platform).toBe('youtube'); + expect(body.checkInterval).toBeTypeOf('number'); + expect(body.concurrencyLimit).toBeTypeOf('number'); + expect(body.scanLimit).toBeTypeOf('number'); + expect(body.rateLimitDelay).toBeTypeOf('number'); + }); + + it('returns 404 for non-existent platform', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/platform-settings/twitch', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + const body = res.json(); + expect(body.error).toBe('Not Found'); + }); + }); + + // ── GET /api/v1/platform-settings (populated) ── + + describe('GET /api/v1/platform-settings (populated)', () => { + it('returns all platform settings after creating two', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/platform-settings', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(Array.isArray(body)).toBe(true); + expect(body.length).toBe(2); + + const platforms = body.map((s: { platform: string }) => s.platform); + expect(platforms).toContain('youtube'); + expect(platforms).toContain('soundcloud'); + }); + }); + + // ── DELETE /api/v1/platform-settings/:platform ── + + describe('DELETE /api/v1/platform-settings/:platform', () => { + it('deletes the 
SoundCloud platform setting', async () => { + const res = await server.inject({ + method: 'DELETE', + url: '/api/v1/platform-settings/soundcloud', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(204); + }); + + it('returns 404 when deleting non-existent platform setting', async () => { + const res = await server.inject({ + method: 'DELETE', + url: '/api/v1/platform-settings/soundcloud', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + }); + + it('GET confirms SoundCloud is deleted', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/platform-settings/soundcloud', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + }); + + it('GET all returns only YouTube after deletion', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/platform-settings', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.length).toBe(1); + expect(body[0].platform).toBe('youtube'); + }); + }); + + // ── Auth ── + + describe('Authentication', () => { + it('returns 401 on GET without API key', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/platform-settings', + }); + expect(res.statusCode).toBe(401); + }); + + it('returns 401 on PUT without API key', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/platform-settings/youtube', + payload: { checkInterval: 60 }, + }); + expect(res.statusCode).toBe(401); + }); + + it('returns 401 on DELETE without API key', async () => { + const res = await server.inject({ + method: 'DELETE', + url: '/api/v1/platform-settings/youtube', + }); + expect(res.statusCode).toBe(401); + }); + }); +}); diff --git a/src/__tests__/playlist-api.test.ts b/src/__tests__/playlist-api.test.ts new file mode 100644 index 0000000..3f94afa --- /dev/null +++ b/src/__tests__/playlist-api.test.ts @@ -0,0 
+1,310 @@ +import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import type { Channel, ContentItem } from '../types/index'; + +// Mock execYtDlp to avoid real yt-dlp calls +vi.mock('../sources/yt-dlp', async (importOriginal) => { + const actual = (await importOriginal()) as Record; + return { + ...actual, + execYtDlp: vi.fn(), + checkYtDlpAvailable: vi.fn().mockResolvedValue(true), + getYtDlpVersion: vi.fn().mockResolvedValue('2024.12.23'), + }; +}); + +import { execYtDlp } from '../sources/yt-dlp'; + +const mockedExecYtDlp = vi.mocked(execYtDlp); + +/** + * Integration tests for playlist API endpoints: + * GET /api/v1/channel/:id/playlists — list playlists + content mappings + * POST /api/v1/channel/:id/playlists/refresh — refresh playlists from platform + */ +describe('playlist-api', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + let channel: Channel; + const contentItemsList: ContentItem[] = []; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-playlist-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read API key + const rows = await db + 
.select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? ''; + expect(apiKey).toBeTruthy(); + + // Create a test channel (YouTube) + channel = await createChannel(db, { + name: 'Playlist Test Channel', + platform: 'youtube', + platformId: 'UC_PLAYLIST_TEST', + url: 'https://www.youtube.com/@PlaylistTestChannel', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + + // Create content items that can be linked to playlists + const videoDefs = [ + { platformContentId: 'vid_pl_1', title: 'Playlist Video One' }, + { platformContentId: 'vid_pl_2', title: 'Playlist Video Two' }, + { platformContentId: 'vid_pl_3', title: 'Playlist Video Three' }, + { platformContentId: 'vid_pl_4', title: 'Playlist Video Four' }, + ]; + + for (const def of videoDefs) { + const created = await createContentItem(db, { + channelId: channel.id, + title: def.title, + platformContentId: def.platformContentId, + url: `https://youtube.com/watch?v=${def.platformContentId}`, + contentType: 'video', + duration: 300, + }); + if (created) contentItemsList.push(created); + } + + expect(contentItemsList.length).toBe(4); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── GET /api/v1/channel/:id/playlists ── + + describe('GET /api/v1/channel/:id/playlists', () => { + it('returns empty playlists array for channel with no playlists', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/playlists`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.playlists).toEqual([]); + expect(body.data.mappings).toEqual({}); + }); + + 
it('returns 404 for non-existent channel', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/channel/99999/playlists', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + const body = res.json(); + expect(body.error).toBe('Not Found'); + }); + + it('returns 401 without API key', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/playlists`, + }); + + expect(res.statusCode).toBe(401); + }); + }); + + // ── POST /api/v1/channel/:id/playlists/refresh ── + + describe('POST /api/v1/channel/:id/playlists/refresh', () => { + it('refreshes playlists for a YouTube channel', async () => { + // Mock execYtDlp: first call = playlist list, subsequent calls = playlist contents + mockedExecYtDlp + // Call 1: Enumerate playlists from /playlists tab + .mockResolvedValueOnce({ + stdout: [ + JSON.stringify({ id: 'PL_playlist_A', title: 'Best Videos' }), + JSON.stringify({ id: 'PL_playlist_B', title: 'Tutorial Series' }), + ].join('\n'), + stderr: '', + exitCode: 0, + }) + // Call 2: Fetch video IDs for playlist A + .mockResolvedValueOnce({ + stdout: [ + JSON.stringify({ id: 'vid_pl_1' }), + JSON.stringify({ id: 'vid_pl_2' }), + JSON.stringify({ id: 'vid_pl_nonexistent' }), // Not in our DB — should be skipped + ].join('\n'), + stderr: '', + exitCode: 0, + }) + // Call 3: Fetch video IDs for playlist B + .mockResolvedValueOnce({ + stdout: [ + JSON.stringify({ id: 'vid_pl_3' }), + JSON.stringify({ id: 'vid_pl_4' }), + ].join('\n'), + stderr: '', + exitCode: 0, + }); + + const res = await server.inject({ + method: 'POST', + url: `/api/v1/channel/${channel.id}/playlists/refresh`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.playlists).toHaveLength(2); + expect(body.data.playlists[0].title).toBe('Best Videos'); + 
expect(body.data.playlists[0].platformPlaylistId).toBe('PL_playlist_A'); + expect(body.data.playlists[1].title).toBe('Tutorial Series'); + expect(body.data.playlists[1].platformPlaylistId).toBe('PL_playlist_B'); + + // Verify mappings — playlist A should have 2 items (nonexistent skipped) + const playlistA = body.data.playlists[0]; + const playlistB = body.data.playlists[1]; + const mappingsA = body.data.mappings[String(playlistA.id)]; + const mappingsB = body.data.mappings[String(playlistB.id)]; + expect(mappingsA).toHaveLength(2); + expect(mappingsB).toHaveLength(2); + }); + + it('GET returns populated playlists after refresh', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/channel/${channel.id}/playlists`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.data.playlists).toHaveLength(2); + expect(body.data.playlists[0].title).toBe('Best Videos'); + expect(body.data.playlists[1].title).toBe('Tutorial Series'); + + // Verify mappings exist + const keys = Object.keys(body.data.mappings); + expect(keys.length).toBe(2); + }); + + it('refreshing again updates existing playlists (idempotent upsert)', async () => { + // Mock with updated playlist titles and slightly different video membership + mockedExecYtDlp + .mockResolvedValueOnce({ + stdout: [ + JSON.stringify({ id: 'PL_playlist_A', title: 'Best Videos (Updated)' }), + JSON.stringify({ id: 'PL_playlist_B', title: 'Tutorial Series (Updated)' }), + ].join('\n'), + stderr: '', + exitCode: 0, + }) + .mockResolvedValueOnce({ + stdout: [ + JSON.stringify({ id: 'vid_pl_1' }), + ].join('\n'), + stderr: '', + exitCode: 0, + }) + .mockResolvedValueOnce({ + stdout: [ + JSON.stringify({ id: 'vid_pl_2' }), + JSON.stringify({ id: 'vid_pl_3' }), + JSON.stringify({ id: 'vid_pl_4' }), + ].join('\n'), + stderr: '', + exitCode: 0, + }); + + const res = await server.inject({ + method: 'POST', + url: 
`/api/v1/channel/${channel.id}/playlists/refresh`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.playlists).toHaveLength(2); + expect(body.data.playlists[0].title).toBe('Best Videos (Updated)'); + expect(body.data.playlists[1].title).toBe('Tutorial Series (Updated)'); + + // Verify updated mappings + const playlistA = body.data.playlists[0]; + const playlistB = body.data.playlists[1]; + expect(body.data.mappings[String(playlistA.id)]).toHaveLength(1); + expect(body.data.mappings[String(playlistB.id)]).toHaveLength(3); + }); + + it('returns 404 for non-existent channel', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/99999/playlists/refresh', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + const body = res.json(); + expect(body.error).toBe('Not Found'); + }); + + it('returns 500 when yt-dlp fails', async () => { + mockedExecYtDlp.mockRejectedValueOnce(new Error('yt-dlp process crashed')); + + const res = await server.inject({ + method: 'POST', + url: `/api/v1/channel/${channel.id}/playlists/refresh`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(500); + const body = res.json(); + expect(body.error).toBe('Internal Server Error'); + expect(body.message).toContain('Failed to refresh playlists'); + }); + + it('returns 401 without API key', async () => { + const res = await server.inject({ + method: 'POST', + url: `/api/v1/channel/${channel.id}/playlists/refresh`, + }); + + expect(res.statusCode).toBe(401); + }); + }); +}); diff --git a/src/__tests__/quality-analyzer.test.ts b/src/__tests__/quality-analyzer.test.ts new file mode 100644 index 0000000..f814932 --- /dev/null +++ b/src/__tests__/quality-analyzer.test.ts @@ -0,0 +1,375 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +// vi.hoisted ensures this runs before hoisted vi.mock 
calls +const { execFileMock } = vi.hoisted(() => ({ + execFileMock: vi.fn(), +})); + +vi.mock('node:child_process', () => ({ + execFile: execFileMock, +})); + +vi.mock('node:util', () => ({ + promisify: () => execFileMock, +})); + +import { QualityAnalyzer } from '../services/quality-analyzer'; + +describe('QualityAnalyzer', () => { + let analyzer: QualityAnalyzer; + + beforeEach(() => { + vi.clearAllMocks(); + analyzer = new QualityAnalyzer(); + }); + + describe('analyze', () => { + it('parses video ffprobe output with correct resolution, codec, bitrate, container', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'video', + codec_name: 'h264', + width: 1920, + height: 1080, + bit_rate: '5000000', + }, + { + codec_type: 'audio', + codec_name: 'aac', + bit_rate: '192000', + }, + ], + format: { + format_name: 'mp4', + bit_rate: '5192000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/video.mp4'); + + expect(result.actualResolution).toBe('1920x1080'); + expect(result.actualCodec).toBe('h264'); + expect(result.actualBitrate).toBe('5.0 Mbps'); + expect(result.containerFormat).toBe('mp4'); + expect(result.qualityWarnings).toEqual([]); + }); + + it('parses audio-only ffprobe output with null resolution', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'audio', + codec_name: 'opus', + bit_rate: '320000', + }, + ], + format: { + format_name: 'webm', + bit_rate: '320000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/audio.webm'); + + expect(result.actualResolution).toBeNull(); + expect(result.actualCodec).toBe('opus'); + expect(result.actualBitrate).toBe('320 kbps'); + expect(result.containerFormat).toBe('webm'); + expect(result.qualityWarnings).toEqual([]); + }); + + it('detects 
lossy-in-lossless: WAV container with opus codec', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'audio', + codec_name: 'opus', + bit_rate: '256000', + }, + ], + format: { + format_name: 'wav', + bit_rate: '256000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/audio.wav'); + + expect(result.qualityWarnings).toHaveLength(1); + expect(result.qualityWarnings[0]).toContain('Lossy codec'); + expect(result.qualityWarnings[0]).toContain('opus'); + expect(result.qualityWarnings[0]).toContain('wav'); + }); + + it('detects lossy-in-lossless: FLAC container with aac codec', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'audio', + codec_name: 'aac', + bit_rate: '256000', + }, + ], + format: { + format_name: 'flac', + bit_rate: '256000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/audio.flac'); + + expect(result.qualityWarnings).toHaveLength(1); + expect(result.qualityWarnings[0]).toContain('Lossy codec'); + expect(result.qualityWarnings[0]).toContain('aac'); + expect(result.qualityWarnings[0]).toContain('flac'); + }); + + it('detects low bitrate audio in lossless container', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'audio', + codec_name: 'pcm_s16le', + bit_rate: '128000', + }, + ], + format: { + format_name: 'wav', + bit_rate: '128000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/audio.wav'); + + expect(result.qualityWarnings).toHaveLength(1); + expect(result.qualityWarnings[0]).toContain('Low audio bitrate'); + expect(result.qualityWarnings[0]).toContain('128 kbps'); + expect(result.qualityWarnings[0]).toContain('wav'); + }); + + it('generates 
both warnings when lossy codec AND low bitrate in lossless container', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'audio', + codec_name: 'mp3', + bit_rate: '96000', + }, + ], + format: { + format_name: 'flac', + bit_rate: '96000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/audio.flac'); + + expect(result.qualityWarnings).toHaveLength(2); + expect(result.qualityWarnings[0]).toContain('Lossy codec'); + expect(result.qualityWarnings[1]).toContain('Low audio bitrate'); + }); + + it('no warnings for non-lossless containers with lossy codecs', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'audio', + codec_name: 'aac', + bit_rate: '128000', + }, + ], + format: { + format_name: 'mp4', + bit_rate: '128000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/audio.mp4'); + + expect(result.qualityWarnings).toEqual([]); + }); + + it('degrades gracefully when ffprobe is not found (ENOENT)', async () => { + const error = Object.assign(new Error('spawn ffprobe ENOENT'), { + code: 'ENOENT', + }); + execFileMock.mockRejectedValueOnce(error); + + const result = await analyzer.analyze('/path/to/video.mp4'); + + expect(result.actualResolution).toBeNull(); + expect(result.actualCodec).toBeNull(); + expect(result.actualBitrate).toBeNull(); + expect(result.containerFormat).toBeNull(); + expect(result.qualityWarnings).toHaveLength(1); + expect(result.qualityWarnings[0]).toContain('ffprobe analysis failed'); + expect(result.qualityWarnings[0]).toContain('not found'); + }); + + it('degrades gracefully when ffprobe times out', async () => { + const error = Object.assign(new Error('The operation was aborted'), { + name: 'AbortError', + code: 'ABORT_ERR', + }); + execFileMock.mockRejectedValueOnce(error); + + 
const result = await analyzer.analyze('/path/to/large-video.mp4'); + + expect(result.qualityWarnings).toHaveLength(1); + expect(result.qualityWarnings[0]).toContain('ffprobe analysis failed'); + expect(result.qualityWarnings[0]).toContain('timed out'); + }); + + it('degrades gracefully on invalid JSON output', async () => { + execFileMock.mockResolvedValueOnce({ + stdout: 'not valid json', + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/corrupt.mp4'); + + expect(result.qualityWarnings).toHaveLength(1); + expect(result.qualityWarnings[0]).toContain('ffprobe analysis failed'); + }); + + it('handles missing stream bitrate by falling back to format bitrate', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'video', + codec_name: 'vp9', + width: 1280, + height: 720, + // No bit_rate on stream + }, + ], + format: { + format_name: 'webm', + bit_rate: '2500000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/video.webm'); + + expect(result.actualResolution).toBe('1280x720'); + expect(result.actualCodec).toBe('vp9'); + expect(result.actualBitrate).toBe('2.5 Mbps'); + expect(result.containerFormat).toBe('webm'); + }); + + it('handles empty streams array', async () => { + const ffprobeOutput = { + streams: [], + format: { + format_name: 'mp4', + bit_rate: '0', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/empty.mp4'); + + expect(result.actualResolution).toBeNull(); + expect(result.actualCodec).toBeNull(); + expect(result.containerFormat).toBe('mp4'); + }); + + it('formats high bitrate as Mbps', async () => { + const ffprobeOutput = { + streams: [ + { + codec_type: 'video', + codec_name: 'h265', + width: 3840, + height: 2160, + bit_rate: '25000000', + }, + ], + format: { + format_name: 'mp4', + 
bit_rate: '25000000', + }, + }; + + execFileMock.mockResolvedValueOnce({ + stdout: JSON.stringify(ffprobeOutput), + stderr: '', + }); + + const result = await analyzer.analyze('/path/to/4k.mp4'); + + expect(result.actualBitrate).toBe('25.0 Mbps'); + }); + }); + + describe('checkFfprobeAvailable', () => { + it('returns true when ffprobe -version succeeds', async () => { + execFileMock.mockResolvedValueOnce({ + stdout: 'ffprobe version 6.0', + stderr: '', + }); + + const available = await analyzer.checkFfprobeAvailable(); + + expect(available).toBe(true); + expect(execFileMock).toHaveBeenCalledWith( + 'ffprobe', + ['-version'], + expect.objectContaining({ timeout: 5_000 }) + ); + }); + + it('returns false when ffprobe is not found', async () => { + execFileMock.mockRejectedValueOnce(new Error('ENOENT')); + + const available = await analyzer.checkFfprobeAvailable(); + + expect(available).toBe(false); + }); + }); +}); diff --git a/src/__tests__/queue-api.test.ts b/src/__tests__/queue-api.test.ts new file mode 100644 index 0000000..a0e297a --- /dev/null +++ b/src/__tests__/queue-api.test.ts @@ -0,0 +1,423 @@ +import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import { QueueService } from '../services/queue'; +import type { ContentItem, Channel } from '../types/index'; +import 
type { DownloadService } from '../services/download'; + +/** + * Integration tests for queue API endpoints. + * Uses a QueueService with a mocked DownloadService. + */ + +describe('Queue API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + let testChannel: Channel; + let queueService: QueueService; + let mockDownloadService: { downloadItem: ReturnType }; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-queue-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + + // Create mock download service and queue service + mockDownloadService = { downloadItem: vi.fn().mockResolvedValue(undefined) }; + queueService = new QueueService( + db, + mockDownloadService as unknown as DownloadService, + 2 + ); + // Stop auto-processing so tests stay deterministic + queueService.stop(); + + (server as { queueService: QueueService | null }).queueService = queueService; + (server as { downloadService: DownloadService | null }).downloadService = + mockDownloadService as unknown as DownloadService; + + await server.ready(); + + // Read API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? 
''; + expect(apiKey).toBeTruthy(); + + // Create a test channel + testChannel = await createChannel(db, { + name: 'Queue API Test Channel', + platform: 'youtube', + platformId: 'UC_queue_api_test', + url: 'https://www.youtube.com/channel/UC_queue_api_test', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + }); + }); + + afterAll(async () => { + queueService.stop(); + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Helpers ── + + function authed(opts: Record) { + return { + ...opts, + headers: { + 'x-api-key': apiKey, + ...(opts.headers as Record | undefined), + }, + }; + } + + let contentCounter = 0; + async function createTestContentItem( + overrides: { status?: string } = {} + ): Promise { + contentCounter++; + const item = await createContentItem(db, { + channelId: testChannel.id, + title: `Queue API Test Video ${contentCounter}`, + platformContentId: `vid_qa_${Date.now()}_${contentCounter}`, + url: 'https://www.youtube.com/watch?v=test_queue_api', + contentType: 'video', + duration: 300, + status: (overrides.status ?? 
'monitored') as 'monitored', + }); + return item!; + } + + // ── Auth gating ── + + describe('Authentication', () => { + it('GET /api/v1/queue returns 401 without API key', async () => { + const res = await server.inject({ method: 'GET', url: '/api/v1/queue' }); + expect(res.statusCode).toBe(401); + }); + + it('POST /api/v1/queue returns 401 without API key', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/queue', + payload: { contentItemId: 1 }, + }); + expect(res.statusCode).toBe(401); + }); + + it('DELETE /api/v1/queue/1 returns 401 without API key', async () => { + const res = await server.inject({ method: 'DELETE', url: '/api/v1/queue/1' }); + expect(res.statusCode).toBe(401); + }); + + it('GET /api/v1/queue/:id returns 401 without API key', async () => { + const res = await server.inject({ method: 'GET', url: '/api/v1/queue/1' }); + expect(res.statusCode).toBe(401); + }); + + it('POST /api/v1/queue/1/retry returns 401 without API key', async () => { + const res = await server.inject({ method: 'POST', url: '/api/v1/queue/1/retry' }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── GET /api/v1/queue ── + + describe('GET /api/v1/queue', () => { + it('returns empty array when no items in queue', async () => { + const res = await server.inject(authed({ method: 'GET', url: '/api/v1/queue' })); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(Array.isArray(body.data)).toBe(true); + }); + + it('returns queue items with channelName and contentTitle fields', async () => { + const item = await createTestContentItem(); + await queueService.enqueue(item.id); + + const res = await server.inject(authed({ method: 'GET', url: '/api/v1/queue' })); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + const match = body.data.find((qi: { contentItemId: number }) => qi.contentItemId === item.id); + expect(match).toBeTruthy(); + 
expect(match.channelName).toBe('Queue API Test Channel'); + expect(match.contentTitle).toMatch(/^Queue API Test Video/); + }); + + it('returns queue items filtered by status', async () => { + const item = await createTestContentItem(); + await queueService.enqueue(item.id); + + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/queue?status=pending' }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.length).toBeGreaterThanOrEqual(1); + expect(body.data.every((qi: { status: string }) => qi.status === 'pending')).toBe(true); + // Verify joined display fields are present in API response + const match = body.data.find((qi: { contentItemId: number }) => qi.contentItemId === item.id); + expect(match).toBeTruthy(); + expect(match.channelName).toBe('Queue API Test Channel'); + expect(match.contentTitle).toBeTruthy(); + }); + + it('returns 400 for invalid status filter', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/queue?status=bogus' }) + ); + expect(res.statusCode).toBe(400); + expect(res.json().message).toContain('Invalid status filter'); + }); + }); + + // ── GET /api/v1/queue/:id ── + + describe('GET /api/v1/queue/:id', () => { + it('returns a queue item by ID', async () => { + const item = await createTestContentItem(); + const qi = await queueService.enqueue(item.id); + + const res = await server.inject( + authed({ method: 'GET', url: `/api/v1/queue/${qi.id}` }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.id).toBe(qi.id); + expect(body.data.contentItemId).toBe(item.id); + }); + + it('returns 404 for non-existent queue item', async () => { + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/queue/99999' }) + ); + expect(res.statusCode).toBe(404); + }); + + it('returns 400 for non-numeric ID', async () => { + const res = await 
server.inject( + authed({ method: 'GET', url: '/api/v1/queue/abc' }) + ); + expect(res.statusCode).toBe(400); + }); + }); + + // ── POST /api/v1/queue ── + + describe('POST /api/v1/queue', () => { + it('enqueues a content item and returns 201', async () => { + const item = await createTestContentItem(); + + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/queue', + payload: { contentItemId: item.id }, + }) + ); + expect(res.statusCode).toBe(201); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.contentItemId).toBe(item.id); + expect(body.data.status).toBe('pending'); + }); + + it('returns 201 with custom priority', async () => { + const item = await createTestContentItem(); + + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/queue', + payload: { contentItemId: item.id, priority: 5 }, + }) + ); + expect(res.statusCode).toBe(201); + expect(res.json().data.priority).toBe(5); + }); + + it('returns 404 for non-existent content item', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/queue', + payload: { contentItemId: 99999 }, + }) + ); + expect(res.statusCode).toBe(404); + }); + + it('returns 409 when content item is already queued', async () => { + const item = await createTestContentItem(); + await queueService.enqueue(item.id); + + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/queue', + payload: { contentItemId: item.id }, + }) + ); + expect(res.statusCode).toBe(409); + expect(res.json().message).toContain('already in the queue'); + }); + + it('returns 400 when contentItemId is missing', async () => { + const res = await server.inject( + authed({ + method: 'POST', + url: '/api/v1/queue', + payload: {}, + }) + ); + expect(res.statusCode).toBe(400); + expect(res.json().message).toContain('contentItemId'); + }); + + it('returns 400 when contentItemId is not a number', async () => { + const res = await 
server.inject( + authed({ + method: 'POST', + url: '/api/v1/queue', + payload: { contentItemId: 'abc' }, + }) + ); + expect(res.statusCode).toBe(400); + expect(res.json().message).toContain('contentItemId'); + }); + }); + + // ── DELETE /api/v1/queue/:id ── + + describe('DELETE /api/v1/queue/:id', () => { + it('cancels a pending queue item and returns 200', async () => { + const item = await createTestContentItem(); + const qi = await queueService.enqueue(item.id); + + const res = await server.inject( + authed({ method: 'DELETE', url: `/api/v1/queue/${qi.id}` }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.status).toBe('cancelled'); + }); + + it('returns 404 for non-existent queue item', async () => { + const res = await server.inject( + authed({ method: 'DELETE', url: '/api/v1/queue/99999' }) + ); + expect(res.statusCode).toBe(404); + }); + + it('returns 409 for non-cancellable item', async () => { + const item = await createTestContentItem(); + const qi = await queueService.enqueue(item.id); + + // Force status to completed (not cancellable) + const { updateQueueItemStatus } = await import( + '../db/repositories/queue-repository' + ); + await updateQueueItemStatus(db, qi.id, 'completed'); + + const res = await server.inject( + authed({ method: 'DELETE', url: `/api/v1/queue/${qi.id}` }) + ); + expect(res.statusCode).toBe(409); + expect(res.json().message).toContain('Cannot cancel'); + }); + }); + + // ── POST /api/v1/queue/:id/retry ── + + describe('POST /api/v1/queue/:id/retry', () => { + it('retries a failed queue item and returns 200', async () => { + const item = await createTestContentItem(); + const qi = await queueService.enqueue(item.id); + + // Force to failed with 1 attempt (maxAttempts=3, so retryable) + const { updateQueueItemStatus } = await import( + '../db/repositories/queue-repository' + ); + await updateQueueItemStatus(db, qi.id, 'failed', { attempts: 1 }); + + const res 
= await server.inject( + authed({ method: 'POST', url: `/api/v1/queue/${qi.id}/retry` }) + ); + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.data.status).toBe('pending'); + }); + + it('returns 404 for non-existent queue item', async () => { + const res = await server.inject( + authed({ method: 'POST', url: '/api/v1/queue/99999/retry' }) + ); + expect(res.statusCode).toBe(404); + }); + + it('returns 409 for non-retryable item', async () => { + const item = await createTestContentItem(); + const qi = await queueService.enqueue(item.id); + + // Still pending — not failed, so can't retry + const res = await server.inject( + authed({ method: 'POST', url: `/api/v1/queue/${qi.id}/retry` }) + ); + expect(res.statusCode).toBe(409); + expect(res.json().message).toContain('Cannot retry'); + }); + + it('returns 409 when max attempts exhausted', async () => { + const item = await createTestContentItem(); + const qi = await queueService.enqueue(item.id); + + // Force to failed with maxAttempts exhausted + const { updateQueueItemStatus } = await import( + '../db/repositories/queue-repository' + ); + await updateQueueItemStatus(db, qi.id, 'failed', { attempts: 3 }); + + const res = await server.inject( + authed({ method: 'POST', url: `/api/v1/queue/${qi.id}/retry` }) + ); + expect(res.statusCode).toBe(409); + expect(res.json().message).toContain('Cannot retry'); + }); + }); +}); diff --git a/src/__tests__/queue-repository.test.ts b/src/__tests__/queue-repository.test.ts new file mode 100644 index 0000000..f6eee3f --- /dev/null +++ b/src/__tests__/queue-repository.test.ts @@ -0,0 +1,383 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { 
createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import { + createQueueItem, + getAllQueueItems, + getQueueItemById, + getQueueItemsByStatus, + getPendingQueueItems, + updateQueueItemStatus, + countQueueItemsByStatus, + deleteQueueItem, + getQueueItemByContentItemId, +} from '../db/repositories/queue-repository'; +import type { Channel, ContentItem } from '../types/index'; + +// ── Test Helpers ── + +let tmpDir: string; +let db: Awaited>; +let testChannel: Channel; +let testContentItem: ContentItem; +let testContentItem2: ContentItem; + +async function setupDb(): Promise { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-queue-test-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + testChannel = await createChannel(db, { + name: 'Queue Test Channel', + platform: 'youtube', + platformId: 'UC_queue_test', + url: 'https://www.youtube.com/channel/UC_queue_test', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + + testContentItem = (await createContentItem(db, { + channelId: testChannel.id, + title: 'Queue Test Video 1', + platformContentId: 'vid_queue_1', + url: 'https://www.youtube.com/watch?v=queue1', + contentType: 'video', + duration: 600, + status: 'monitored', + }))!; + + testContentItem2 = (await createContentItem(db, { + channelId: testChannel.id, + title: 'Queue Test Video 2', + platformContentId: 'vid_queue_2', + url: 'https://www.youtube.com/watch?v=queue2', + contentType: 'video', + duration: 300, + status: 'monitored', + }))!; +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort + } +} + +// ── Tests ── + +describe('Queue Repository', () => { + beforeEach(async () => { + await setupDb(); + 
}); + + afterEach(cleanup); + + describe('createQueueItem', () => { + it('creates a queue item with default values', async () => { + const item = await createQueueItem(db, { + contentItemId: testContentItem.id, + }); + + expect(item.id).toBeGreaterThan(0); + expect(item.contentItemId).toBe(testContentItem.id); + expect(item.status).toBe('pending'); + expect(item.priority).toBe(0); + expect(item.attempts).toBe(0); + expect(item.maxAttempts).toBe(3); + expect(item.error).toBeNull(); + expect(item.startedAt).toBeNull(); + expect(item.completedAt).toBeNull(); + expect(item.createdAt).toBeTruthy(); + expect(item.updatedAt).toBeTruthy(); + }); + + it('creates a queue item with custom priority and maxAttempts', async () => { + const item = await createQueueItem(db, { + contentItemId: testContentItem.id, + priority: 10, + maxAttempts: 5, + }); + + expect(item.priority).toBe(10); + expect(item.maxAttempts).toBe(5); + }); + }); + + describe('getQueueItemById', () => { + it('returns item when found', async () => { + const created = await createQueueItem(db, { + contentItemId: testContentItem.id, + }); + + const found = await getQueueItemById(db, created.id); + expect(found).not.toBeNull(); + expect(found!.id).toBe(created.id); + expect(found!.contentItemId).toBe(testContentItem.id); + }); + + it('returns null for non-existent ID', async () => { + const found = await getQueueItemById(db, 99999); + expect(found).toBeNull(); + }); + }); + + describe('getQueueItemsByStatus', () => { + it('returns items matching the given status', async () => { + await createQueueItem(db, { contentItemId: testContentItem.id }); + await createQueueItem(db, { contentItemId: testContentItem2.id }); + + const pending = await getQueueItemsByStatus(db, 'pending'); + expect(pending).toHaveLength(2); + expect(pending.every((i) => i.status === 'pending')).toBe(true); + // Verify joined fields are populated + expect(pending[0].channelName).toBe('Queue Test Channel'); + 
expect(pending[0].contentTitle).toBeTruthy(); + expect(pending[1].channelName).toBe('Queue Test Channel'); + expect(pending[1].contentTitle).toBeTruthy(); + }); + + it('returns empty array when no items match', async () => { + await createQueueItem(db, { contentItemId: testContentItem.id }); + + const downloading = await getQueueItemsByStatus(db, 'downloading'); + expect(downloading).toHaveLength(0); + }); + + it('orders by priority DESC then createdAt ASC', async () => { + const low = await createQueueItem(db, { + contentItemId: testContentItem.id, + priority: 1, + }); + const high = await createQueueItem(db, { + contentItemId: testContentItem2.id, + priority: 10, + }); + + const items = await getQueueItemsByStatus(db, 'pending'); + expect(items[0].id).toBe(high.id); + expect(items[1].id).toBe(low.id); + }); + }); + + describe('getAllQueueItems', () => { + it('returns all items with channelName and contentTitle populated', async () => { + await createQueueItem(db, { contentItemId: testContentItem.id }); + await createQueueItem(db, { contentItemId: testContentItem2.id }); + + const items = await getAllQueueItems(db); + expect(items).toHaveLength(2); + + // Verify joined fields + for (const item of items) { + expect(item.channelName).toBe('Queue Test Channel'); + expect(item.contentTitle).toBeTruthy(); + } + + // Verify specific content titles + const titles = items.map((i) => i.contentTitle); + expect(titles).toContain('Queue Test Video 1'); + expect(titles).toContain('Queue Test Video 2'); + }); + + it('returns null channelName/contentTitle when content item is deleted', async () => { + // With ON DELETE CASCADE, deleting a content item also removes the queue item. + // This test verifies the LEFT JOIN works correctly for items that exist. 
+ const { createContentItem } = await import( + '../db/repositories/content-repository' + ); + const tempContent = (await createContentItem(db, { + channelId: testChannel.id, + title: 'Temp Video', + platformContentId: 'vid_temp_join_test', + url: 'https://www.youtube.com/watch?v=temp', + contentType: 'video', + duration: 100, + status: 'monitored', + }))!; + + await createQueueItem(db, { contentItemId: tempContent.id }); + + const items = await getAllQueueItems(db); + expect(items.length).toBeGreaterThanOrEqual(1); + const found = items.find((i) => i.contentItemId === tempContent.id); + expect(found).toBeTruthy(); + expect(found!.contentTitle).toBe('Temp Video'); + expect(found!.channelName).toBe('Queue Test Channel'); + }); + }); + + describe('getPendingQueueItems', () => { + it('returns only pending items in priority order', async () => { + const item1 = await createQueueItem(db, { + contentItemId: testContentItem.id, + priority: 5, + }); + const item2 = await createQueueItem(db, { + contentItemId: testContentItem2.id, + priority: 10, + }); + + // Mark item1 as downloading (no longer pending) + await updateQueueItemStatus(db, item1.id, 'downloading'); + + const pending = await getPendingQueueItems(db); + expect(pending).toHaveLength(1); + expect(pending[0].id).toBe(item2.id); + }); + + it('respects limit parameter', async () => { + await createQueueItem(db, { contentItemId: testContentItem.id }); + await createQueueItem(db, { contentItemId: testContentItem2.id }); + + const limited = await getPendingQueueItems(db, 1); + expect(limited).toHaveLength(1); + }); + + it('returns items ordered by priority DESC then createdAt ASC', async () => { + // Create with same priority — should order by createdAt ASC + const first = await createQueueItem(db, { + contentItemId: testContentItem.id, + priority: 5, + }); + const second = await createQueueItem(db, { + contentItemId: testContentItem2.id, + priority: 5, + }); + + const items = await getPendingQueueItems(db); + 
expect(items[0].id).toBe(first.id); + expect(items[1].id).toBe(second.id); + }); + }); + + describe('updateQueueItemStatus', () => { + it('updates status and sets updatedAt', async () => { + const item = await createQueueItem(db, { + contentItemId: testContentItem.id, + }); + const originalUpdatedAt = item.updatedAt; + + const updated = await updateQueueItemStatus(db, item.id, 'downloading'); + expect(updated).not.toBeNull(); + expect(updated!.status).toBe('downloading'); + expect(updated!.updatedAt).toBeTruthy(); + }); + + it('sets error message on failure', async () => { + const item = await createQueueItem(db, { + contentItemId: testContentItem.id, + }); + + const updated = await updateQueueItemStatus(db, item.id, 'failed', { + error: 'Network timeout', + attempts: 1, + }); + + expect(updated!.status).toBe('failed'); + expect(updated!.error).toBe('Network timeout'); + expect(updated!.attempts).toBe(1); + }); + + it('sets timestamps for startedAt and completedAt', async () => { + const item = await createQueueItem(db, { + contentItemId: testContentItem.id, + }); + + const startTime = new Date().toISOString().replace('T', ' ').slice(0, 19); + const started = await updateQueueItemStatus(db, item.id, 'downloading', { + startedAt: startTime, + }); + expect(started!.startedAt).toBe(startTime); + + const completeTime = new Date().toISOString().replace('T', ' ').slice(0, 19); + const completed = await updateQueueItemStatus(db, item.id, 'completed', { + completedAt: completeTime, + }); + expect(completed!.completedAt).toBe(completeTime); + }); + + it('returns null for non-existent ID', async () => { + const result = await updateQueueItemStatus(db, 99999, 'downloading'); + expect(result).toBeNull(); + }); + }); + + describe('countQueueItemsByStatus', () => { + it('returns counts for each status', async () => { + const item1 = await createQueueItem(db, { + contentItemId: testContentItem.id, + }); + await createQueueItem(db, { contentItemId: testContentItem2.id }); + + // 
Move item1 to downloading + await updateQueueItemStatus(db, item1.id, 'downloading'); + + const counts = await countQueueItemsByStatus(db); + expect(counts.pending).toBe(1); + expect(counts.downloading).toBe(1); + expect(counts.completed).toBe(0); + expect(counts.failed).toBe(0); + expect(counts.cancelled).toBe(0); + }); + + it('returns all zeros when queue is empty', async () => { + const counts = await countQueueItemsByStatus(db); + expect(counts.pending).toBe(0); + expect(counts.downloading).toBe(0); + expect(counts.completed).toBe(0); + expect(counts.failed).toBe(0); + expect(counts.cancelled).toBe(0); + }); + }); + + describe('deleteQueueItem', () => { + it('deletes an existing item and returns true', async () => { + const item = await createQueueItem(db, { + contentItemId: testContentItem.id, + }); + + const deleted = await deleteQueueItem(db, item.id); + expect(deleted).toBe(true); + + const found = await getQueueItemById(db, item.id); + expect(found).toBeNull(); + }); + + it('returns false for non-existent ID', async () => { + const deleted = await deleteQueueItem(db, 99999); + expect(deleted).toBe(false); + }); + }); + + describe('getQueueItemByContentItemId', () => { + it('returns queue item for a given content item ID', async () => { + const item = await createQueueItem(db, { + contentItemId: testContentItem.id, + }); + + const found = await getQueueItemByContentItemId(db, testContentItem.id); + expect(found).not.toBeNull(); + expect(found!.id).toBe(item.id); + expect(found!.contentItemId).toBe(testContentItem.id); + }); + + it('returns null when no queue item exists for content item', async () => { + const found = await getQueueItemByContentItemId(db, 99999); + expect(found).toBeNull(); + }); + }); +}); diff --git a/src/__tests__/queue-service.test.ts b/src/__tests__/queue-service.test.ts new file mode 100644 index 0000000..f3f61e7 --- /dev/null +++ b/src/__tests__/queue-service.test.ts @@ -0,0 +1,593 @@ +import { describe, it, expect, vi, beforeEach, 
afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { createChannel } from '../db/repositories/channel-repository'; +import { + createContentItem, + getContentItemById, +} from '../db/repositories/content-repository'; +import { + getQueueItemById, + updateQueueItemStatus, + createQueueItem, + countQueueItemsByStatus, +} from '../db/repositories/queue-repository'; +import { getHistoryEvents } from '../db/repositories/history-repository'; +import { QueueService } from '../services/queue'; +import type { ContentItem, Channel } from '../types/index'; + +// ── Test Helpers ── + +let tmpDir: string; +let db: Awaited>; +let testChannel: Channel; +let contentItems: ContentItem[]; + +/** Create a mock DownloadService with a controllable downloadItem. */ +function createMockDownloadService() { + return { + downloadItem: vi.fn().mockResolvedValue(undefined), + }; +} + +async function setupDb(): Promise { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-qs-test-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + testChannel = await createChannel(db, { + name: 'Queue Service Test Channel', + platform: 'youtube', + platformId: 'UC_qs_test', + url: 'https://www.youtube.com/channel/UC_qs_test', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + + // Pre-create a batch of content items for tests + contentItems = []; + for (let i = 1; i <= 6; i++) { + const item = await createContentItem(db, { + channelId: testChannel.id, + title: `QS Test Video ${i}`, + platformContentId: `vid_qs_${i}`, + url: `https://www.youtube.com/watch?v=qs${i}`, + contentType: 'video', + duration: 300 + i * 60, + status: 'monitored', + }); + 
contentItems.push(item!); + } +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort + } +} + +/** Wait for async queue processing to settle. */ +async function tick(ms = 50): Promise { + await new Promise((resolve) => setTimeout(resolve, ms)); +} + +// ── Tests ── + +describe('QueueService', () => { + let mockDownloadService: ReturnType; + + beforeEach(async () => { + await setupDb(); + mockDownloadService = createMockDownloadService(); + }); + + afterEach(() => { + cleanup(); + }); + + // ── Enqueue ── + + describe('enqueue', () => { + it('creates queue item with pending status', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); // concurrency=0 to prevent auto-processing + qs.stop(); + + const queueItem = await qs.enqueue(contentItems[0].id); + + expect(queueItem).toBeDefined(); + expect(queueItem.contentItemId).toBe(contentItems[0].id); + expect(queueItem.status).toBe('pending'); + expect(queueItem.priority).toBe(0); + expect(queueItem.attempts).toBe(0); + }); + + it('updates content status to queued', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + + const updated = await getContentItemById(db, contentItems[0].id); + expect(updated!.status).toBe('queued'); + }); + + it('records grabbed history event', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + + const { items } = await getHistoryEvents(db); + expect(items.length).toBe(1); + expect(items[0].eventType).toBe('grabbed'); + expect(items[0].contentItemId).toBe(contentItems[0].id); + expect(items[0].channelId).toBe(testChannel.id); + expect(items[0].status).toBe('pending'); + }); + + it('respects priority parameter', async () => { + const qs = new QueueService(db, 
mockDownloadService as any, 0); + qs.stop(); + + const queueItem = await qs.enqueue(contentItems[0].id, 5); + expect(queueItem.priority).toBe(5); + }); + + it('throws on duplicate enqueue for pending item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + await expect(qs.enqueue(contentItems[0].id)).rejects.toThrow( + /already in the queue/ + ); + }); + + it('throws for non-existent content item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await expect(qs.enqueue(99999)).rejects.toThrow(/not found/); + }); + }); + + // ── Process ── + + describe('processItem', () => { + it('transitions item through downloading to completed', async () => { + const qs = new QueueService(db, mockDownloadService as any, 1); + + await qs.enqueue(contentItems[0].id); + await tick(100); + + const queueItem = await getQueueItemById(db, 1); + expect(queueItem!.status).toBe('completed'); + expect(queueItem!.completedAt).not.toBeNull(); + + // Verify downloadItem was called + expect(mockDownloadService.downloadItem).toHaveBeenCalledOnce(); + const [calledContentItem, calledChannel] = mockDownloadService.downloadItem.mock.calls[0]; + expect(calledContentItem.id).toBe(contentItems[0].id); + expect(calledChannel.id).toBe(testChannel.id); + }); + + it('records downloaded history event on success', async () => { + const qs = new QueueService(db, mockDownloadService as any, 1); + + await qs.enqueue(contentItems[0].id); + await tick(100); + + const { items } = await getHistoryEvents(db); + const downloadedEvent = items.find((e) => e.eventType === 'downloaded'); + expect(downloadedEvent).toBeDefined(); + expect(downloadedEvent!.contentItemId).toBe(contentItems[0].id); + expect(downloadedEvent!.status).toBe('completed'); + }); + + it('retries on failure — sets status to pending with incremented attempts', async () => { + // Only fail once, then stop to prevent 
retry loop + let callCount = 0; + mockDownloadService.downloadItem.mockImplementation(() => { + callCount++; + if (callCount === 1) { + return Promise.reject(new Error('network timeout')); + } + // On retry, return a deferred that never resolves so we can inspect state + return new Promise(() => {}); + }); + + const qs = new QueueService(db, mockDownloadService as any, 1); + + await qs.enqueue(contentItems[0].id); + // Wait for first attempt to fail and item to be reset to pending, + // then the retry attempt starts (downloading) + await tick(150); + qs.stop(); + + // After the first failure, the item was reset to pending, then picked up again + // Since we stopped, let's check the call count + expect(callCount).toBeGreaterThanOrEqual(1); + + // After first failure, attempts should have been incremented to 1 + // The item may have been picked up again (status=downloading), so check via history + const { items } = await getHistoryEvents(db); + const failedEvents = items.filter((e) => e.eventType === 'failed'); + expect(failedEvents.length).toBeGreaterThanOrEqual(1); + expect(failedEvents[0].details).toHaveProperty('error', 'network timeout'); + expect(failedEvents[0].details).toHaveProperty('attempt', 1); + }); + + it('records failed history event with error details', async () => { + // Use a deferred to control exactly when the download completes + let rejectFn: (err: Error) => void; + mockDownloadService.downloadItem.mockImplementationOnce(() => { + return new Promise((_, reject) => { + rejectFn = reject; + }); + }); + + const qs = new QueueService(db, mockDownloadService as any, 1); + qs.stop(); + await qs.enqueue(contentItems[0].id); + qs.start(); + await tick(50); + + // Now reject the download — this triggers the failure path + rejectFn!(new Error('network timeout')); + await tick(50); + qs.stop(); + + const { items } = await getHistoryEvents(db); + const failedEvent = items.find((e) => e.eventType === 'failed'); + expect(failedEvent).toBeDefined(); + 
expect(failedEvent!.details).toHaveProperty('error', 'network timeout'); + expect(failedEvent!.details).toHaveProperty('attempt', 1); + expect(failedEvent!.details).toHaveProperty('exhausted', false); + }); + + it('marks as failed when max attempts exhausted', async () => { + mockDownloadService.downloadItem.mockRejectedValue(new Error('permanent failure')); + + const qs = new QueueService(db, mockDownloadService as any, 1); + qs.stop(); + + // Enqueue, then manually set attempts to maxAttempts - 1 + await qs.enqueue(contentItems[0].id); + await updateQueueItemStatus(db, 1, 'pending', { attempts: 2 }); // maxAttempts defaults to 3 + + qs.start(); + await tick(100); + qs.stop(); + + const queueItem = await getQueueItemById(db, 1); + expect(queueItem!.status).toBe('failed'); + expect(queueItem!.attempts).toBe(3); + + // Content status should be set to failed + const contentItem = await getContentItemById(db, contentItems[0].id); + expect(contentItem!.status).toBe('failed'); + + // History event should indicate exhaustion + const { items } = await getHistoryEvents(db); + const failedEvent = items.find( + (e) => e.eventType === 'failed' && (e.details as any)?.exhausted === true + ); + expect(failedEvent).toBeDefined(); + }); + }); + + // ── Concurrency ── + + describe('concurrency', () => { + it('limits simultaneous downloads to concurrency value', async () => { + // Track how many downloads are running simultaneously + let concurrentCount = 0; + let maxConcurrentCount = 0; + + const deferreds: Array<{ resolve: () => void; reject: (err: Error) => void }> = []; + + mockDownloadService.downloadItem.mockImplementation(() => { + return new Promise((resolve, reject) => { + concurrentCount++; + maxConcurrentCount = Math.max(maxConcurrentCount, concurrentCount); + deferreds.push({ + resolve: () => { concurrentCount--; resolve(); }, + reject: (err: Error) => { concurrentCount--; reject(err); }, + }); + }); + }); + + const qs = new QueueService(db, mockDownloadService as any, 
2); + + // Enqueue 4 items + await qs.enqueue(contentItems[0].id); + await qs.enqueue(contentItems[1].id); + await qs.enqueue(contentItems[2].id); + await qs.enqueue(contentItems[3].id); + + // Wait for processNext to fire + await tick(100); + + // At most 2 should be downloading + expect(maxConcurrentCount).toBe(2); + expect(concurrentCount).toBe(2); + + // Resolve first two downloads + deferreds[0].resolve(); + await tick(50); + deferreds[1].resolve(); + await tick(150); + + // Remaining items should have been picked up and completed or still in-flight + // Resolve any remaining deferreds + for (let i = 2; i < deferreds.length; i++) { + deferreds[i].resolve(); + await tick(50); + } + await tick(100); + + // All 4 should be completed + const counts = await countQueueItemsByStatus(db); + expect(counts.completed).toBe(4); + expect(counts.pending).toBe(0); + // Most important: never exceeded concurrency limit of 2 + expect(maxConcurrentCount).toBe(2); + }); + }); + + // ── Retry ── + + describe('retryItem', () => { + it('resets failed item to pending and triggers processing', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + // Create and manually fail a queue item + await qs.enqueue(contentItems[0].id); + await updateQueueItemStatus(db, 1, 'failed', { attempts: 1, error: 'test error' }); + + const retried = await qs.retryItem(1); + expect(retried.status).toBe('pending'); + expect(retried.error).toBeNull(); + + // Content status should be reset to queued + const contentItem = await getContentItemById(db, contentItems[0].id); + expect(contentItem!.status).toBe('queued'); + }); + + it('throws for non-failed item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + + await expect(qs.retryItem(1)).rejects.toThrow(/expected 'failed'/); + }); + + it('throws when attempts exhausted', async () => { + const qs = new QueueService(db, 
mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + await updateQueueItemStatus(db, 1, 'failed', { attempts: 3 }); // maxAttempts defaults to 3 + + await expect(qs.retryItem(1)).rejects.toThrow(/attempts.*>=.*maxAttempts/); + }); + + it('throws for non-existent item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + await expect(qs.retryItem(99999)).rejects.toThrow(/not found/); + }); + }); + + // ── Cancel ── + + describe('cancelItem', () => { + it('cancels a pending item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + + const cancelled = await qs.cancelItem(1); + expect(cancelled.status).toBe('cancelled'); + }); + + it('cancels a failed item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + await updateQueueItemStatus(db, 1, 'failed', { attempts: 1, error: 'err' }); + + const cancelled = await qs.cancelItem(1); + expect(cancelled.status).toBe('cancelled'); + }); + + it('throws for downloading item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + await updateQueueItemStatus(db, 1, 'downloading'); + + await expect(qs.cancelItem(1)).rejects.toThrow(/must be 'pending' or 'failed'/); + }); + + it('throws for completed item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + await updateQueueItemStatus(db, 1, 'completed'); + + await expect(qs.cancelItem(1)).rejects.toThrow(/must be 'pending' or 'failed'/); + }); + + it('throws for non-existent item', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + await expect(qs.cancelItem(99999)).rejects.toThrow(/not found/); + }); + }); + + // ── Restart Recovery ── + + 
describe('recoverOnStartup', () => { + it('resets downloading items to pending', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + // Directly insert items in 'downloading' status (simulating a crash) + await createQueueItem(db, { contentItemId: contentItems[0].id }); + await updateQueueItemStatus(db, 1, 'downloading', { startedAt: '2026-01-01T00:00:00Z' }); + + await createQueueItem(db, { contentItemId: contentItems[1].id }); + await updateQueueItemStatus(db, 2, 'downloading', { startedAt: '2026-01-01T00:00:00Z' }); + + // Also create one pending item — should not be affected + await createQueueItem(db, { contentItemId: contentItems[2].id }); + + const recovered = await qs.recoverOnStartup(); + expect(recovered).toBe(2); + + const item1 = await getQueueItemById(db, 1); + expect(item1!.status).toBe('pending'); + expect(item1!.startedAt).toBeNull(); + + const item2 = await getQueueItemById(db, 2); + expect(item2!.status).toBe('pending'); + + // Pending item should remain unchanged + const item3 = await getQueueItemById(db, 3); + expect(item3!.status).toBe('pending'); + }); + + it('returns 0 when no items are stuck', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + const recovered = await qs.recoverOnStartup(); + expect(recovered).toBe(0); + }); + }); + + // ── Stop / Start ── + + describe('stop/start', () => { + it('stop prevents new items from being picked up', async () => { + const qs = new QueueService(db, mockDownloadService as any, 2); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + await tick(100); + + // Item should still be pending — processNext is a no-op while stopped + const item = await getQueueItemById(db, 1); + expect(item!.status).toBe('pending'); + expect(mockDownloadService.downloadItem).not.toHaveBeenCalled(); + }); + + it('start resumes processing', async () => { + const qs = new QueueService(db, mockDownloadService as any, 2); + qs.stop(); + + await 
qs.enqueue(contentItems[0].id); + await tick(50); + + // Verify still pending + let item = await getQueueItemById(db, 1); + expect(item!.status).toBe('pending'); + + // Resume + qs.start(); + await tick(100); + + item = await getQueueItemById(db, 1); + expect(item!.status).toBe('completed'); + expect(mockDownloadService.downloadItem).toHaveBeenCalledOnce(); + }); + }); + + // ── getState ── + + describe('getState', () => { + it('returns correct counts by status', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + qs.stop(); + + await qs.enqueue(contentItems[0].id); + await qs.enqueue(contentItems[1].id); + await qs.enqueue(contentItems[2].id); + + // Manually set some statuses + await updateQueueItemStatus(db, 2, 'completed'); + await updateQueueItemStatus(db, 3, 'failed'); + + const state = await qs.getState(); + expect(state.pending).toBe(1); + expect(state.completed).toBe(1); + expect(state.failed).toBe(1); + expect(state.downloading).toBe(0); + expect(state.cancelled).toBe(0); + }); + + it('returns all zeros when queue is empty', async () => { + const qs = new QueueService(db, mockDownloadService as any, 0); + + const state = await qs.getState(); + expect(state).toEqual({ + pending: 0, + downloading: 0, + completed: 0, + failed: 0, + cancelled: 0, + }); + }); + }); + + // ── Integration: full lifecycle ── + + describe('full lifecycle', () => { + it('enqueue → process → complete lifecycle works end-to-end', async () => { + const qs = new QueueService(db, mockDownloadService as any, 2); + + // Enqueue two items + const q1 = await qs.enqueue(contentItems[0].id); + const q2 = await qs.enqueue(contentItems[1].id, 5); + + // Wait for processing + await tick(150); + + // Both should be completed + const item1 = await getQueueItemById(db, q1.id); + const item2 = await getQueueItemById(db, q2.id); + expect(item1!.status).toBe('completed'); + expect(item2!.status).toBe('completed'); + + // Download service should have been called twice + 
expect(mockDownloadService.downloadItem).toHaveBeenCalledTimes(2); + + // Should have history events: 2 grabbed + 2 downloaded + const { items: history } = await getHistoryEvents(db); + const grabbed = history.filter((e) => e.eventType === 'grabbed'); + const downloaded = history.filter((e) => e.eventType === 'downloaded'); + expect(grabbed.length).toBe(2); + expect(downloaded.length).toBe(2); + + // State should reflect completed + const state = await qs.getState(); + expect(state.completed).toBe(2); + expect(state.pending).toBe(0); + }); + }); +}); diff --git a/src/__tests__/scan-api.test.ts b/src/__tests__/scan-api.test.ts new file mode 100644 index 0000000..1eac8fe --- /dev/null +++ b/src/__tests__/scan-api.test.ts @@ -0,0 +1,375 @@ +import { describe, it, expect, beforeAll, afterAll, beforeEach, vi } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { SchedulerService } from '../services/scheduler'; +import { RateLimiter } from '../services/rate-limiter'; +import { PlatformRegistry } from '../sources/platform-source'; +import type { PlatformSource, FetchRecentContentOptions } from '../sources/platform-source'; +import type { PlatformContentMetadata } from '../types/index'; +import { createChannel } from '../db/repositories/channel-repository'; +import * as channelRepo from '../db/repositories/channel-repository'; +import { Platform } from '../types/index'; +import type { Channel, PlatformSourceMetadata } from '../types/index'; + +// ── Mock yt-dlp to avoid 
real subprocess calls ── + +const { execYtDlpMock } = vi.hoisted(() => ({ + execYtDlpMock: vi.fn(), +})); + +vi.mock('../sources/yt-dlp', async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + execYtDlp: execYtDlpMock, + }; +}); + +// ── Test Helpers ── + +function buildMockRegistry(): { + registry: PlatformRegistry; + fetchFn: ReturnType; +} { + const fetchFn = vi.fn< + (channel: Channel, options?: FetchRecentContentOptions) => Promise + >(); + const resolveFn = vi.fn< + (url: string) => Promise + >(); + + const mockSource: PlatformSource = { + resolveChannel: resolveFn, + fetchRecentContent: fetchFn, + }; + + const reg = new PlatformRegistry(); + reg.register(Platform.YouTube, mockSource); + reg.register(Platform.SoundCloud, mockSource); + + return { registry: reg, fetchFn }; +} + +function makeCannedContent(count: number, prefix = 'scan'): PlatformContentMetadata[] { + return Array.from({ length: count }, (_, i) => ({ + platformContentId: `${prefix}_${i + 1}`, + title: `${prefix} Title ${i + 1}`, + url: `https://www.youtube.com/watch?v=${prefix}_${i + 1}`, + contentType: 'video' as const, + duration: 600, + thumbnailUrl: null, + publishedAt: null, + })); +} + +// ── Integration Tests ── + +describe('Scan API', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + let mockFetch: ReturnType; + let scheduler: SchedulerService; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-scan-api-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + + // Create scheduler with mocked platform registry + const mock = buildMockRegistry(); + mockFetch = mock.fetchFn; + const rateLimiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 0 }, + [Platform.SoundCloud]: { minIntervalMs: 0 }, + }); + scheduler = new SchedulerService(db, mock.registry, 
rateLimiter); + server.scheduler = scheduler; + + await server.ready(); + + // Read auto-generated API key + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? ''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + scheduler.stop(); + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // K004: best-effort cleanup on Windows + } + }); + + beforeEach(() => { + mockFetch.mockReset(); + }); + + // ── Helper to create a test channel in the DB ── + async function insertTestChannel( + overrides: Partial[1]> = {} + ): Promise { + const defaults = { + name: `Scan Test Channel ${Date.now()}`, + platform: Platform.YouTube as Platform, + platformId: `UC_SCAN_${Date.now()}_${Math.random().toString(36).slice(2, 6)}`, + url: 'https://www.youtube.com/@ScanTest', + monitoringEnabled: true, + checkInterval: 60, + imageUrl: null, + metadata: null, + formatProfileId: null, + }; + return createChannel(db, { ...defaults, ...overrides }); + } + + // ── Auth ── + + describe('Authentication', () => { + it('returns 401 on POST /api/v1/channel/:id/scan without API key', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/1/scan', + }); + expect(res.statusCode).toBe(401); + }); + + it('returns 401 on POST /api/v1/channel/scan-all without API key', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/scan-all', + }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── POST /api/v1/channel/:id/scan ── + + describe('POST /api/v1/channel/:id/scan', () => { + it('returns 200 with CheckChannelResult for a valid channel', async () => { + const channel = await insertTestChannel(); + const content = makeCannedContent(3, `single_${channel.id}`); + mockFetch.mockResolvedValueOnce(content); + + const res = await 
server.inject({ + method: 'POST', + url: `/api/v1/channel/${channel.id}/scan`, + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body).toMatchObject({ + channelId: channel.id, + channelName: channel.name, + status: 'success', + newItems: 3, + totalFetched: 3, + }); + }); + + it('returns 404 for non-existent channel ID', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/99999/scan', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + const body = res.json(); + expect(body.message).toContain('not found'); + }); + + it('returns 400 for non-numeric ID', async () => { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/abc/scan', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(400); + const body = res.json(); + expect(body.message).toContain('number'); + }); + + it('returns 503 when scheduler is null', async () => { + // Temporarily null out the scheduler + const savedScheduler = server.scheduler; + server.scheduler = null; + + try { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/1/scan', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(503); + const body = res.json(); + expect(body.message).toContain('Scheduler is not running'); + } finally { + server.scheduler = savedScheduler; + } + }); + + it('returns already_running status when channel is currently being scanned', async () => { + const channel = await insertTestChannel(); + + // Make fetchRecentContent hang so the first scan is still "in progress" + let resolveHang: ((value: PlatformContentMetadata[]) => void) | undefined; + mockFetch.mockImplementationOnce( + () => new Promise((resolve) => { resolveHang = resolve; }) + ); + // Return empty for the second call (which will hit the lock) + mockFetch.mockResolvedValueOnce([]); + + // Start first scan (don't await — it's 
hanging on fetchRecentContent) + const firstScan = scheduler.checkChannel(channel); + + // Give the first scan time to reach the lock and progress through + // async steps (rateLimiter.acquire, getPlatformSettings, getRecentContentIds) + // before fetchRecentContent hangs + await new Promise((r) => setTimeout(r, 50)); + + // Second scan should get already_running + const result = await scheduler.checkChannel(channel); + expect(result.status).toBe('already_running'); + + // Let the first scan finish + resolveHang?.([]); + await firstScan; + }); + }); + + // ── POST /api/v1/channel/scan-all ── + + describe('POST /api/v1/channel/scan-all', () => { + it('returns 200 with results array and summary', async () => { + const channel1 = await insertTestChannel(); + const channel2 = await insertTestChannel(); + + // Each channel gets their own content response + mockFetch + .mockResolvedValueOnce(makeCannedContent(2, `all_${channel1.id}`)) + .mockResolvedValueOnce(makeCannedContent(1, `all_${channel2.id}`)); + + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/scan-all', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + + expect(body.results).toBeInstanceOf(Array); + expect(body.results.length).toBeGreaterThanOrEqual(2); + expect(body.summary).toMatchObject({ + total: expect.any(Number), + scanned: expect.any(Number), + newItems: expect.any(Number), + errors: expect.any(Number), + }); + // At least our two channels' new items should be counted + expect(body.summary.newItems).toBeGreaterThanOrEqual(3); + }); + + it('returns 503 when scheduler is null', async () => { + const savedScheduler = server.scheduler; + server.scheduler = null; + + try { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/scan-all', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(503); + const body = res.json(); + expect(body.message).toContain('Scheduler is not 
running'); + } finally { + server.scheduler = savedScheduler; + } + }); + + it('processes channels sequentially (not in parallel)', async () => { + const channel1 = await insertTestChannel(); + const channel2 = await insertTestChannel(); + const callOrder: number[] = []; + + // Track call order via mock side effects + mockFetch.mockImplementation(async (channel: Channel) => { + callOrder.push(channel.id); + // Small delay to make parallelism detectable if it existed + await new Promise((r) => setTimeout(r, 10)); + return makeCannedContent(1, `seq_${channel.id}_${Date.now()}`); + }); + + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/scan-all', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + + // Verify that both our channels were called + expect(callOrder).toContain(channel1.id); + expect(callOrder).toContain(channel2.id); + + // Verify sequential order: channel1 should appear before channel2 + // if they were inserted in that order (getEnabledChannels returns by insertion order) + const idx1 = callOrder.indexOf(channel1.id); + const idx2 = callOrder.indexOf(channel2.id); + expect(idx1).toBeLessThan(idx2); + }); + + it('returns empty results and summary.total: 0 when no enabled channels exist', async () => { + // Temporarily mock getEnabledChannels to return empty array + const spy = vi.spyOn(channelRepo, 'getEnabledChannels').mockResolvedValueOnce([]); + + try { + const res = await server.inject({ + method: 'POST', + url: '/api/v1/channel/scan-all', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.results).toEqual([]); + expect(body.summary).toEqual({ + total: 0, + scanned: 0, + newItems: 0, + errors: 0, + }); + } finally { + spy.mockRestore(); + } + }); + }); +}); diff --git a/src/__tests__/scheduler.test.ts b/src/__tests__/scheduler.test.ts new file mode 100644 index 0000000..9a5cd70 --- /dev/null +++ 
b/src/__tests__/scheduler.test.ts @@ -0,0 +1,884 @@ +import { + describe, + it, + expect, + beforeAll, + beforeEach, + afterAll, + vi, +} from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { RateLimiter } from '../services/rate-limiter'; +import { SchedulerService } from '../services/scheduler'; +import type { CheckChannelResult } from '../services/scheduler'; +import { PlatformRegistry } from '../sources/platform-source'; +import type { PlatformSource, FetchRecentContentOptions } from '../sources/platform-source'; +import { YtDlpError } from '../sources/yt-dlp'; +import { + createChannel, + getChannelById, + getEnabledChannels, +} from '../db/repositories/channel-repository'; +import { + getContentByChannelId, + getRecentContentIds, +} from '../db/repositories/content-repository'; +import * as contentRepo from '../db/repositories/content-repository'; +import { Platform } from '../types/index'; +import type { + Channel, + PlatformSourceMetadata, + PlatformContentMetadata, +} from '../types/index'; +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; + +// ── Rate Limiter Tests ── + +describe('RateLimiter', () => { + it('first acquire is immediate (no wait)', async () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 1000 }, + }); + + const start = Date.now(); + await limiter.acquire(Platform.YouTube); + const elapsed = Date.now() - start; + + // First call should be near-instant + expect(elapsed).toBeLessThan(50); + }); + + it('enforces minimum interval between calls', async () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 100 }, + }); + + await limiter.acquire(Platform.YouTube); + const start = Date.now(); + await 
limiter.acquire(Platform.YouTube); + const elapsed = Date.now() - start; + + // Second call should have waited ~100ms + expect(elapsed).toBeGreaterThanOrEqual(80); + expect(elapsed).toBeLessThan(200); + }); + + it('does not wait if enough time has passed', async () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 50 }, + }); + + await limiter.acquire(Platform.YouTube); + // Wait longer than the interval + await new Promise((r) => setTimeout(r, 80)); + + const start = Date.now(); + await limiter.acquire(Platform.YouTube); + const elapsed = Date.now() - start; + + expect(elapsed).toBeLessThan(30); + }); + + it('tracks platforms independently', async () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 100 }, + [Platform.SoundCloud]: { minIntervalMs: 100 }, + }); + + await limiter.acquire(Platform.YouTube); + + // SoundCloud first call should be instant even though YouTube just went + const start = Date.now(); + await limiter.acquire(Platform.SoundCloud); + const elapsed = Date.now() - start; + + expect(elapsed).toBeLessThan(30); + }); + + it('doubles effective interval on error (exponential backoff)', () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 1000 }, + }); + + limiter.reportError(Platform.YouTube); + const state1 = limiter.getState()[Platform.YouTube]; + expect(state1.errorCount).toBe(1); + expect(state1.effectiveIntervalMs).toBe(2000); // 1000 * 2^1 + + limiter.reportError(Platform.YouTube); + const state2 = limiter.getState()[Platform.YouTube]; + expect(state2.errorCount).toBe(2); + expect(state2.effectiveIntervalMs).toBe(4000); // 1000 * 2^2 + }); + + it('caps backoff at MAX_BACKOFF_MS (60s)', () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 1000 }, + }); + + // 2^7 = 128, so 1000 * 128 = 128000 > 60000 + for (let i = 0; i < 7; i++) { + limiter.reportError(Platform.YouTube); + } + + const state = 
limiter.getState()[Platform.YouTube]; + expect(state.effectiveIntervalMs).toBe(60_000); + expect(state.errorCount).toBe(7); + }); + + it('resets to minimum interval on success', () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 1000 }, + }); + + limiter.reportError(Platform.YouTube); + limiter.reportError(Platform.YouTube); + expect(limiter.getState()[Platform.YouTube].effectiveIntervalMs).toBe(4000); + + limiter.reportSuccess(Platform.YouTube); + const state = limiter.getState()[Platform.YouTube]; + expect(state.errorCount).toBe(0); + expect(state.effectiveIntervalMs).toBe(1000); + }); + + it('getState returns correct structure for all platforms', () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 1000 }, + [Platform.SoundCloud]: { minIntervalMs: 3000 }, + }); + + const state = limiter.getState(); + expect(state[Platform.YouTube]).toEqual({ + lastCallTime: null, + errorCount: 0, + effectiveIntervalMs: 1000, + }); + expect(state[Platform.SoundCloud]).toEqual({ + lastCallTime: null, + errorCount: 0, + effectiveIntervalMs: 3000, + }); + }); + + it('getState reflects updated lastCallTime after acquire', async () => { + const limiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 50 }, + }); + + expect(limiter.getState()[Platform.YouTube].lastCallTime).toBeNull(); + + await limiter.acquire(Platform.YouTube); + const state = limiter.getState()[Platform.YouTube]; + expect(state.lastCallTime).toBeTypeOf('number'); + expect(state.lastCallTime! 
- Date.now()).toBeLessThan(100); + }); +}); + +// ── Scheduler Tests ── + +describe('SchedulerService', () => { + let db: LibSQLDatabase; + let tmpDir: string; + let registry: PlatformRegistry; + let rateLimiter: RateLimiter; + let mockFetchRecentContent: ReturnType; + let mockResolveChannel: ReturnType; + + // Build a mock PlatformSource that returns canned content + function buildMockRegistry(): { + registry: PlatformRegistry; + fetchFn: ReturnType; + resolveFn: ReturnType; + } { + const fetchFn = vi.fn< + (channel: Channel, options?: FetchRecentContentOptions) => Promise + >(); + const resolveFn = vi.fn< + (url: string) => Promise + >(); + + const mockSource: PlatformSource = { + resolveChannel: resolveFn, + fetchRecentContent: fetchFn, + }; + + const reg = new PlatformRegistry(); + reg.register(Platform.YouTube, mockSource); + reg.register(Platform.SoundCloud, mockSource); + + return { registry: reg, fetchFn, resolveFn }; + } + + function makeCannedContent(count: number, prefix = 'vid'): PlatformContentMetadata[] { + return Array.from({ length: count }, (_, i) => ({ + platformContentId: `${prefix}_${i + 1}`, + title: `${prefix} Title ${i + 1}`, + url: `https://www.youtube.com/watch?v=${prefix}_${i + 1}`, + contentType: 'video' as const, + duration: 600, + thumbnailUrl: null, + publishedAt: null, + })); + } + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-scheduler-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + }); + + beforeEach(() => { + const mock = buildMockRegistry(); + registry = mock.registry; + mockFetchRecentContent = mock.fetchFn; + mockResolveChannel = mock.resolveFn; + rateLimiter = new RateLimiter({ + [Platform.YouTube]: { minIntervalMs: 0 }, // No delay in tests + [Platform.SoundCloud]: { minIntervalMs: 0 }, + }); + }); + + afterAll(async () => { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, 
force: true }); + } + } catch { + // K004: best-effort cleanup on Windows + } + }); + + // ── Helper to create a test channel in the DB ── + async function insertTestChannel( + overrides: Partial[1]> = {} + ): Promise { + const defaults = { + name: `Test Channel ${Date.now()}`, + platform: Platform.YouTube as Platform, + platformId: `UC_TEST_${Date.now()}_${Math.random().toString(36).slice(2, 6)}`, + url: 'https://www.youtube.com/@Test', + monitoringEnabled: true, + checkInterval: 60, + imageUrl: null, + metadata: null, + formatProfileId: null, + monitoringMode: 'all' as const, + }; + return createChannel(db, { ...defaults, ...overrides }); + } + + // ── start() ── + + describe('start()', () => { + it('loads enabled channels and creates cron jobs', async () => { + const channel = await insertTestChannel({ monitoringEnabled: true }); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + const count = await scheduler.start(); + expect(count).toBeGreaterThanOrEqual(1); + + const state = scheduler.getState(); + expect(state.running).toBe(true); + expect(state.channelCount).toBeGreaterThanOrEqual(1); + + // Find our channel in the state + const channelState = state.channels.find( + (c) => c.channelId === channel.id + ); + expect(channelState).toBeDefined(); + expect(channelState!.nextRun).toBeInstanceOf(Date); + + scheduler.stop(); + }); + + it('does not create jobs for disabled channels', async () => { + const disabledChannel = await insertTestChannel({ + monitoringEnabled: false, + }); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + await scheduler.start(); + const state = scheduler.getState(); + + const channelState = state.channels.find( + (c) => c.channelId === disabledChannel.id + ); + expect(channelState).toBeUndefined(); + + scheduler.stop(); + }); + }); + + // ── checkChannel() ── + + describe('checkChannel()', () => { + it('inserts new content items and updates lastCheckedAt/lastCheckStatus', async () => { + const 
channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + // Mock: return 3 new content items + mockFetchRecentContent.mockResolvedValueOnce(makeCannedContent(3)); + + await scheduler.checkChannel(channel); + + // Verify content was inserted + const content = await getContentByChannelId(db, channel.id); + expect(content.length).toBe(3); + expect(content[0].status).toBe('monitored'); + expect(content[0].platformContentId).toBeTruthy(); + + // Verify channel status was updated + const updated = await getChannelById(db, channel.id); + expect(updated!.lastCheckedAt).toBeTruthy(); + expect(updated!.lastCheckStatus).toBe('success'); + + scheduler.stop(); + }); + + it('skips duplicate content items', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + const items = makeCannedContent(3, `dedup_${channel.id}`); + + // First check — all 3 are new + mockFetchRecentContent.mockResolvedValueOnce(items); + await scheduler.checkChannel(channel); + + const contentAfterFirst = await getContentByChannelId(db, channel.id); + expect(contentAfterFirst.length).toBe(3); + + // Second check — same 3 items should be deduplicated + mockFetchRecentContent.mockResolvedValueOnce(items); + await scheduler.checkChannel(channel); + + const contentAfterSecond = await getContentByChannelId(db, channel.id); + expect(contentAfterSecond.length).toBe(3); // No duplicates added + }); + + it('inserts only genuinely new items when some already exist', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + // First check — insert 2 items + const existing = makeCannedContent(2, `partial_${channel.id}`); + mockFetchRecentContent.mockResolvedValueOnce(existing); + await scheduler.checkChannel(channel); + + // Second check — return the same 2 + 1 new + const combined = [ + ...existing, + { + 
platformContentId: `partial_${channel.id}_new_1`, + title: 'New Item', + url: 'https://www.youtube.com/watch?v=new1', + contentType: 'video' as const, + duration: 300, + thumbnailUrl: null, + }, + ]; + mockFetchRecentContent.mockResolvedValueOnce(combined); + await scheduler.checkChannel(channel); + + const content = await getContentByChannelId(db, channel.id); + expect(content.length).toBe(3); // 2 existing + 1 new + }); + + it('sets lastCheckStatus to "error" on generic error', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + mockFetchRecentContent.mockRejectedValueOnce( + new Error('Network timeout') + ); + + await scheduler.checkChannel(channel); + + const updated = await getChannelById(db, channel.id); + expect(updated!.lastCheckStatus).toBe('error'); + expect(updated!.lastCheckedAt).toBeTruthy(); + }); + + it('sets lastCheckStatus to "rate_limited" on YtDlpError with isRateLimit', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + mockFetchRecentContent.mockRejectedValueOnce( + new YtDlpError( + 'HTTP Error 429: Too Many Requests', + 'ERROR: HTTP Error 429: Too Many Requests', + 1 + ) + ); + + await scheduler.checkChannel(channel); + + const updated = await getChannelById(db, channel.id); + expect(updated!.lastCheckStatus).toBe('rate_limited'); + }); + + it('calls rateLimiter.reportError on check failure', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + const reportErrorSpy = vi.spyOn(rateLimiter, 'reportError'); + + mockFetchRecentContent.mockRejectedValueOnce(new Error('fail')); + + await scheduler.checkChannel(channel); + + expect(reportErrorSpy).toHaveBeenCalledWith(channel.platform); + }); + + it('calls rateLimiter.reportSuccess on successful check', async () => { + const channel = await insertTestChannel(); + 
const scheduler = new SchedulerService(db, registry, rateLimiter); + const reportSuccessSpy = vi.spyOn(rateLimiter, 'reportSuccess'); + + mockFetchRecentContent.mockResolvedValueOnce([]); + + await scheduler.checkChannel(channel); + + expect(reportSuccessSpy).toHaveBeenCalledWith(channel.platform); + }); + + // ── Return value tests ── + + it('returns CheckChannelResult with correct newItems and totalFetched on success', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + mockFetchRecentContent.mockResolvedValueOnce(makeCannedContent(5, `rv_${channel.id}`)); + + const result = await scheduler.checkChannel(channel); + + expect(result).toEqual({ + channelId: channel.id, + channelName: channel.name, + newItems: 5, + totalFetched: 5, + status: 'success', + } satisfies CheckChannelResult); + }); + + it('returns status "error" when the platform source throws', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + mockFetchRecentContent.mockRejectedValueOnce(new Error('Network timeout')); + + const result = await scheduler.checkChannel(channel); + + expect(result.channelId).toBe(channel.id); + expect(result.channelName).toBe(channel.name); + expect(result.status).toBe('error'); + expect(result.newItems).toBe(0); + expect(result.totalFetched).toBe(0); + }); + + it('returns status "rate_limited" when YtDlpError with isRateLimit is thrown', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + mockFetchRecentContent.mockRejectedValueOnce( + new YtDlpError( + 'HTTP Error 429: Too Many Requests', + 'ERROR: HTTP Error 429: Too Many Requests', + 1 + ) + ); + + const result = await scheduler.checkChannel(channel); + + expect(result.channelId).toBe(channel.id); + expect(result.status).toBe('rate_limited'); + expect(result.newItems).toBe(0); + 
expect(result.totalFetched).toBe(0); + }); + + // ── Per-channel lock tests ── + + it('returns "already_running" when the same channel is checked concurrently', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + // Mock a slow platform source — holds for 200ms + mockFetchRecentContent.mockImplementation( + () => new Promise((resolve) => setTimeout(() => resolve([]), 200)) + ); + + // Start first check (will be in-flight for ~200ms) + const first = scheduler.checkChannel(channel); + + // Immediately start second check — should hit the lock + const second = await scheduler.checkChannel(channel); + + expect(second.status).toBe('already_running'); + expect(second.channelId).toBe(channel.id); + expect(second.newItems).toBe(0); + expect(second.totalFetched).toBe(0); + + // First check should complete normally + const firstResult = await first; + expect(firstResult.status).toBe('success'); + }); + + it('releases the lock after completion — subsequent call works normally', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + mockFetchRecentContent.mockResolvedValueOnce(makeCannedContent(2, `lock_release_${channel.id}`)); + + // First call completes + const first = await scheduler.checkChannel(channel); + expect(first.status).toBe('success'); + + // Second call should NOT be locked out + mockFetchRecentContent.mockResolvedValueOnce([]); + const second = await scheduler.checkChannel(channel); + expect(second.status).toBe('success'); + }); + + it('releases the lock even when the check fails', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + mockFetchRecentContent.mockRejectedValueOnce(new Error('Boom')); + + // First call fails + const first = await scheduler.checkChannel(channel); + expect(first.status).toBe('error'); + + // Second call 
should NOT be locked out + mockFetchRecentContent.mockResolvedValueOnce([]); + const second = await scheduler.checkChannel(channel); + expect(second.status).toBe('success'); + }); + + // ── onNewContent + monitored gate tests ── + + it('calls onNewContent for each newly inserted (monitored) item', async () => { + const channel = await insertTestChannel(); + const onNewContentSpy = vi.fn(); + const scheduler = new SchedulerService(db, registry, rateLimiter, { + onNewContent: onNewContentSpy, + }); + + mockFetchRecentContent.mockResolvedValueOnce( + makeCannedContent(3, `on_new_${channel.id}`) + ); + + const result = await scheduler.checkChannel(channel); + + expect(result.newItems).toBe(3); + expect(onNewContentSpy).toHaveBeenCalledTimes(3); + // Each call receives the content item ID (a positive integer) + for (const call of onNewContentSpy.mock.calls) { + expect(call[0]).toBeTypeOf('number'); + expect(call[0]).toBeGreaterThan(0); + } + + scheduler.stop(); + }); + + it('does NOT call onNewContent for items with monitored=false', async () => { + const channel = await insertTestChannel(); + const onNewContentSpy = vi.fn(); + const scheduler = new SchedulerService(db, registry, rateLimiter, { + onNewContent: onNewContentSpy, + }); + + // Spy on createContentItem to return items with monitored=false + const createSpy = vi.spyOn(contentRepo, 'createContentItem').mockResolvedValue({ + id: 9999, + channelId: channel.id, + title: 'Unmonitored Item', + platformContentId: 'unmon_1', + url: 'https://example.com/unmon_1', + contentType: 'video', + duration: 600, + thumbnailUrl: null, + filePath: null, + fileSize: null, + format: null, + qualityMetadata: null, + status: 'monitored', + monitored: false, + publishedAt: null, + downloadedAt: null, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + }); + + mockFetchRecentContent.mockResolvedValueOnce( + makeCannedContent(2, `unmon_${channel.id}`) + ); + + await scheduler.checkChannel(channel); + + // 
createContentItem was called for each new item + expect(createSpy).toHaveBeenCalledTimes(2); + // But onNewContent was NOT called because monitored=false + expect(onNewContentSpy).not.toHaveBeenCalled(); + + createSpy.mockRestore(); + scheduler.stop(); + }); + + it('passes publishedAt from platform metadata to createContentItem', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + const items: PlatformContentMetadata[] = [ + { + platformContentId: `pub_${channel.id}_1`, + title: 'Item With PublishedAt', + url: 'https://www.youtube.com/watch?v=pub1', + contentType: 'video', + duration: 300, + thumbnailUrl: null, + publishedAt: '2024-06-15T00:00:00.000Z', + }, + ]; + mockFetchRecentContent.mockResolvedValueOnce(items); + + await scheduler.checkChannel(channel); + + // Verify the item was stored with publishedAt + const content = await getContentByChannelId(db, channel.id); + const inserted = content.find(c => c.platformContentId === `pub_${channel.id}_1`); + expect(inserted).toBeDefined(); + expect(inserted!.publishedAt).toBe('2024-06-15T00:00:00.000Z'); + + scheduler.stop(); + }); + + // ── monitoringMode-aware item creation tests ── + + it("creates items with monitored=false when channel monitoringMode is 'none'", async () => { + const channel = await insertTestChannel({ monitoringMode: 'none', monitoringEnabled: false }); + const onNewContentSpy = vi.fn(); + const scheduler = new SchedulerService(db, registry, rateLimiter, { + onNewContent: onNewContentSpy, + }); + + mockFetchRecentContent.mockResolvedValueOnce( + makeCannedContent(2, `mode_none_${channel.id}`) + ); + + const result = await scheduler.checkChannel(channel); + expect(result.status).toBe('success'); + expect(result.newItems).toBe(2); + + // Verify items were created with monitored=false + const content = await getContentByChannelId(db, channel.id); + expect(content.length).toBe(2); + for (const item of content) { + 
expect(item.monitored).toBe(false); + } + + // onNewContent should NOT be called because monitored=false + expect(onNewContentSpy).not.toHaveBeenCalled(); + + scheduler.stop(); + }); + + it("creates items with monitored=true when channel monitoringMode is 'future'", async () => { + const channel = await insertTestChannel({ monitoringMode: 'future' }); + const onNewContentSpy = vi.fn(); + const scheduler = new SchedulerService(db, registry, rateLimiter, { + onNewContent: onNewContentSpy, + }); + + mockFetchRecentContent.mockResolvedValueOnce( + makeCannedContent(2, `mode_future_${channel.id}`) + ); + + const result = await scheduler.checkChannel(channel); + expect(result.status).toBe('success'); + expect(result.newItems).toBe(2); + + // Scheduler discovers *new* content (future), so 'future' → monitored=true + const content = await getContentByChannelId(db, channel.id); + expect(content.length).toBe(2); + for (const item of content) { + expect(item.monitored).toBe(true); + } + + // onNewContent should be called because items are monitored + expect(onNewContentSpy).toHaveBeenCalledTimes(2); + + scheduler.stop(); + }); + }); + + // ── addChannel() / removeChannel() ── + + describe('addChannel() / removeChannel()', () => { + it('addChannel creates a new job', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + scheduler.addChannel(channel); + + const state = scheduler.getState(); + const channelState = state.channels.find( + (c) => c.channelId === channel.id + ); + expect(channelState).toBeDefined(); + expect(channelState!.nextRun).toBeInstanceOf(Date); + + scheduler.stop(); + }); + + it('addChannel skips disabled channels', async () => { + const channel = await insertTestChannel({ monitoringEnabled: false }); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + scheduler.addChannel(channel); + + const state = scheduler.getState(); + const channelState = 
state.channels.find( + (c) => c.channelId === channel.id + ); + expect(channelState).toBeUndefined(); + + scheduler.stop(); + }); + + it('removeChannel stops and removes the job', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + scheduler.addChannel(channel); + expect( + scheduler.getState().channels.find((c) => c.channelId === channel.id) + ).toBeDefined(); + + scheduler.removeChannel(channel.id); + expect( + scheduler.getState().channels.find((c) => c.channelId === channel.id) + ).toBeUndefined(); + + scheduler.stop(); + }); + + it('removeChannel is safe for non-existent channel', () => { + const scheduler = new SchedulerService(db, registry, rateLimiter); + + // Should not throw + scheduler.removeChannel(99999); + scheduler.stop(); + }); + }); + + // ── updateChannel() ── + + describe('updateChannel()', () => { + it('removes old job and creates new one', async () => { + const channel = await insertTestChannel({ checkInterval: 60 }); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + scheduler.addChannel(channel); + expect( + scheduler.getState().channels.find((c) => c.channelId === channel.id) + ).toBeDefined(); + + // Update with different interval + const updatedChannel: Channel = { + ...channel, + checkInterval: 120, + }; + scheduler.updateChannel(updatedChannel); + + // Should still have a job for this channel + const channelState = scheduler.getState().channels.find( + (c) => c.channelId === channel.id + ); + expect(channelState).toBeDefined(); + + scheduler.stop(); + }); + + it('removes job when monitoring is disabled', async () => { + const channel = await insertTestChannel({ monitoringEnabled: true }); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + scheduler.addChannel(channel); + + const updatedChannel: Channel = { + ...channel, + monitoringEnabled: false, + }; + scheduler.updateChannel(updatedChannel); + + const 
channelState = scheduler.getState().channels.find( + (c) => c.channelId === channel.id + ); + expect(channelState).toBeUndefined(); + + scheduler.stop(); + }); + }); + + // ── getState() ── + + describe('getState()', () => { + it('returns correct structure', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + scheduler.addChannel(channel); + const state = scheduler.getState(); + + expect(state).toHaveProperty('running'); + expect(state).toHaveProperty('channelCount'); + expect(state).toHaveProperty('channels'); + expect(Array.isArray(state.channels)).toBe(true); + + const cs = state.channels[0]; + expect(cs).toHaveProperty('channelId'); + expect(cs).toHaveProperty('channelName'); + expect(cs).toHaveProperty('platform'); + expect(cs).toHaveProperty('isRunning'); + expect(cs).toHaveProperty('nextRun'); + + scheduler.stop(); + }); + + it('reports running: false before start and after stop', async () => { + const scheduler = new SchedulerService(db, registry, rateLimiter); + + expect(scheduler.getState().running).toBe(false); + + await scheduler.start(); + expect(scheduler.getState().running).toBe(true); + + scheduler.stop(); + expect(scheduler.getState().running).toBe(false); + }); + }); + + // ── stop() ── + + describe('stop()', () => { + it('stops all jobs and clears state', async () => { + const channel = await insertTestChannel(); + const scheduler = new SchedulerService(db, registry, rateLimiter); + + scheduler.addChannel(channel); + expect(scheduler.getState().channelCount).toBeGreaterThanOrEqual(1); + + scheduler.stop(); + expect(scheduler.getState().channelCount).toBe(0); + expect(scheduler.getState().channels).toEqual([]); + expect(scheduler.getState().running).toBe(false); + }); + }); +}); diff --git a/src/__tests__/server.integration.test.ts b/src/__tests__/server.integration.test.ts new file mode 100644 index 0000000..f8d8de8 --- /dev/null +++ 
b/src/__tests__/server.integration.test.ts @@ -0,0 +1,208 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; + +/** + * Integration tests for the Fastify server, auth middleware, and endpoints. + * + * Each describe block creates an isolated temp database, runs migrations, + * builds a server instance, and uses Fastify's inject() for fast HTTP testing. + * No real ports are bound. + */ + +describe('Server integration', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-srv-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Auth plugin generates the API key at registration time. + // Read it from the database for use in authenticated requests. + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? ''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + // On Windows, SQLite WAL/SHM files may still be locked briefly after + // closeDatabase(). Use try/catch to avoid EPERM failures in cleanup — + // the OS temp directory is cleaned automatically. 
+ try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Liveness ── + + describe('GET /ping', () => { + it('returns 200 with { status: "ok" } — no auth required', async () => { + const res = await server.inject({ method: 'GET', url: '/ping' }); + + expect(res.statusCode).toBe(200); + expect(res.json()).toEqual({ status: 'ok' }); + }); + }); + + // ── Authentication ── + + describe('Authentication gating', () => { + it('returns 401 when no API key and no same-origin headers are provided (external request)', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + }); + + expect(res.statusCode).toBe(401); + const body = res.json(); + expect(body.statusCode).toBe(401); + expect(body.error).toBe('Unauthorized'); + expect(body.message).toContain('API key'); + }); + + it('returns 401 when an incorrect API key is provided', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { 'x-api-key': 'wrong-key-value' }, + }); + + expect(res.statusCode).toBe(401); + expect(res.json().error).toBe('Unauthorized'); + }); + + it('returns 200 when correct API key is provided via X-Api-Key header', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + }); + + it('returns 200 when correct API key is provided via apikey query param', async () => { + const res = await server.inject({ + method: 'GET', + url: `/api/v1/system/status?apikey=${apiKey}`, + }); + + expect(res.statusCode).toBe(200); + }); + + it('returns 200 when same-origin Origin header matches server host (no API key needed)', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { origin: 'http://localhost:8989' }, + }); + + 
expect(res.statusCode).toBe(200); + }); + }); + + // ── System Status ── + + describe('GET /api/v1/system/status', () => { + it('returns system info with expected fields', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/system/status', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + + expect(body).toHaveProperty('appName'); + expect(body).toHaveProperty('version'); + expect(body).toHaveProperty('uptime'); + expect(body).toHaveProperty('platform'); + expect(body).toHaveProperty('nodeVersion'); + expect(body).toHaveProperty('arch'); + expect(body).toHaveProperty('memoryUsage'); + expect(body.memoryUsage).toHaveProperty('heapUsed'); + expect(body.memoryUsage).toHaveProperty('heapTotal'); + expect(body.memoryUsage).toHaveProperty('rss'); + expect(typeof body.uptime).toBe('number'); + }); + }); + + // ── Health ── + + describe('GET /api/v1/health', () => { + it('returns 200 with component health statuses including database', async () => { + const res = await server.inject({ + method: 'GET', + url: '/api/v1/health', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + + expect(body).toHaveProperty('status'); + expect(body).toHaveProperty('components'); + expect(body).toHaveProperty('uptime'); + expect(Array.isArray(body.components)).toBe(true); + + // Find database component + const dbComponent = body.components.find( + (c: { name: string }) => c.name === 'database' + ); + expect(dbComponent).toBeDefined(); + expect(dbComponent.status).toBe('healthy'); + + // Find server component + const serverComponent = body.components.find( + (c: { name: string }) => c.name === 'server' + ); + expect(serverComponent).toBeDefined(); + expect(serverComponent.status).toBe('healthy'); + }); + }); + + // ── Error handling ── + + describe('Error handling', () => { + it('returns structured 404 JSON for unknown routes', async () => { + const 
res = await server.inject({ + method: 'GET', + url: '/api/v1/nonexistent-route', + headers: { 'x-api-key': apiKey }, + }); + + expect(res.statusCode).toBe(404); + const body = res.json(); + expect(body).toHaveProperty('statusCode', 404); + expect(body).toHaveProperty('error', 'Not Found'); + expect(body).toHaveProperty('message'); + }); + }); +}); diff --git a/src/__tests__/sources.test.ts b/src/__tests__/sources.test.ts new file mode 100644 index 0000000..60dc603 --- /dev/null +++ b/src/__tests__/sources.test.ts @@ -0,0 +1,880 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +// Mock the yt-dlp module before importing sources +vi.mock('../sources/yt-dlp', () => ({ + execYtDlp: vi.fn(), + parseSingleJson: vi.fn(), + parseJsonLines: vi.fn(), + checkYtDlpAvailable: vi.fn(), + YtDlpError: class YtDlpError extends Error { + stderr: string; + exitCode: number; + isRateLimit: boolean; + constructor(message: string, stderr: string, exitCode: number) { + super(message); + this.name = 'YtDlpError'; + this.stderr = stderr; + this.exitCode = exitCode; + this.isRateLimit = stderr.toLowerCase().includes('429'); + } + }, +})); + +import { execYtDlp, parseSingleJson, parseJsonLines } from '../sources/yt-dlp'; +import { YouTubeSource, isYouTubeUrl } from '../sources/youtube'; +import { SoundCloudSource, isSoundCloudChannelUrl } from '../sources/soundcloud'; +import { PlatformRegistry } from '../sources/platform-source'; +import { Platform, ContentType } from '../types/index'; +import type { Channel } from '../types/index'; + +const mockExecYtDlp = execYtDlp as ReturnType; +const mockParseSingleJson = parseSingleJson as ReturnType; +const mockParseJsonLines = parseJsonLines as ReturnType; + +// ── Canned Fixtures ── + +const YOUTUBE_CHANNEL_JSON = { + channel: 'Linus Tech Tips', + channel_id: 'UCXuqSBlHAE6Xw-yeJA0Tunw', + channel_url: 'https://www.youtube.com/channel/UCXuqSBlHAE6Xw-yeJA0Tunw', + uploader: 'Linus Tech Tips', + uploader_id: '@LinusTechTips', + 
uploader_url: 'https://www.youtube.com/@LinusTechTips', + thumbnails: [ + { url: 'https://i.ytimg.com/vi/thumb_small.jpg', width: 120, height: 90 }, + { url: 'https://i.ytimg.com/vi/thumb_large.jpg', width: 1280, height: 720 }, + ], +}; + +const YOUTUBE_PLAYLIST_ENTRIES = [ + { + id: 'dQw4w9WgXcQ', + title: 'Never Gonna Give You Up', + url: 'https://www.youtube.com/watch?v=dQw4w9WgXcQ', + webpage_url: 'https://www.youtube.com/watch?v=dQw4w9WgXcQ', + duration: 212, + thumbnail: 'https://i.ytimg.com/vi/dQw4w9WgXcQ/hqdefault.jpg', + live_status: null, + upload_date: '20240315', + }, + { + id: 'abc123xyz', + title: 'Tech Review 2024', + url: 'https://www.youtube.com/watch?v=abc123xyz', + duration: 845, + thumbnails: [ + { url: 'https://i.ytimg.com/vi/abc123xyz/sd.jpg' }, + { url: 'https://i.ytimg.com/vi/abc123xyz/hd.jpg' }, + ], + live_status: null, + upload_date: '20240620', + }, + { + id: 'live001', + title: 'Live Stream Event', + url: 'https://www.youtube.com/watch?v=live001', + duration: null, + thumbnail: 'https://i.ytimg.com/vi/live001/hqdefault.jpg', + live_status: 'is_live', + upload_date: '20240701', + }, +]; + +const SOUNDCLOUD_ARTIST_JSON = { + uploader: 'Deadmau5', + uploader_id: 'deadmau5', + uploader_url: 'https://soundcloud.com/deadmau5', + channel: null, + channel_id: null, + channel_url: null, + thumbnails: [ + { url: 'https://i1.sndcdn.com/avatars-small.jpg' }, + { url: 'https://i1.sndcdn.com/avatars-large.jpg' }, + ], +}; + +const SOUNDCLOUD_TRACK_ENTRIES = [ + { + id: 'sc-track-001', + title: 'Strobe (Club Edit)', + url: 'https://soundcloud.com/deadmau5/strobe-club-edit', + duration: 421, + thumbnail: 'https://i1.sndcdn.com/artworks-track1.jpg', + }, + { + id: 'sc-track-002', + title: 'Ghosts n Stuff', + url: 'https://soundcloud.com/deadmau5/ghosts-n-stuff', + duration: 335, + thumbnails: [ + { url: 'https://i1.sndcdn.com/artworks-track2-sm.jpg' }, + { url: 'https://i1.sndcdn.com/artworks-track2-lg.jpg' }, + ], + }, + { + id: 'sc-track-003', + 
title: 'Raise Your Weapon', + url: 'https://soundcloud.com/deadmau5/raise-your-weapon', + duration: 498, + thumbnail: null, + }, +]; + +// ── Helper ── + +function makeChannel(overrides: Partial = {}): Channel { + return { + id: 1, + name: 'Test Channel', + platform: Platform.YouTube, + platformId: 'UC123', + url: 'https://www.youtube.com/@TestChannel', + monitoringEnabled: true, + checkInterval: 360, + imageUrl: null, + metadata: null, + formatProfileId: null, + monitoringMode: 'all', + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + lastCheckedAt: null, + lastCheckStatus: null, + ...overrides, + }; +} + +// ── Tests ── + +describe('YouTube URL validation', () => { + it('accepts @handle format', () => { + expect(isYouTubeUrl('https://www.youtube.com/@LinusTechTips')).toBe(true); + expect(isYouTubeUrl('https://youtube.com/@LinusTechTips')).toBe(true); + }); + + it('accepts /channel/ format', () => { + expect( + isYouTubeUrl( + 'https://www.youtube.com/channel/UCXuqSBlHAE6Xw-yeJA0Tunw' + ) + ).toBe(true); + }); + + it('accepts /c/ format', () => { + expect(isYouTubeUrl('https://www.youtube.com/c/LinusTechTips')).toBe(true); + }); + + it('accepts /user/ format', () => { + expect(isYouTubeUrl('https://www.youtube.com/user/LinusTechTips')).toBe( + true + ); + }); + + it('accepts youtu.be short URLs', () => { + expect(isYouTubeUrl('https://youtu.be/dQw4w9WgXcQ')).toBe(true); + }); + + it('rejects non-YouTube URLs', () => { + expect(isYouTubeUrl('https://soundcloud.com/artist')).toBe(false); + expect(isYouTubeUrl('https://example.com')).toBe(false); + }); +}); + +describe('SoundCloud URL validation', () => { + it('accepts artist-level URLs', () => { + expect(isSoundCloudChannelUrl('https://soundcloud.com/deadmau5')).toBe( + true + ); + expect(isSoundCloudChannelUrl('https://www.soundcloud.com/deadmau5')).toBe( + true + ); + }); + + it('rejects track URLs', () => { + expect( + isSoundCloudChannelUrl( + 
'https://soundcloud.com/deadmau5/tracks/strobe' + ) + ).toBe(false); + }); + + it('rejects set URLs', () => { + expect( + isSoundCloudChannelUrl( + 'https://soundcloud.com/deadmau5/sets/album' + ) + ).toBe(false); + }); + + it('rejects non-SoundCloud URLs', () => { + expect(isSoundCloudChannelUrl('https://youtube.com/@test')).toBe(false); + expect(isSoundCloudChannelUrl('https://example.com')).toBe(false); + }); +}); + +describe('YouTubeSource', () => { + const youtube = new YouTubeSource(); + + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe('resolveChannel', () => { + it('resolves channel metadata from a YouTube URL', async () => { + mockExecYtDlp.mockResolvedValueOnce({ + stdout: JSON.stringify(YOUTUBE_CHANNEL_JSON), + stderr: '', + exitCode: 0, + }); + mockParseSingleJson.mockReturnValueOnce(YOUTUBE_CHANNEL_JSON); + + const result = await youtube.resolveChannel( + 'https://www.youtube.com/@LinusTechTips' + ); + + expect(result).toEqual({ + name: 'Linus Tech Tips', + platformId: 'UCXuqSBlHAE6Xw-yeJA0Tunw', + imageUrl: 'https://i.ytimg.com/vi/thumb_large.jpg', + url: 'https://www.youtube.com/channel/UCXuqSBlHAE6Xw-yeJA0Tunw', + platform: 'youtube', + }); + + // Verify yt-dlp was called with correct args + expect(mockExecYtDlp).toHaveBeenCalledWith( + [ + '--dump-single-json', + '--playlist-items', + '0', + '--flat-playlist', + 'https://www.youtube.com/@LinusTechTips', + ], + { timeout: 30_000 } + ); + }); + + it('falls back to uploader when channel is missing', async () => { + const noChannelJson = { + ...YOUTUBE_CHANNEL_JSON, + channel: undefined, + channel_id: undefined, + channel_url: undefined, + }; + mockExecYtDlp.mockResolvedValueOnce({ + stdout: '', + stderr: '', + exitCode: 0, + }); + mockParseSingleJson.mockReturnValueOnce(noChannelJson); + + const result = await youtube.resolveChannel( + 'https://www.youtube.com/@Test' + ); + + expect(result.name).toBe('Linus Tech Tips'); // falls back to uploader + }); + }); + + 
describe('fetchRecentContent', () => { + it('Phase 1 discovers items via --flat-playlist, Phase 2 enriches new items', async () => { + const channel = makeChannel(); + + // Phase 1: flat-playlist discovery + mockExecYtDlp.mockResolvedValueOnce({ + stdout: '', + stderr: '', + exitCode: 0, + }); + mockParseJsonLines.mockReturnValueOnce(YOUTUBE_PLAYLIST_ENTRIES); + + // Phase 2: enrichment for each new item (3 items, none in existingIds) + for (const entry of YOUTUBE_PLAYLIST_ENTRIES) { + const enrichedEntry = { ...entry, upload_date: entry.upload_date }; + mockExecYtDlp.mockResolvedValueOnce({ + stdout: JSON.stringify(enrichedEntry), + stderr: '', + exitCode: 0, + }); + mockParseSingleJson.mockReturnValueOnce(enrichedEntry); + } + + const result = await youtube.fetchRecentContent(channel, { + limit: 50, + existingIds: new Set(), + rateLimitDelay: 0, // no delay in tests + }); + + expect(result).toHaveLength(3); + + // First entry — regular video with enriched publishedAt + expect(result[0]).toEqual({ + platformContentId: 'dQw4w9WgXcQ', + title: 'Never Gonna Give You Up', + url: 'https://www.youtube.com/watch?v=dQw4w9WgXcQ', + contentType: 'video', + duration: 212, + thumbnailUrl: 'https://i.ytimg.com/vi/dQw4w9WgXcQ/hqdefault.jpg', + publishedAt: '2024-03-15T00:00:00Z', + }); + + // Second entry — uses thumbnails array + expect(result[1]?.thumbnailUrl).toBe( + 'https://i.ytimg.com/vi/abc123xyz/hd.jpg' + ); + + // Third entry — live stream + expect(result[2]?.contentType).toBe('livestream'); + + // Verify Phase 1 call uses --flat-playlist + expect(mockExecYtDlp).toHaveBeenNthCalledWith( + 1, + ['--flat-playlist', '--dump-json', '--playlist-items', '1:50', channel.url], + { timeout: 60_000 } + ); + + // Verify Phase 2 calls use --dump-json --no-playlist per video + expect(mockExecYtDlp).toHaveBeenNthCalledWith( + 2, + ['--dump-json', '--no-playlist', 'https://www.youtube.com/watch?v=dQw4w9WgXcQ'], + { timeout: 15_000 } + ); + }); + + it('skips enrichment for items in 
existingIds', async () => { + const channel = makeChannel(); + + // Phase 1: discovery returns 3 items + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce(YOUTUBE_PLAYLIST_ENTRIES); + + // Provide 2 of 3 as existing — only 1 needs enrichment + const existingIds = new Set(['dQw4w9WgXcQ', 'abc123xyz']); + + // Phase 2: only live001 needs enrichment + const enrichedLive = { ...YOUTUBE_PLAYLIST_ENTRIES[2], upload_date: '20240701' }; + mockExecYtDlp.mockResolvedValueOnce({ stdout: JSON.stringify(enrichedLive), stderr: '', exitCode: 0 }); + mockParseSingleJson.mockReturnValueOnce(enrichedLive); + + const result = await youtube.fetchRecentContent(channel, { + limit: 50, + existingIds, + rateLimitDelay: 0, + }); + + expect(result).toHaveLength(3); + // Only 2 execYtDlp calls: 1 flat-playlist + 1 enrichment + expect(mockExecYtDlp).toHaveBeenCalledTimes(2); + }); + + it('skips all enrichment when all items are existing', async () => { + const channel = makeChannel(); + + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce(YOUTUBE_PLAYLIST_ENTRIES); + + const allIds = new Set(YOUTUBE_PLAYLIST_ENTRIES.map((e) => e.id)); + + const result = await youtube.fetchRecentContent(channel, { + limit: 50, + existingIds: allIds, + rateLimitDelay: 0, + }); + + expect(result).toHaveLength(3); + // Only 1 execYtDlp call: flat-playlist only, no enrichment + expect(mockExecYtDlp).toHaveBeenCalledTimes(1); + }); + + it('tolerates individual enrichment failures', async () => { + const channel = makeChannel(); + + // Phase 1: discovery + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce(YOUTUBE_PLAYLIST_ENTRIES); + + // Phase 2: first enrichment succeeds + const enriched1 = { ...YOUTUBE_PLAYLIST_ENTRIES[0] }; + mockExecYtDlp.mockResolvedValueOnce({ stdout: JSON.stringify(enriched1), stderr: '', 
exitCode: 0 }); + mockParseSingleJson.mockReturnValueOnce(enriched1); + + // Second enrichment fails + mockExecYtDlp.mockRejectedValueOnce(new Error('network timeout')); + + // Third enrichment succeeds + const enriched3 = { ...YOUTUBE_PLAYLIST_ENTRIES[2] }; + mockExecYtDlp.mockResolvedValueOnce({ stdout: JSON.stringify(enriched3), stderr: '', exitCode: 0 }); + mockParseSingleJson.mockReturnValueOnce(enriched3); + + // Suppress console.warn for expected enrichment failure + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + const result = await youtube.fetchRecentContent(channel, { + limit: 50, + existingIds: new Set(), + rateLimitDelay: 0, + }); + + warnSpy.mockRestore(); + + // All 3 items returned — failed one uses flat-playlist data + expect(result).toHaveLength(3); + // First item is enriched + expect(result[0]?.publishedAt).toBe('2024-03-15T00:00:00Z'); + // Second item falls back to flat data (no upload_date from flat-playlist → null) + // The flat-playlist entry had upload_date:'20240620', so mapEntry gives it publishedAt + // BUT the enrichment failed, so it uses the flat entry directly which DID have upload_date + // Actually the flat entries include upload_date, so mapEntry will parse it + expect(result[1]?.platformContentId).toBe('abc123xyz'); + // Third item is enriched + expect(result[2]?.publishedAt).toBe('2024-07-01T00:00:00Z'); + }); + + it('uses correct yt-dlp args with custom limit', async () => { + const channel = makeChannel(); + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([]); + + await youtube.fetchRecentContent(channel, { limit: 10 }); + + expect(mockExecYtDlp).toHaveBeenCalledWith( + ['--flat-playlist', '--dump-json', '--playlist-items', '1:10', channel.url], + { timeout: 60_000 } + ); + }); + + it('uses defaults when no options provided', async () => { + const channel = makeChannel(); + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', 
stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([]); + + await youtube.fetchRecentContent(channel); + + expect(mockExecYtDlp).toHaveBeenCalledWith( + ['--flat-playlist', '--dump-json', '--playlist-items', '1:50', channel.url], + { timeout: 60_000 } + ); + }); + }); + + describe('publishedAt extraction', () => { + it('extracts publishedAt from upload_date in YYYYMMDD format', async () => { + const channel = makeChannel(); + // Phase 1: flat discovery + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce(YOUTUBE_PLAYLIST_ENTRIES); + + // All items already exist — no enrichment needed + const allIds = new Set(YOUTUBE_PLAYLIST_ENTRIES.map((e) => e.id)); + + const result = await youtube.fetchRecentContent(channel, { + limit: 50, + existingIds: allIds, + rateLimitDelay: 0, + }); + + // Flat entries include upload_date, so mapEntry parses them + expect(result[0]?.publishedAt).toBe('2024-03-15T00:00:00Z'); + expect(result[1]?.publishedAt).toBe('2024-06-20T00:00:00Z'); + expect(result[2]?.publishedAt).toBe('2024-07-01T00:00:00Z'); + }); + + it('returns null publishedAt when upload_date is missing', async () => { + const channel = makeChannel(); + const entryNoDate = [{ + id: 'vid1', title: 'No Date', + url: 'https://youtube.com/watch?v=vid1', + duration: 100, live_status: null, + }]; + // Phase 1 + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce(entryNoDate); + + // Phase 2: enrich vid1 — return same data without upload_date + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseSingleJson.mockReturnValueOnce(entryNoDate[0]); + + const result = await youtube.fetchRecentContent(channel, { + limit: 50, + existingIds: new Set(), + rateLimitDelay: 0, + }); + + expect(result[0]?.publishedAt).toBeNull(); + }); + + it('returns null publishedAt when upload_date is malformed', async () => { 
+ const channel = makeChannel(); + const entryBadDate = [{ + id: 'vid2', title: 'Bad Date', + url: 'https://youtube.com/watch?v=vid2', + duration: 200, live_status: null, + upload_date: '2024', + }]; + // Phase 1 + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce(entryBadDate); + + // Phase 2: enrich vid2 — return same data with malformed upload_date + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseSingleJson.mockReturnValueOnce(entryBadDate[0]); + + const result = await youtube.fetchRecentContent(channel, { + limit: 50, + existingIds: new Set(), + rateLimitDelay: 0, + }); + + expect(result[0]?.publishedAt).toBeNull(); + }); + }); + + describe('fetchPlaylists', () => { + it('returns playlist metadata with video mappings', async () => { + const channel = makeChannel(); + + // First call: enumerate playlists from /playlists tab + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([ + { id: 'PLabc123', title: 'Best Of 2024' }, + { id: 'PLdef456', title: 'Tutorials' }, + ]); + + // Second call: videos in first playlist + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([ + { id: 'vid-a1' }, + { id: 'vid-a2' }, + { id: 'vid-a3' }, + ]); + + // Third call: videos in second playlist + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([ + { id: 'vid-b1' }, + { id: 'vid-b2' }, + ]); + + const result = await youtube.fetchPlaylists(channel); + + expect(result).toHaveLength(2); + expect(result[0]).toEqual({ + platformPlaylistId: 'PLabc123', + title: 'Best Of 2024', + videoIds: ['vid-a1', 'vid-a2', 'vid-a3'], + }); + expect(result[1]).toEqual({ + platformPlaylistId: 'PLdef456', + title: 'Tutorials', + videoIds: ['vid-b1', 'vid-b2'], + }); + + // Verify first call 
fetches playlists tab + expect(mockExecYtDlp).toHaveBeenNthCalledWith( + 1, + ['--flat-playlist', '--dump-json', `${channel.url}/playlists`], + { timeout: 60_000 } + ); + + // Verify subsequent calls fetch individual playlist videos + expect(mockExecYtDlp).toHaveBeenNthCalledWith( + 2, + ['--flat-playlist', '--dump-json', 'https://www.youtube.com/playlist?list=PLabc123'], + { timeout: 60_000 } + ); + expect(mockExecYtDlp).toHaveBeenNthCalledWith( + 3, + ['--flat-playlist', '--dump-json', 'https://www.youtube.com/playlist?list=PLdef456'], + { timeout: 60_000 } + ); + }); + + it('handles empty playlists tab', async () => { + const channel = makeChannel(); + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([]); + + const result = await youtube.fetchPlaylists(channel); + + expect(result).toEqual([]); + expect(mockExecYtDlp).toHaveBeenCalledTimes(1); + }); + + it('skips entries without playlist ID', async () => { + const channel = makeChannel(); + + // Playlists tab returns one valid and one without id + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([ + { id: 'PLvalid', title: 'Valid Playlist' }, + { title: 'No ID Playlist' }, + { id: '', title: 'Empty ID Playlist' }, + ]); + + // Only the valid playlist triggers a video fetch + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([{ id: 'vid-1' }]); + + const result = await youtube.fetchPlaylists(channel); + + expect(result).toHaveLength(1); + expect(result[0]).toEqual({ + platformPlaylistId: 'PLvalid', + title: 'Valid Playlist', + videoIds: ['vid-1'], + }); + // 1 call for playlists tab + 1 call for the valid playlist's videos + expect(mockExecYtDlp).toHaveBeenCalledTimes(2); + }); + + it('uses "Untitled Playlist" when title is missing', async () => { + const channel = makeChannel(); + + 
mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([ + { id: 'PLnoTitle' }, + ]); + + mockExecYtDlp.mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 }); + mockParseJsonLines.mockReturnValueOnce([]); + + const result = await youtube.fetchPlaylists(channel); + + expect(result[0]?.title).toBe('Untitled Playlist'); + }); + }); +}); + +describe('SoundCloudSource', () => { + const soundcloud = new SoundCloudSource(); + + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe('resolveChannel', () => { + it('resolves artist metadata from a SoundCloud URL', async () => { + mockExecYtDlp.mockResolvedValueOnce({ + stdout: '', + stderr: '', + exitCode: 0, + }); + mockParseSingleJson.mockReturnValueOnce(SOUNDCLOUD_ARTIST_JSON); + + const result = await soundcloud.resolveChannel( + 'https://soundcloud.com/deadmau5' + ); + + expect(result).toEqual({ + name: 'Deadmau5', + platformId: 'deadmau5', + imageUrl: 'https://i1.sndcdn.com/avatars-large.jpg', + url: 'https://soundcloud.com/deadmau5', + platform: 'soundcloud', + }); + }); + }); + + describe('fetchRecentContent', () => { + it('fetches tracks with audio content type', async () => { + const channel = makeChannel({ + platform: Platform.SoundCloud, + url: 'https://soundcloud.com/deadmau5', + }); + mockExecYtDlp.mockResolvedValueOnce({ + stdout: '', + stderr: '', + exitCode: 0, + }); + mockParseJsonLines.mockReturnValueOnce(SOUNDCLOUD_TRACK_ENTRIES); + + const result = await soundcloud.fetchRecentContent(channel, { limit: 20 }); + + expect(result).toHaveLength(3); + + // All SoundCloud entries should be audio type + result.forEach((item) => { + expect(item.contentType).toBe('audio'); + }); + + expect(result[0]).toEqual({ + platformContentId: 'sc-track-001', + title: 'Strobe (Club Edit)', + url: 'https://soundcloud.com/deadmau5/strobe-club-edit', + contentType: 'audio', + duration: 421, + thumbnailUrl: 'https://i1.sndcdn.com/artworks-track1.jpg', + 
publishedAt: null, + }); + + // Entry with thumbnails array + expect(result[1]?.thumbnailUrl).toBe( + 'https://i1.sndcdn.com/artworks-track2-lg.jpg' + ); + + // Entry with null thumbnail + expect(result[2]?.thumbnailUrl).toBeNull(); + }); + + it('passes --sleep-requests 2 for rate limit mitigation', async () => { + const channel = makeChannel({ + platform: Platform.SoundCloud, + url: 'https://soundcloud.com/deadmau5', + }); + mockExecYtDlp.mockResolvedValueOnce({ + stdout: '', + stderr: '', + exitCode: 0, + }); + mockParseJsonLines.mockReturnValueOnce([]); + + await soundcloud.fetchRecentContent(channel, { limit: 50 }); + + expect(mockExecYtDlp).toHaveBeenCalledWith( + expect.arrayContaining(['--sleep-requests', '2']), + expect.any(Object) + ); + }); + }); +}); + +describe('PlatformRegistry', () => { + let registry: PlatformRegistry; + const youtube = new YouTubeSource(); + const soundcloud = new SoundCloudSource(); + + beforeEach(() => { + registry = new PlatformRegistry(); + registry.register(Platform.YouTube, youtube); + registry.register(Platform.SoundCloud, soundcloud); + }); + + describe('get', () => { + it('returns registered source for a platform', () => { + expect(registry.get(Platform.YouTube)).toBe(youtube); + expect(registry.get(Platform.SoundCloud)).toBe(soundcloud); + }); + + it('returns undefined for unregistered platform', () => { + const empty = new PlatformRegistry(); + expect(empty.get(Platform.YouTube)).toBeUndefined(); + }); + }); + + describe('getForUrl', () => { + it('resolves YouTube @handle URLs', () => { + const result = registry.getForUrl( + 'https://www.youtube.com/@LinusTechTips' + ); + expect(result?.platform).toBe('youtube'); + expect(result?.source).toBe(youtube); + }); + + it('resolves YouTube /channel/ URLs', () => { + const result = registry.getForUrl( + 'https://www.youtube.com/channel/UC123' + ); + expect(result?.platform).toBe('youtube'); + }); + + it('resolves YouTube /c/ URLs', () => { + const result = registry.getForUrl( + 
'https://www.youtube.com/c/LinusTechTips' + ); + expect(result?.platform).toBe('youtube'); + }); + + it('resolves YouTube /user/ URLs', () => { + const result = registry.getForUrl( + 'https://www.youtube.com/user/LinusTechTips' + ); + expect(result?.platform).toBe('youtube'); + }); + + it('resolves youtu.be short URLs', () => { + const result = registry.getForUrl('https://youtu.be/dQw4w9WgXcQ'); + expect(result?.platform).toBe('youtube'); + }); + + it('resolves SoundCloud artist URLs', () => { + const result = registry.getForUrl('https://soundcloud.com/deadmau5'); + expect(result?.platform).toBe('soundcloud'); + expect(result?.source).toBe(soundcloud); + }); + + it('rejects SoundCloud track URLs', () => { + const result = registry.getForUrl( + 'https://soundcloud.com/deadmau5/tracks/strobe' + ); + expect(result).toBeNull(); + }); + + it('rejects SoundCloud set URLs', () => { + const result = registry.getForUrl( + 'https://soundcloud.com/deadmau5/sets/album' + ); + expect(result).toBeNull(); + }); + + it('returns null for unknown URLs', () => { + expect(registry.getForUrl('https://example.com')).toBeNull(); + expect(registry.getForUrl('https://spotify.com/artist/123')).toBeNull(); + }); + + it('returns null when platform has no registered source', () => { + const emptyRegistry = new PlatformRegistry(); + const result = emptyRegistry.getForUrl( + 'https://www.youtube.com/@Test' + ); + expect(result).toBeNull(); + }); + }); +}); + +describe('Error propagation', () => { + const youtube = new YouTubeSource(); + const soundcloud = new SoundCloudSource(); + + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('YouTube resolveChannel propagates yt-dlp errors', async () => { + const error = new Error('yt-dlp failed'); + mockExecYtDlp.mockRejectedValueOnce(error); + + await expect( + youtube.resolveChannel('https://www.youtube.com/@Bad') + ).rejects.toThrow('yt-dlp failed'); + }); + + it('SoundCloud resolveChannel propagates yt-dlp errors', async () => { + const error = 
new Error('yt-dlp failed'); + mockExecYtDlp.mockRejectedValueOnce(error); + + await expect( + soundcloud.resolveChannel('https://soundcloud.com/bad') + ).rejects.toThrow('yt-dlp failed'); + }); + + it('YouTube fetchRecentContent propagates yt-dlp errors', async () => { + const channel = makeChannel(); + const error = new Error('network error'); + mockExecYtDlp.mockRejectedValueOnce(error); + + await expect(youtube.fetchRecentContent(channel)).rejects.toThrow( + 'network error' + ); + }); + + it('SoundCloud fetchRecentContent propagates yt-dlp errors', async () => { + const channel = makeChannel({ + platform: Platform.SoundCloud, + url: 'https://soundcloud.com/artist', + }); + const error = new Error('timeout'); + mockExecYtDlp.mockRejectedValueOnce(error); + + await expect(soundcloud.fetchRecentContent(channel)).rejects.toThrow( + 'timeout' + ); + }); +}); diff --git a/src/__tests__/subtitle-download.test.ts b/src/__tests__/subtitle-download.test.ts new file mode 100644 index 0000000..b542e62 --- /dev/null +++ b/src/__tests__/subtitle-download.test.ts @@ -0,0 +1,249 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { mkdtempSync, rmSync, existsSync, writeFileSync, mkdirSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { createChannel } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import { DownloadService } from '../services/download'; +import { QualityAnalyzer } from '../services/quality-analyzer'; +import { FileOrganizer } from '../services/file-organizer'; +import { CookieManager } from '../services/cookie-manager'; +import { RateLimiter } from '../services/rate-limiter'; +import type { ContentItem, Channel, FormatProfile } from '../types/index'; + +// ── Mocks ── + +const execYtDlpMock = 
vi.fn(); +vi.mock('../sources/yt-dlp', async (importOriginal) => { + const actual = (await importOriginal()) as Record; + return { + ...actual, + execYtDlp: (...args: unknown[]) => execYtDlpMock(...args), + }; +}); + +const statMock = vi.fn(); +vi.mock('node:fs/promises', async (importOriginal) => { + const actual = (await importOriginal()) as Record; + return { + ...actual, + stat: (...args: unknown[]) => statMock(...args), + }; +}); + +// ── Test Helpers ── + +let tmpDir: string; +let db: Awaited>; +let testChannel: Channel; +let testContentItem: ContentItem; + +async function setupDb(): Promise { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-sub-test-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + + testChannel = await createChannel(db, { + name: 'Sub Test Channel', + platform: 'youtube', + platformId: 'UC_sub_test', + url: 'https://www.youtube.com/channel/UC_sub_test', + imageUrl: null, + formatProfileId: null, + monitoringEnabled: true, + checkInterval: 360, + metadata: null, + }); + + testContentItem = (await createContentItem(db, { + channelId: testChannel.id, + title: 'Subtitle Test Video', + platformContentId: 'vid_sub_test', + url: 'https://www.youtube.com/watch?v=sub_test', + contentType: 'video', + duration: 600, + status: 'monitored', + }))!; +} + +function cleanup(): void { + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Windows cleanup best-effort + } +} + +function createMockDeps() { + const mediaPath = join(tmpDir, 'media'); + const cookiePath = join(tmpDir, 'cookies'); + mkdirSync(mediaPath, { recursive: true }); + mkdirSync(cookiePath, { recursive: true }); + + const rateLimiter = new RateLimiter({ + youtube: { minIntervalMs: 0 }, + soundcloud: { minIntervalMs: 0 }, + }); + const fileOrganizer = new FileOrganizer(mediaPath); + const qualityAnalyzer = new QualityAnalyzer(); + const 
cookieManager = new CookieManager(cookiePath); + + vi.spyOn(rateLimiter, 'acquire'); + vi.spyOn(rateLimiter, 'reportSuccess'); + vi.spyOn(rateLimiter, 'reportError'); + + vi.spyOn(qualityAnalyzer, 'analyze').mockResolvedValue({ + actualResolution: '1920x1080', + actualCodec: 'h264', + actualBitrate: '5.0 Mbps', + containerFormat: 'mp4', + qualityWarnings: [], + }); + + return { rateLimiter, fileOrganizer, qualityAnalyzer, cookieManager }; +} + +/** Prepare a mock download and return the captured yt-dlp args. */ +async function downloadWithProfile( + profile: FormatProfile | undefined, +): Promise { + const deps = createMockDeps(); + const service = new DownloadService( + db, + deps.rateLimiter, + deps.fileOrganizer, + deps.qualityAnalyzer, + deps.cookieManager, + ); + + const outputPath = join(tmpDir, 'media', 'youtube', 'Sub Test Channel', 'Subtitle Test Video.mp4'); + mkdirSync(join(tmpDir, 'media', 'youtube', 'Sub Test Channel'), { recursive: true }); + writeFileSync(outputPath, 'fake video data'); + + execYtDlpMock.mockResolvedValueOnce({ + stdout: outputPath, + stderr: '', + exitCode: 0, + }); + statMock.mockResolvedValueOnce({ size: 10_000_000 }); + + await service.downloadItem(testContentItem, testChannel, profile); + + return execYtDlpMock.mock.calls[0][0] as string[]; +} + +function makeProfile(overrides: Partial = {}): FormatProfile { + return { + id: 1, + name: 'Test Profile', + videoResolution: '1080p', + audioCodec: null, + audioBitrate: null, + containerFormat: 'mp4', + isDefault: false, + subtitleLanguages: null, + embedSubtitles: false, + createdAt: '', + updatedAt: '', + ...overrides, + }; +} + +// ── Tests ── + +describe('DownloadService — subtitle args', () => { + beforeEach(async () => { + vi.clearAllMocks(); + await setupDb(); + }); + + afterEach(cleanup); + + it('does not include subtitle args when format profile has no subtitle fields', async () => { + const args = await downloadWithProfile(makeProfile()); + + 
expect(args).not.toContain('--write-subs'); + expect(args).not.toContain('--sub-langs'); + expect(args).not.toContain('--embed-subs'); + }); + + it('includes --write-subs and --sub-langs when subtitleLanguages is "en"', async () => { + const args = await downloadWithProfile( + makeProfile({ subtitleLanguages: 'en' }), + ); + + expect(args).toContain('--write-subs'); + expect(args).toContain('--sub-langs'); + const subLangsIdx = args.indexOf('--sub-langs'); + expect(args[subLangsIdx + 1]).toBe('en'); + }); + + it('includes --write-subs and --sub-langs for multiple languages "en,es,fr"', async () => { + const args = await downloadWithProfile( + makeProfile({ subtitleLanguages: 'en,es,fr' }), + ); + + expect(args).toContain('--write-subs'); + expect(args).toContain('--sub-langs'); + const subLangsIdx = args.indexOf('--sub-langs'); + expect(args[subLangsIdx + 1]).toBe('en,es,fr'); + }); + + it('includes --embed-subs when embedSubtitles is true (with subtitleLanguages)', async () => { + const args = await downloadWithProfile( + makeProfile({ subtitleLanguages: 'en', embedSubtitles: true }), + ); + + expect(args).toContain('--embed-subs'); + }); + + it('includes --write-subs, --sub-langs, and --embed-subs together', async () => { + const args = await downloadWithProfile( + makeProfile({ subtitleLanguages: 'en,ja', embedSubtitles: true }), + ); + + expect(args).toContain('--write-subs'); + expect(args).toContain('--sub-langs'); + const subLangsIdx = args.indexOf('--sub-langs'); + expect(args[subLangsIdx + 1]).toBe('en,ja'); + expect(args).toContain('--embed-subs'); + + // Subtitle args should come before --no-playlist + const writeSubsIdx = args.indexOf('--write-subs'); + const noPlaylistIdx = args.indexOf('--no-playlist'); + expect(writeSubsIdx).toBeLessThan(noPlaylistIdx); + }); + + it('does not include any subtitle args when subtitleLanguages is null even if embedSubtitles is true', async () => { + const args = await downloadWithProfile( + makeProfile({ 
subtitleLanguages: null, embedSubtitles: true }), + ); + + expect(args).not.toContain('--write-subs'); + expect(args).not.toContain('--sub-langs'); + expect(args).not.toContain('--embed-subs'); + }); + + it('does not include --write-subs when subtitleLanguages is empty string', async () => { + const args = await downloadWithProfile( + makeProfile({ subtitleLanguages: '' }), + ); + + expect(args).not.toContain('--write-subs'); + expect(args).not.toContain('--sub-langs'); + }); + + it('does not include subtitle args when no format profile is provided', async () => { + const args = await downloadWithProfile(undefined); + + expect(args).not.toContain('--write-subs'); + expect(args).not.toContain('--sub-langs'); + expect(args).not.toContain('--embed-subs'); + }); +}); diff --git a/src/__tests__/system-settings.test.ts b/src/__tests__/system-settings.test.ts new file mode 100644 index 0000000..d59f615 --- /dev/null +++ b/src/__tests__/system-settings.test.ts @@ -0,0 +1,284 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtempSync, rmSync, existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { type FastifyInstance } from 'fastify'; +import { initDatabaseAsync, closeDatabase } from '../db/index'; +import { runMigrations } from '../db/migrate'; +import { buildServer } from '../server/index'; +import { systemConfig } from '../db/schema/index'; +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { + getAppSetting, + setAppSetting, + getAppSettings, + seedAppDefaults, + APP_CHECK_INTERVAL, + APP_CONCURRENT_DOWNLOADS, +} from '../db/repositories/system-config-repository'; +import { QueueService } from '../services/queue'; +import { DownloadService } from '../services/download'; + +/** + * Tests for app settings repository, API endpoints, seeding, and runtime concurrency. 
+ */ +describe('System Settings', () => { + let server: FastifyInstance; + let db: LibSQLDatabase; + let apiKey: string; + let tmpDir: string; + + beforeAll(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'tubearr-settings-')); + const dbPath = join(tmpDir, 'test.db'); + db = await initDatabaseAsync(dbPath); + await runMigrations(dbPath); + server = await buildServer({ db }); + await server.ready(); + + // Read API key from database (generated by auth plugin) + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, 'api_key')) + .limit(1); + apiKey = rows[0]?.value ?? ''; + expect(apiKey).toBeTruthy(); + }); + + afterAll(async () => { + await server.close(); + closeDatabase(); + try { + if (tmpDir && existsSync(tmpDir)) { + rmSync(tmpDir, { recursive: true, force: true }); + } + } catch { + // Temp dir cleanup is best-effort on Windows + } + }); + + // ── Helpers ── + + function authed(opts: Record) { + return { + ...opts, + headers: { 'x-api-key': apiKey, ...(opts.headers as Record | undefined) }, + }; + } + + // ── Repository: getAppSetting ── + + describe('getAppSetting', () => { + it('returns null for a missing key', async () => { + const result = await getAppSetting(db, 'nonexistent.key'); + expect(result).toBeNull(); + }); + + it('returns the value after setAppSetting', async () => { + await setAppSetting(db, 'test.key', '42'); + const result = await getAppSetting(db, 'test.key'); + expect(result).toBe('42'); + }); + }); + + // ── Repository: setAppSetting (upsert) ── + + describe('setAppSetting', () => { + it('inserts a new key', async () => { + await setAppSetting(db, 'upsert.test', 'initial'); + const result = await getAppSetting(db, 'upsert.test'); + expect(result).toBe('initial'); + }); + + it('updates an existing key (upsert)', async () => { + await setAppSetting(db, 'upsert.test', 'updated'); + const result = await getAppSetting(db, 'upsert.test'); + expect(result).toBe('updated'); + }); + }); + + // ── Repository: 
getAppSettings (batch) ── + + describe('getAppSettings', () => { + it('returns a record with null for missing keys', async () => { + const result = await getAppSettings(db, ['missing.a', 'missing.b']); + expect(result).toEqual({ 'missing.a': null, 'missing.b': null }); + }); + + it('returns values for existing keys', async () => { + await setAppSetting(db, 'batch.a', '1'); + await setAppSetting(db, 'batch.b', '2'); + const result = await getAppSettings(db, ['batch.a', 'batch.b', 'batch.c']); + expect(result).toEqual({ 'batch.a': '1', 'batch.b': '2', 'batch.c': null }); + }); + }); + + // ── Repository: seedAppDefaults ── + + describe('seedAppDefaults', () => { + it('inserts defaults when keys are missing', async () => { + // Clear the keys first (if leftover from other tests) + await db.delete(systemConfig).where(eq(systemConfig.key, APP_CHECK_INTERVAL)); + await db.delete(systemConfig).where(eq(systemConfig.key, APP_CONCURRENT_DOWNLOADS)); + + await seedAppDefaults(db); + + const interval = await getAppSetting(db, APP_CHECK_INTERVAL); + const concurrent = await getAppSetting(db, APP_CONCURRENT_DOWNLOADS); + expect(interval).not.toBeNull(); + expect(concurrent).not.toBeNull(); + }); + + it('does NOT overwrite existing values', async () => { + // Set custom values + await setAppSetting(db, APP_CHECK_INTERVAL, '999'); + await setAppSetting(db, APP_CONCURRENT_DOWNLOADS, '7'); + + // Seed again + await seedAppDefaults(db); + + // Verify custom values persist + const interval = await getAppSetting(db, APP_CHECK_INTERVAL); + const concurrent = await getAppSetting(db, APP_CONCURRENT_DOWNLOADS); + expect(interval).toBe('999'); + expect(concurrent).toBe('7'); + }); + }); + + // ── API: GET /api/v1/system/settings ── + + describe('GET /api/v1/system/settings', () => { + it('returns seeded defaults as numbers', async () => { + // Ensure defaults are seeded + await db.delete(systemConfig).where(eq(systemConfig.key, APP_CHECK_INTERVAL)); + await 
db.delete(systemConfig).where(eq(systemConfig.key, APP_CONCURRENT_DOWNLOADS)); + await seedAppDefaults(db); + + const res = await server.inject( + authed({ method: 'GET', url: '/api/v1/system/settings' }) + ); + expect(res.statusCode).toBe(200); + const body = JSON.parse(res.payload); + expect(typeof body.checkInterval).toBe('number'); + expect(typeof body.concurrentDownloads).toBe('number'); + expect(body.checkInterval).toBeGreaterThanOrEqual(1); + expect(body.concurrentDownloads).toBeGreaterThanOrEqual(1); + }); + + it('returns 401 without auth', async () => { + const res = await server.inject({ method: 'GET', url: '/api/v1/system/settings' }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── API: PUT /api/v1/system/settings ── + + describe('PUT /api/v1/system/settings', () => { + it('updates check interval and returns new values', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: '/api/v1/system/settings', + payload: { checkInterval: 120 }, + }) + ); + expect(res.statusCode).toBe(200); + const body = JSON.parse(res.payload); + expect(body.checkInterval).toBe(120); + }); + + it('updates concurrent downloads and returns new values', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: '/api/v1/system/settings', + payload: { concurrentDownloads: 5 }, + }) + ); + expect(res.statusCode).toBe(200); + const body = JSON.parse(res.payload); + expect(body.concurrentDownloads).toBe(5); + }); + + it('updates both at once', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: '/api/v1/system/settings', + payload: { checkInterval: 60, concurrentDownloads: 3 }, + }) + ); + expect(res.statusCode).toBe(200); + const body = JSON.parse(res.payload); + expect(body.checkInterval).toBe(60); + expect(body.concurrentDownloads).toBe(3); + }); + + it('rejects checkInterval < 1', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: 
'/api/v1/system/settings', + payload: { checkInterval: 0 }, + }) + ); + expect(res.statusCode).toBe(400); + const body = JSON.parse(res.payload); + expect(body.message).toContain('checkInterval'); + }); + + it('rejects concurrentDownloads < 1', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: '/api/v1/system/settings', + payload: { concurrentDownloads: 0 }, + }) + ); + expect(res.statusCode).toBe(400); + const body = JSON.parse(res.payload); + expect(body.message).toContain('concurrentDownloads'); + }); + + it('rejects concurrentDownloads > 10', async () => { + const res = await server.inject( + authed({ + method: 'PUT', + url: '/api/v1/system/settings', + payload: { concurrentDownloads: 11 }, + }) + ); + expect(res.statusCode).toBe(400); + const body = JSON.parse(res.payload); + expect(body.message).toContain('concurrentDownloads'); + }); + + it('returns 401 without auth', async () => { + const res = await server.inject({ + method: 'PUT', + url: '/api/v1/system/settings', + payload: { checkInterval: 100 }, + }); + expect(res.statusCode).toBe(401); + }); + }); + + // ── QueueService.setConcurrency ── + + describe('QueueService.setConcurrency', () => { + it('updates the concurrency value', () => { + // Create a minimal QueueService with a mock DownloadService + const mockDownloadService = {} as DownloadService; + const qs = new QueueService(db, mockDownloadService, { concurrency: 2 }); + + // Access private field via cast to verify initial value + expect((qs as any).concurrency).toBe(2); + + qs.setConcurrency(5); + expect((qs as any).concurrency).toBe(5); + }); + }); +}); diff --git a/src/__tests__/yt-dlp.test.ts b/src/__tests__/yt-dlp.test.ts new file mode 100644 index 0000000..b80a66d --- /dev/null +++ b/src/__tests__/yt-dlp.test.ts @@ -0,0 +1,269 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +// vi.hoisted ensures this runs before hoisted vi.mock calls +const { execFileMock } = vi.hoisted(() => ({ + 
execFileMock: vi.fn(), +})); + +vi.mock('node:child_process', () => ({ + execFile: execFileMock, +})); + +vi.mock('node:util', () => ({ + promisify: () => execFileMock, +})); + +import { + execYtDlp, + parseJsonLines, + parseSingleJson, + checkYtDlpAvailable, + YtDlpError, +} from '../sources/yt-dlp'; + +describe('yt-dlp wrapper', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe('execYtDlp', () => { + it('returns stdout/stderr on successful execution', async () => { + execFileMock.mockResolvedValueOnce({ + stdout: '{"id": "abc123"}', + stderr: '', + }); + + const result = await execYtDlp(['--dump-json', 'https://example.com']); + + expect(result).toEqual({ + stdout: '{"id": "abc123"}', + stderr: '', + exitCode: 0, + }); + expect(execFileMock).toHaveBeenCalledWith( + 'yt-dlp', + ['--dump-json', 'https://example.com'], + expect.objectContaining({ + timeout: 30_000, + windowsHide: true, + }) + ); + }); + + it('respects custom timeout', async () => { + execFileMock.mockResolvedValueOnce({ stdout: '{}', stderr: '' }); + + await execYtDlp(['--version'], { timeout: 5_000 }); + + expect(execFileMock).toHaveBeenCalledWith( + 'yt-dlp', + ['--version'], + expect.objectContaining({ timeout: 5_000 }) + ); + }); + + it('throws YtDlpError on non-zero exit code', async () => { + const error = Object.assign(new Error('Command failed'), { + code: 1, + stderr: 'ERROR: Video not found', + stdout: '', + }); + execFileMock.mockRejectedValueOnce(error); + + await expect(execYtDlp(['--dump-json', 'bad-url'])).rejects.toThrow( + YtDlpError + ); + }); + + it('throws YtDlpError with stderr and exitCode on failure', async () => { + const error = Object.assign(new Error('Command failed'), { + code: 2, + stderr: 'ERROR: Unable to download', + stdout: '', + }); + execFileMock.mockRejectedValueOnce(error); + + try { + await execYtDlp(['--dump-json', 'bad-url']); + expect.unreachable('should have thrown'); + } catch (err) { + expect(err).toBeInstanceOf(YtDlpError); + const 
ytErr = err as YtDlpError; + expect(ytErr.stderr).toBe('ERROR: Unable to download'); + expect(ytErr.exitCode).toBe(2); + expect(ytErr.isRateLimit).toBe(false); + } + }); + + it('detects rate limiting from stderr containing "429"', async () => { + const error = Object.assign(new Error('Command failed'), { + code: 1, + stderr: 'ERROR: HTTP Error 429: Too Many Requests', + stdout: '', + }); + execFileMock.mockRejectedValueOnce(error); + + try { + await execYtDlp(['--dump-json', 'some-url']); + expect.unreachable('should have thrown'); + } catch (err) { + expect(err).toBeInstanceOf(YtDlpError); + const ytErr = err as YtDlpError; + expect(ytErr.isRateLimit).toBe(true); + } + }); + + it('detects rate limiting from "Too Many Requests" text', async () => { + const error = Object.assign(new Error('Command failed'), { + code: 1, + stderr: 'ERROR: Too Many Requests. Please retry later.', + stdout: '', + }); + execFileMock.mockRejectedValueOnce(error); + + try { + await execYtDlp(['--dump-json', 'some-url']); + expect.unreachable('should have thrown'); + } catch (err) { + const ytErr = err as YtDlpError; + expect(ytErr.isRateLimit).toBe(true); + } + }); + + it('throws timeout error on AbortError', async () => { + const error = Object.assign(new Error('The operation was aborted'), { + name: 'AbortError', + stderr: '', + stdout: '', + }); + execFileMock.mockRejectedValueOnce(error); + + try { + await execYtDlp(['--dump-json', 'slow-url'], { timeout: 1000 }); + expect.unreachable('should have thrown'); + } catch (err) { + expect(err).toBeInstanceOf(YtDlpError); + const ytErr = err as YtDlpError; + expect(ytErr.message).toContain('timed out'); + expect(ytErr.exitCode).toBe(-1); + } + }); + + it('handles empty stdout/stderr fields gracefully', async () => { + execFileMock.mockResolvedValueOnce({ + stdout: undefined, + stderr: undefined, + }); + + const result = await execYtDlp(['--version']); + + expect(result.stdout).toBe(''); + expect(result.stderr).toBe(''); + }); + }); + + 
describe('parseJsonLines', () => { + it('parses multiple newline-delimited JSON objects', () => { + const input = [ + '{"id": "a", "title": "Video A"}', + '{"id": "b", "title": "Video B"}', + '{"id": "c", "title": "Video C"}', + ].join('\n'); + + const result = parseJsonLines(input); + + expect(result).toHaveLength(3); + expect(result[0]).toEqual({ id: 'a', title: 'Video A' }); + expect(result[2]).toEqual({ id: 'c', title: 'Video C' }); + }); + + it('skips empty lines', () => { + const input = '{"id": "a"}\n\n\n{"id": "b"}\n'; + + const result = parseJsonLines(input); + + expect(result).toHaveLength(2); + }); + + it('skips unparseable lines', () => { + const input = '{"id": "a"}\n[download] 50%\n{"id": "b"}'; + + const result = parseJsonLines(input); + + expect(result).toHaveLength(2); + }); + + it('returns empty array for empty input', () => { + expect(parseJsonLines('')).toEqual([]); + expect(parseJsonLines('\n\n')).toEqual([]); + }); + }); + + describe('parseSingleJson', () => { + it('parses a valid JSON object', () => { + const input = '{"channel": "Test Channel", "channel_id": "UC123"}'; + + const result = parseSingleJson(input) as Record; + + expect(result.channel).toBe('Test Channel'); + expect(result.channel_id).toBe('UC123'); + }); + + it('trims whitespace before parsing', () => { + const input = ' \n{"id": "test"}\n '; + + const result = parseSingleJson(input) as Record; + + expect(result.id).toBe('test'); + }); + + it('throws YtDlpError on empty output', () => { + expect(() => parseSingleJson('')).toThrow(YtDlpError); + expect(() => parseSingleJson(' \n ')).toThrow(YtDlpError); + }); + + it('throws YtDlpError on invalid JSON', () => { + expect(() => parseSingleJson('not json at all')).toThrow(YtDlpError); + }); + }); + + describe('checkYtDlpAvailable', () => { + it('returns true when yt-dlp --version succeeds', async () => { + execFileMock.mockResolvedValueOnce({ + stdout: '2024.01.01', + stderr: '', + }); + + const available = await 
checkYtDlpAvailable(); + + expect(available).toBe(true); + }); + + it('returns false when yt-dlp is not found', async () => { + execFileMock.mockRejectedValueOnce(new Error('ENOENT')); + + const available = await checkYtDlpAvailable(); + + expect(available).toBe(false); + }); + }); + + describe('YtDlpError', () => { + it('carries stderr, exitCode, and name', () => { + const err = new YtDlpError('test error', 'some stderr', 42); + + expect(err.message).toBe('test error'); + expect(err.name).toBe('YtDlpError'); + expect(err.stderr).toBe('some stderr'); + expect(err.exitCode).toBe(42); + expect(err.isRateLimit).toBe(false); + expect(err).toBeInstanceOf(Error); + }); + + it('detects rate limit from stderr', () => { + const err = new YtDlpError('rate limited', 'HTTP Error 429', 1); + + expect(err.isRateLimit).toBe(true); + }); + }); +}); diff --git a/src/config/index.ts b/src/config/index.ts new file mode 100644 index 0000000..128c8e4 --- /dev/null +++ b/src/config/index.ts @@ -0,0 +1,117 @@ +import { config as loadEnv } from 'dotenv'; + +loadEnv(); + +export interface RateLimiterPlatformConfig { + minIntervalMs: number; +} + +export interface AppConfig { + /** Server listen port */ + port: number; + /** Path to SQLite database file */ + dbPath: string; + /** Fastify log level */ + logLevel: string; + /** API key from env (optional — auto-generated and stored in DB if not set) */ + apiKey: string | null; + /** Node environment */ + nodeEnv: string; + /** Scheduler configuration */ + scheduler: { + enabled: boolean; + defaultCheckInterval: number; // minutes + }; + /** Per-platform rate limiter configuration */ + rateLimiter: { + youtube: RateLimiterPlatformConfig; + soundcloud: RateLimiterPlatformConfig; + }; + /** Root directory for organized downloaded files */ + mediaPath: string; + /** Maximum concurrent downloads */ + concurrentDownloads: number; + /** Directory for per-platform cookie files */ + cookiePath: string; +} + +function parsePort(raw: string | 
undefined): number { + const fallback = 8989; + if (!raw) return fallback; + const parsed = parseInt(raw, 10); + if (isNaN(parsed) || parsed < 1 || parsed > 65535) { + console.error( + `[config] TUBEARR_PORT="${raw}" is not a valid port (1–65535). Falling back to ${fallback}.` + ); + return fallback; + } + return parsed; +} + +const VALID_LOG_LEVELS = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const; + +function parseLogLevel(raw: string | undefined): string { + const fallback = 'info'; + if (!raw) return fallback; + const normalized = raw.toLowerCase().trim(); + if (!(VALID_LOG_LEVELS as readonly string[]).includes(normalized)) { + console.error( + `[config] TUBEARR_LOG_LEVEL="${raw}" is not valid (${VALID_LOG_LEVELS.join(', ')}). Falling back to "${fallback}".` + ); + return fallback; + } + return normalized; +} + +function parsePositiveInt(raw: string | undefined, fallback: number): number { + if (!raw) return fallback; + const parsed = parseInt(raw, 10); + if (isNaN(parsed) || parsed < 1) return fallback; + return parsed; +} + +function buildConfig(): AppConfig { + const port = parsePort(process.env.TUBEARR_PORT); + const dbPath = process.env.TUBEARR_DB_PATH || './data/tubearr.db'; + const logLevel = parseLogLevel(process.env.TUBEARR_LOG_LEVEL); + const apiKey = process.env.TUBEARR_API_KEY || null; + const nodeEnv = process.env.NODE_ENV || 'development'; + + const scheduler = { + enabled: process.env.TUBEARR_SCHEDULER_ENABLED !== 'false', + defaultCheckInterval: parsePositiveInt( + process.env.TUBEARR_DEFAULT_CHECK_INTERVAL, + 360 + ), + }; + + const rateLimiter = { + youtube: { + minIntervalMs: parsePositiveInt( + process.env.TUBEARR_RATELIMIT_YOUTUBE_MS, + 1000 + ), + }, + soundcloud: { + minIntervalMs: parsePositiveInt( + process.env.TUBEARR_RATELIMIT_SOUNDCLOUD_MS, + 3000 + ), + }, + }; + + const mediaPath = process.env.TUBEARR_MEDIA_PATH || './media'; + const concurrentDownloads = parsePositiveInt( + process.env.TUBEARR_CONCURRENT_DOWNLOADS, + 
2 + ); + const cookiePath = process.env.TUBEARR_COOKIE_PATH || './data/cookies'; + + return Object.freeze({ + port, dbPath, logLevel, apiKey, nodeEnv, scheduler, rateLimiter, + mediaPath, concurrentDownloads, cookiePath, + }); +} + +/** Frozen application configuration — loaded once at import time. */ +export const appConfig = buildConfig(); diff --git a/src/db/index.ts b/src/db/index.ts new file mode 100644 index 0000000..af69b49 --- /dev/null +++ b/src/db/index.ts @@ -0,0 +1,95 @@ +import { createClient, type Client } from '@libsql/client'; +import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; +import { mkdirSync } from 'node:fs'; +import { dirname } from 'node:path'; +import * as schema from './schema/index'; + +let libsqlClient: Client | null = null; +let drizzleDb: LibSQLDatabase | null = null; + +/** + * Initialize the SQLite database connection with WAL mode. + * Creates the data directory if it doesn't exist. + * Uses @libsql/client for cross-platform compatibility (no native compilation). + */ +export function initDatabase(dbPath: string): LibSQLDatabase { + if (drizzleDb) return drizzleDb; + + // Ensure the directory exists + const dir = dirname(dbPath); + mkdirSync(dir, { recursive: true }); + + // libsql requires file: prefix for local files + const url = dbPath.startsWith('file:') ? dbPath : `file:${dbPath}`; + libsqlClient = createClient({ url }); + + drizzleDb = drizzle(libsqlClient, { schema }); + + // Enable WAL mode and performance pragmas synchronously via executeMultiple + // Note: libsql client pragma calls are async, but we fire them eagerly. + // WAL mode is set on first actual query via the pragmas below. 
+ void libsqlClient.executeMultiple(` + PRAGMA journal_mode = WAL; + PRAGMA busy_timeout = 5000; + PRAGMA foreign_keys = ON; + `).then(() => { + console.log(`[database] Opened ${dbPath} (WAL mode enabled)`); + }).catch((err) => { + console.error(`[database] Failed to set pragmas:`, err); + }); + + return drizzleDb; +} + +/** + * Initialize the database and wait for WAL mode to be confirmed. + * Use this when you need to guarantee pragmas are set before proceeding. + */ +export async function initDatabaseAsync(dbPath: string): Promise> { + if (drizzleDb) return drizzleDb; + + const dir = dirname(dbPath); + mkdirSync(dir, { recursive: true }); + + const url = dbPath.startsWith('file:') ? dbPath : `file:${dbPath}`; + libsqlClient = createClient({ url }); + + await libsqlClient.executeMultiple(` + PRAGMA journal_mode = WAL; + PRAGMA busy_timeout = 5000; + PRAGMA foreign_keys = ON; + `); + + const walCheck = await libsqlClient.execute('PRAGMA journal_mode'); + const journalMode = walCheck.rows[0]?.journal_mode ?? 'unknown'; + console.log(`[database] Opened ${dbPath} (journal_mode: ${journalMode})`); + + drizzleDb = drizzle(libsqlClient, { schema }); + return drizzleDb; +} + +/** Get the active Drizzle database instance. Throws if not initialized. */ +export function getDb(): LibSQLDatabase { + if (!drizzleDb) { + throw new Error('[database] Database not initialized. Call initDatabase() first.'); + } + return drizzleDb; +} + +/** Get the raw libsql client instance. Throws if not initialized. */ +export function getRawClient(): Client { + if (!libsqlClient) { + throw new Error('[database] Database not initialized. Call initDatabase() first.'); + } + return libsqlClient; +} + +/** Close the database connection cleanly. 
*/ +export function closeDatabase(): void { + if (libsqlClient) { + libsqlClient.close(); + libsqlClient = null; + drizzleDb = null; + console.log('[database] Connection closed.'); + } +} diff --git a/src/db/migrate.ts b/src/db/migrate.ts new file mode 100644 index 0000000..267df3e --- /dev/null +++ b/src/db/migrate.ts @@ -0,0 +1,37 @@ +import { migrate } from 'drizzle-orm/libsql/migrator'; +import { resolve, dirname } from 'node:path'; +import { fileURLToPath } from 'node:url'; +import { appConfig } from '../config/index'; +import { initDatabaseAsync, closeDatabase } from './index'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +/** + * Run database migrations from the drizzle/ directory. + * Safe to call multiple times — already-applied migrations are skipped. + */ +export async function runMigrations(dbPath?: string): Promise { + const path = dbPath ?? appConfig.dbPath; + const db = await initDatabaseAsync(path); + const migrationsFolder = resolve(__dirname, '../../drizzle'); + + console.log(`[migrate] Applying migrations from ${migrationsFolder}`); + await migrate(db, { migrationsFolder }); + console.log('[migrate] Migrations applied successfully.'); +} + +// Run directly when invoked as a script +const isDirectRun = + process.argv[1]?.replace(/\\/g, '/').endsWith('migrate.ts') || + process.argv[1]?.replace(/\\/g, '/').endsWith('migrate.js'); + +if (isDirectRun) { + runMigrations() + .catch((err) => { + console.error('[migrate] Migration failed:', err); + process.exit(1); + }) + .finally(() => { + closeDatabase(); + }); +} diff --git a/src/db/repositories/channel-repository.ts b/src/db/repositories/channel-repository.ts new file mode 100644 index 0000000..ecc955b --- /dev/null +++ b/src/db/repositories/channel-repository.ts @@ -0,0 +1,193 @@ +import { eq, and, sql } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { channels, contentItems } from 
'../schema/index'; +import type { Channel, Platform, MonitoringMode } from '../../types/index'; + +// ── Types ── + +/** Fields needed to create a new channel (auto-generated fields excluded). */ +export type CreateChannelData = Omit< + Channel, + 'id' | 'createdAt' | 'updatedAt' | 'lastCheckedAt' | 'lastCheckStatus' | 'monitoringMode' +> & { monitoringMode?: Channel['monitoringMode'] }; + +/** Fields that can be updated on an existing channel. */ +export type UpdateChannelData = Partial< + Pick +>; + +type Db = LibSQLDatabase; + +// ── Repository Functions ── + +/** Insert a new channel and return the created row. */ +export async function createChannel( + db: Db, + data: CreateChannelData +): Promise { + const result = await db + .insert(channels) + .values({ + name: data.name, + platform: data.platform, + platformId: data.platformId, + url: data.url, + monitoringEnabled: data.monitoringEnabled, + checkInterval: data.checkInterval, + imageUrl: data.imageUrl, + metadata: data.metadata, + formatProfileId: data.formatProfileId, + monitoringMode: data.monitoringMode ?? 'all', + }) + .returning(); + + return mapRow(result[0]); +} + +/** Get a channel by ID. Returns null if not found. */ +export async function getChannelById( + db: Db, + id: number +): Promise { + const rows = await db + .select() + .from(channels) + .where(eq(channels.id, id)) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +/** Get all channels, ordered by name. */ +export async function getAllChannels(db: Db): Promise { + const rows = await db.select().from(channels).orderBy(channels.name); + return rows.map(mapRow); +} + +/** Get all channels with monitoring enabled. */ +export async function getEnabledChannels(db: Db): Promise { + const rows = await db + .select() + .from(channels) + .where(eq(channels.monitoringEnabled, true)) + .orderBy(channels.name); + + return rows.map(mapRow); +} + +/** Update specific fields on a channel. Sets updatedAt to now. 
Returns updated row or null. */ +export async function updateChannel( + db: Db, + id: number, + data: UpdateChannelData +): Promise { + const result = await db + .update(channels) + .set({ + ...data, + updatedAt: sql`(datetime('now'))`, + }) + .where(eq(channels.id, id)) + .returning(); + + return result.length > 0 ? mapRow(result[0]) : null; +} + +/** + * Change a channel's monitoring mode and cascade the `monitored` flag to all existing content items. + * + * Cascade logic: + * - 'all' or 'existing' → existing items become monitored (true) + * - 'future' or 'none' → existing items become unmonitored (false) + * + * Also syncs `monitoringEnabled`: mode !== 'none' → enabled (per D022). + * Returns updated channel or null if not found. + */ +export async function setMonitoringMode( + db: Db, + id: number, + mode: MonitoringMode +): Promise { + // Step 1: Cascade monitored flag to all content items for this channel + const cascadeMonitored = mode === 'all' || mode === 'existing'; + await db + .update(contentItems) + .set({ + monitored: cascadeMonitored, + updatedAt: sql`(datetime('now'))`, + }) + .where(eq(contentItems.channelId, id)); + + // Step 2: Update the channel's monitoringMode and monitoringEnabled + const result = await db + .update(channels) + .set({ + monitoringMode: mode, + monitoringEnabled: mode !== 'none', + updatedAt: sql`(datetime('now'))`, + }) + .where(eq(channels.id, id)) + .returning(); + + return result.length > 0 ? mapRow(result[0]) : null; +} + +/** Delete a channel by ID. Returns true if a row was deleted. */ +export async function deleteChannel( + db: Db, + id: number +): Promise { + const result = await db + .delete(channels) + .where(eq(channels.id, id)) + .returning({ id: channels.id }); + + return result.length > 0; +} + +/** Find a channel by platform and platformId (for duplicate detection). 
*/ +export async function getChannelByPlatformId( + db: Db, + platform: string, + platformId: string +): Promise { + const rows = await db + .select() + .from(channels) + .where( + and( + eq(channels.platform, platform), + eq(channels.platformId, platformId) + ) + ) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +// ── Row Mapping ── + +/** + * Map a raw Drizzle row to the Channel domain type. + * Ensures boolean and nullable fields are correctly typed. + */ +function mapRow(row: typeof channels.$inferSelect): Channel { + return { + id: row.id, + name: row.name, + platform: row.platform as Platform, + platformId: row.platformId, + url: row.url, + monitoringEnabled: row.monitoringEnabled, + checkInterval: row.checkInterval, + imageUrl: row.imageUrl, + metadata: row.metadata as Record | null, + formatProfileId: row.formatProfileId, + monitoringMode: (row.monitoringMode ?? 'all') as Channel['monitoringMode'], + createdAt: row.createdAt, + updatedAt: row.updatedAt, + lastCheckedAt: row.lastCheckedAt, + lastCheckStatus: row.lastCheckStatus as Channel['lastCheckStatus'], + }; +} diff --git a/src/db/repositories/content-repository.ts b/src/db/repositories/content-repository.ts new file mode 100644 index 0000000..a371274 --- /dev/null +++ b/src/db/repositories/content-repository.ts @@ -0,0 +1,364 @@ +import { eq, and, desc, like, sql, inArray } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { contentItems } from '../schema/index'; +import type { ContentItem, ContentType, ContentStatus, QualityInfo } from '../../types/index'; +import type { ContentCounts } from '../../types/api'; + +// ── Types ── + +/** Fields needed to create a new content item. 
*/ +export interface CreateContentItemData { + channelId: number; + title: string; + platformContentId: string; + url: string; + contentType: ContentType; + duration: number | null; + thumbnailUrl?: string | null; + status?: ContentStatus; + publishedAt?: string | null; + monitored?: boolean; +} + +/** Fields that can be updated on an existing content item (post-download). */ +export interface UpdateContentItemData { + filePath?: string | null; + fileSize?: number | null; + format?: string | null; + qualityMetadata?: QualityInfo | null; + status?: ContentStatus; + downloadedAt?: string | null; +} + +type Db = LibSQLDatabase; + +// ── Repository Functions ── + +/** + * Insert a new content item with dedup on (channelId, platformContentId). + * Returns the created row, or null if the item already exists. + */ +export async function createContentItem( + db: Db, + data: CreateContentItemData +): Promise { + // Check for existing item first — dedup by (channelId, platformContentId) + const existing = await db + .select({ id: contentItems.id }) + .from(contentItems) + .where( + and( + eq(contentItems.channelId, data.channelId), + eq(contentItems.platformContentId, data.platformContentId) + ) + ) + .limit(1); + + if (existing.length > 0) { + return null; // Already exists — skip + } + + const result = await db + .insert(contentItems) + .values({ + channelId: data.channelId, + title: data.title, + platformContentId: data.platformContentId, + url: data.url, + contentType: data.contentType, + duration: data.duration, + thumbnailUrl: data.thumbnailUrl ?? null, + status: data.status ?? 'monitored', + publishedAt: data.publishedAt ?? null, + monitored: data.monitored ?? true, + }) + .returning(); + + return mapRow(result[0]); +} + +/** Get all content items for a channel, ordered by creation date (newest first). 
*/ +export async function getContentByChannelId( + db: Db, + channelId: number +): Promise { + const rows = await db + .select() + .from(contentItems) + .where(eq(contentItems.channelId, channelId)) + .orderBy(desc(contentItems.createdAt)); + + return rows.map(mapRow); +} + +/** Check if a specific content item exists for a channel. Returns the item or null. */ +export async function getContentByPlatformContentId( + db: Db, + channelId: number, + platformContentId: string +): Promise { + const rows = await db + .select() + .from(contentItems) + .where( + and( + eq(contentItems.channelId, channelId), + eq(contentItems.platformContentId, platformContentId) + ) + ) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +/** + * Get recent platformContentIds for a channel (for fast dedup checking during monitoring). + * Returns just the IDs, not full rows, to minimize memory usage. + */ +export async function getRecentContentIds( + db: Db, + channelId: number, + limit = 200 +): Promise { + const rows = await db + .select({ platformContentId: contentItems.platformContentId }) + .from(contentItems) + .where(eq(contentItems.channelId, channelId)) + .orderBy(desc(contentItems.createdAt)) + .limit(limit); + + return rows.map((r) => r.platformContentId); +} + +// ── Single-Item Access ── + +/** Get a content item by ID. Returns null if not found. */ +export async function getContentItemById( + db: Db, + id: number +): Promise { + const rows = await db + .select() + .from(contentItems) + .where(eq(contentItems.id, id)) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +/** + * Update a content item with partial data. Sets updatedAt to now. + * Returns updated item or null if not found. 
+ */ +export async function updateContentItem( + db: Db, + id: number, + data: UpdateContentItemData +): Promise { + const result = await db + .update(contentItems) + .set({ + ...data, + updatedAt: sql`(datetime('now'))`, + }) + .where(eq(contentItems.id, id)) + .returning(); + + return result.length > 0 ? mapRow(result[0]) : null; +} + +/** + * Set the `monitored` flag on a single content item. + * Returns updated item or null if not found. + */ +export async function setMonitored( + db: Db, + id: number, + monitored: boolean +): Promise { + const result = await db + .update(contentItems) + .set({ + monitored, + updatedAt: sql`(datetime('now'))`, + }) + .where(eq(contentItems.id, id)) + .returning(); + + return result.length > 0 ? mapRow(result[0]) : null; +} + +/** + * Set the `monitored` flag on multiple content items at once. + * Returns the number of items actually updated. + */ +export async function bulkSetMonitored( + db: Db, + ids: number[], + monitored: boolean +): Promise { + if (ids.length === 0) return 0; + + const result = await db + .update(contentItems) + .set({ + monitored, + updatedAt: sql`(datetime('now'))`, + }) + .where(inArray(contentItems.id, ids)) + .returning({ id: contentItems.id }); + + return result.length; +} + +/** Get content items by status, ordered by creation date (oldest first). */ +export async function getContentItemsByStatus( + db: Db, + status: ContentStatus, + limit?: number +): Promise { + let query = db + .select() + .from(contentItems) + .where(eq(contentItems.status, status)) + .orderBy(contentItems.createdAt); + + if (limit !== undefined) { + query = query.limit(limit) as typeof query; + } + + const rows = await query; + return rows.map(mapRow); +} + +// ── Paginated Listing ── + +/** Optional filters for querying content items. */ +export interface ContentItemFilters { + status?: ContentStatus; + contentType?: ContentType; + channelId?: number; + search?: string; +} + +/** Paginated result of content items. 
*/ +export interface PaginatedContentResult { + items: ContentItem[]; + total: number; +} + +/** + * Get content items with optional filters and pagination. + * Returns items ordered by id DESC (newest first) and total count for pagination. + * Uses id DESC as tiebreaker per K001 (datetime granularity). + */ +export async function getAllContentItems( + db: Db, + filters?: ContentItemFilters, + page = 1, + pageSize = 25 +): Promise { + const conditions = buildContentFilterConditions(filters); + const offset = (page - 1) * pageSize; + + // Count total matching records + const countResult = await db + .select({ count: sql`count(*)` }) + .from(contentItems) + .where(conditions.length > 0 ? and(...conditions) : undefined); + + const total = Number(countResult[0].count); + + // Fetch paginated results + const rows = await db + .select() + .from(contentItems) + .where(conditions.length > 0 ? and(...conditions) : undefined) + .orderBy(desc(contentItems.id)) + .limit(pageSize) + .offset(offset); + + return { + items: rows.map(mapRow), + total, + }; +} + +function buildContentFilterConditions(filters?: ContentItemFilters) { + const conditions = []; + + if (filters?.status) { + conditions.push(eq(contentItems.status, filters.status)); + } + if (filters?.contentType) { + conditions.push(eq(contentItems.contentType, filters.contentType)); + } + if (filters?.channelId !== undefined) { + conditions.push(eq(contentItems.channelId, filters.channelId)); + } + if (filters?.search) { + conditions.push(like(contentItems.title, `%${filters.search}%`)); + } + + return conditions; +} + +// ── Content Counts by Channel ── + +/** + * Aggregate content counts (total, monitored, downloaded) grouped by channel ID. + * Returns a Map so callers can merge counts into channel objects efficiently. 
+ */ +export async function getContentCountsByChannelIds( + db: Db, + channelIds: number[] +): Promise> { + if (channelIds.length === 0) return new Map(); + + const rows = await db + .select({ + channelId: contentItems.channelId, + total: sql`count(*)`, + monitored: sql`sum(case when ${contentItems.monitored} = 1 then 1 else 0 end)`, + downloaded: sql`sum(case when ${contentItems.status} = 'downloaded' then 1 else 0 end)`, + }) + .from(contentItems) + .where(inArray(contentItems.channelId, channelIds)) + .groupBy(contentItems.channelId); + + const map = new Map(); + for (const row of rows) { + map.set(row.channelId, { + total: Number(row.total), + monitored: Number(row.monitored), + downloaded: Number(row.downloaded), + }); + } + return map; +} + +// ── Row Mapping ── + +function mapRow(row: typeof contentItems.$inferSelect): ContentItem { + return { + id: row.id, + channelId: row.channelId, + title: row.title, + platformContentId: row.platformContentId, + url: row.url, + contentType: row.contentType as ContentType, + duration: row.duration, + filePath: row.filePath, + fileSize: row.fileSize, + format: row.format, + qualityMetadata: row.qualityMetadata as ContentItem['qualityMetadata'], + status: row.status as ContentStatus, + thumbnailUrl: row.thumbnailUrl ?? null, + publishedAt: row.publishedAt ?? null, + downloadedAt: row.downloadedAt ?? 
null, + monitored: row.monitored, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} diff --git a/src/db/repositories/format-profile-repository.ts b/src/db/repositories/format-profile-repository.ts new file mode 100644 index 0000000..2900891 --- /dev/null +++ b/src/db/repositories/format-profile-repository.ts @@ -0,0 +1,186 @@ +import { eq, sql } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { formatProfiles } from '../schema/index'; +import type { FormatProfile } from '../../types/index'; + +// ── Types ── + +/** Fields needed to create a new format profile. */ +export interface CreateFormatProfileData { + name: string; + videoResolution?: string | null; + audioCodec?: string | null; + audioBitrate?: string | null; + containerFormat?: string | null; + isDefault?: boolean; + subtitleLanguages?: string | null; + embedSubtitles?: boolean; +} + +/** Fields that can be updated on an existing format profile. */ +export interface UpdateFormatProfileData { + name?: string; + videoResolution?: string | null; + audioCodec?: string | null; + audioBitrate?: string | null; + containerFormat?: string | null; + isDefault?: boolean; + subtitleLanguages?: string | null; + embedSubtitles?: boolean; +} + +type Db = LibSQLDatabase; + +// ── Repository Functions ── + +/** + * Insert a new format profile. + * If isDefault is true, clears isDefault on all other profiles first. + */ +export async function createFormatProfile( + db: Db, + data: CreateFormatProfileData +): Promise { + if (data.isDefault) { + await db + .update(formatProfiles) + .set({ isDefault: false, updatedAt: sql`(datetime('now'))` }) + .where(eq(formatProfiles.isDefault, true)); + } + + const result = await db + .insert(formatProfiles) + .values({ + name: data.name, + videoResolution: data.videoResolution ?? null, + audioCodec: data.audioCodec ?? null, + audioBitrate: data.audioBitrate ?? 
null, + containerFormat: data.containerFormat ?? null, + isDefault: data.isDefault ?? false, + subtitleLanguages: data.subtitleLanguages ?? null, + embedSubtitles: data.embedSubtitles ?? false, + }) + .returning(); + + return mapRow(result[0]); +} + +/** Get a format profile by ID. Returns null if not found. */ +export async function getFormatProfileById( + db: Db, + id: number +): Promise { + const rows = await db + .select() + .from(formatProfiles) + .where(eq(formatProfiles.id, id)) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +/** Get all format profiles, ordered by name. */ +export async function getAllFormatProfiles( + db: Db +): Promise { + const rows = await db + .select() + .from(formatProfiles) + .orderBy(formatProfiles.name); + + return rows.map(mapRow); +} + +/** Get the default format profile. Returns null if none is marked default. */ +export async function getDefaultFormatProfile( + db: Db +): Promise { + const rows = await db + .select() + .from(formatProfiles) + .where(eq(formatProfiles.isDefault, true)) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +/** + * Update a format profile. Sets updatedAt to now. + * If isDefault is set to true, clears isDefault on all other profiles first. + * Returns updated profile or null if not found. + */ +export async function updateFormatProfile( + db: Db, + id: number, + data: UpdateFormatProfileData +): Promise { + if (data.isDefault) { + await db + .update(formatProfiles) + .set({ isDefault: false, updatedAt: sql`(datetime('now'))` }) + .where(eq(formatProfiles.isDefault, true)); + } + + const result = await db + .update(formatProfiles) + .set({ + ...data, + updatedAt: sql`(datetime('now'))`, + }) + .where(eq(formatProfiles.id, id)) + .returning(); + + return result.length > 0 ? mapRow(result[0]) : null; +} + +/** Delete a format profile by ID. Returns true if a row was deleted. 
*/ +export async function deleteFormatProfile( + db: Db, + id: number +): Promise { + const result = await db + .delete(formatProfiles) + .where(eq(formatProfiles.id, id)) + .returning({ id: formatProfiles.id }); + + return result.length > 0; +} + +/** + * Ensure a default format profile exists. + * Idempotent — safe to call on every startup. + * If a default profile already exists (any profile with isDefault: true), returns it unchanged. + * Otherwise, creates a "Default" profile with all quality fields null (meaning "any/best available"). + */ +export async function ensureDefaultFormatProfile( + db: Db +): Promise { + const existing = await getDefaultFormatProfile(db); + if (existing) { + return existing; + } + + return createFormatProfile(db, { + name: 'Default', + isDefault: true, + }); +} + +// ── Row Mapping ── + +function mapRow(row: typeof formatProfiles.$inferSelect): FormatProfile { + return { + id: row.id, + name: row.name, + videoResolution: row.videoResolution, + audioCodec: row.audioCodec, + audioBitrate: row.audioBitrate, + containerFormat: row.containerFormat, + isDefault: row.isDefault, + subtitleLanguages: row.subtitleLanguages ?? null, + embedSubtitles: row.embedSubtitles, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} diff --git a/src/db/repositories/history-repository.ts b/src/db/repositories/history-repository.ts new file mode 100644 index 0000000..84a1914 --- /dev/null +++ b/src/db/repositories/history-repository.ts @@ -0,0 +1,158 @@ +import { eq, and, desc, sql, gt } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { downloadHistory } from '../schema/index'; +import type { DownloadHistoryRecord } from '../../types/index'; + +// ── Types ── + +/** Fields needed to create a new history event. 
*/ +export interface CreateHistoryEventData { + contentItemId?: number | null; + channelId?: number | null; + eventType: string; + status: string; + details?: Record | null; +} + +/** Optional filters for querying history events. */ +export interface HistoryEventFilters { + eventType?: string; + channelId?: number; + contentItemId?: number; +} + +/** Paginated result of history events. */ +export interface PaginatedHistoryResult { + items: DownloadHistoryRecord[]; + total: number; +} + +type Db = LibSQLDatabase; + +// ── Repository Functions ── + +/** + * Insert a new history event. Returns the created row. + */ +export async function createHistoryEvent( + db: Db, + data: CreateHistoryEventData +): Promise { + const result = await db + .insert(downloadHistory) + .values({ + contentItemId: data.contentItemId ?? null, + channelId: data.channelId ?? null, + eventType: data.eventType, + status: data.status, + details: data.details ?? null, + }) + .returning(); + + return mapRow(result[0]); +} + +/** + * Get history events with optional filters and pagination. + * Returns items ordered by createdAt DESC and total count for pagination. + */ +export async function getHistoryEvents( + db: Db, + filters?: HistoryEventFilters, + page = 1, + pageSize = 25 +): Promise { + const conditions = buildFilterConditions(filters); + const offset = (page - 1) * pageSize; + + // Count total matching records + const countResult = await db + .select({ count: sql`count(*)` }) + .from(downloadHistory) + .where(conditions.length > 0 ? and(...conditions) : undefined); + + const total = Number(countResult[0].count); + + // Fetch paginated results + const rows = await db + .select() + .from(downloadHistory) + .where(conditions.length > 0 ? and(...conditions) : undefined) + .orderBy(desc(downloadHistory.createdAt), desc(downloadHistory.id)) + .limit(pageSize) + .offset(offset); + + return { + items: rows.map(mapRow), + total, + }; +} + +/** + * Get the most recent history events. 
Default limit is 50. + * Used for the combined activity feed. + */ +export async function getRecentActivity( + db: Db, + limit = 50 +): Promise { + const rows = await db + .select() + .from(downloadHistory) + .orderBy(desc(downloadHistory.createdAt), desc(downloadHistory.id)) + .limit(limit); + + return rows.map(mapRow); +} + +/** + * Count history events with eventType='failed' created after `since` (ISO 8601 string). + * Used by HealthService to report recent error rate. + */ +export async function getRecentErrorCount( + db: Db, + since: string +): Promise { + const result = await db + .select({ count: sql`count(*)` }) + .from(downloadHistory) + .where( + and( + eq(downloadHistory.eventType, 'failed'), + gt(downloadHistory.createdAt, since) + ) + ); + + return Number(result[0].count); +} + +// ── Internal Helpers ── + +function buildFilterConditions(filters?: HistoryEventFilters) { + const conditions = []; + + if (filters?.eventType) { + conditions.push(eq(downloadHistory.eventType, filters.eventType)); + } + if (filters?.channelId !== undefined) { + conditions.push(eq(downloadHistory.channelId, filters.channelId)); + } + if (filters?.contentItemId !== undefined) { + conditions.push(eq(downloadHistory.contentItemId, filters.contentItemId)); + } + + return conditions; +} + +function mapRow(row: typeof downloadHistory.$inferSelect): DownloadHistoryRecord { + return { + id: row.id, + contentItemId: row.contentItemId, + channelId: row.channelId, + eventType: row.eventType, + status: row.status, + details: row.details as Record | null, + createdAt: row.createdAt, + }; +} diff --git a/src/db/repositories/notification-repository.ts b/src/db/repositories/notification-repository.ts new file mode 100644 index 0000000..a006505 --- /dev/null +++ b/src/db/repositories/notification-repository.ts @@ -0,0 +1,143 @@ +import { eq } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { 
notificationSettings } from '../schema/index'; +import type { NotificationSetting } from '../../types/index'; + +// ── Types ── + +/** Fields needed to create a new notification setting. */ +export interface CreateNotificationSettingData { + type: 'discord' | 'email' | 'pushover' | 'telegram'; + name: string; + config: Record; + enabled?: boolean; + onGrab?: boolean; + onDownload?: boolean; + onFailure?: boolean; +} + +/** Fields that can be updated on an existing notification setting. */ +export interface UpdateNotificationSettingData { + name?: string; + type?: 'discord' | 'email' | 'pushover' | 'telegram'; + config?: Record; + enabled?: boolean; + onGrab?: boolean; + onDownload?: boolean; + onFailure?: boolean; +} + +type Db = LibSQLDatabase; + +// ── Repository Functions ── + +/** Insert a new notification setting. Returns the created row. */ +export async function createNotificationSetting( + db: Db, + data: CreateNotificationSettingData +): Promise { + const result = await db + .insert(notificationSettings) + .values({ + type: data.type, + name: data.name, + config: data.config, + enabled: data.enabled ?? true, + onGrab: data.onGrab ?? true, + onDownload: data.onDownload ?? true, + onFailure: data.onFailure ?? true, + }) + .returning(); + + return mapRow(result[0]); +} + +/** Get all notification settings, ordered by name. */ +export async function getAllNotificationSettings( + db: Db +): Promise { + const rows = await db + .select() + .from(notificationSettings) + .orderBy(notificationSettings.name); + + return rows.map(mapRow); +} + +/** Get a notification setting by ID. Returns null if not found. */ +export async function getNotificationSettingById( + db: Db, + id: number +): Promise { + const rows = await db + .select() + .from(notificationSettings) + .where(eq(notificationSettings.id, id)) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +/** Get all enabled notification settings. 
*/ +export async function getEnabledNotificationSettings( + db: Db +): Promise { + const rows = await db + .select() + .from(notificationSettings) + .where(eq(notificationSettings.enabled, true)); + + return rows.map(mapRow); +} + +/** + * Update a notification setting. Sets updatedAt to current time. + * Returns updated setting or null if not found. + */ +export async function updateNotificationSetting( + db: Db, + id: number, + data: UpdateNotificationSettingData +): Promise { + const result = await db + .update(notificationSettings) + .set({ + ...data, + updatedAt: new Date().toISOString(), + }) + .where(eq(notificationSettings.id, id)) + .returning(); + + return result.length > 0 ? mapRow(result[0]) : null; +} + +/** Delete a notification setting by ID. Returns true if a row was deleted. */ +export async function deleteNotificationSetting( + db: Db, + id: number +): Promise { + const result = await db + .delete(notificationSettings) + .where(eq(notificationSettings.id, id)) + .returning({ id: notificationSettings.id }); + + return result.length > 0; +} + +// ── Row Mapping ── + +function mapRow(row: typeof notificationSettings.$inferSelect): NotificationSetting { + return { + id: row.id, + type: row.type as NotificationSetting['type'], + name: row.name, + enabled: row.enabled, + config: row.config as Record, + onGrab: row.onGrab, + onDownload: row.onDownload, + onFailure: row.onFailure, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} diff --git a/src/db/repositories/platform-settings-repository.ts b/src/db/repositories/platform-settings-repository.ts new file mode 100644 index 0000000..9a6ba8b --- /dev/null +++ b/src/db/repositories/platform-settings-repository.ts @@ -0,0 +1,119 @@ +import { eq, sql } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { platformSettings } from '../schema/index'; +import type { PlatformSettings, Platform } from 
'../../types/index'; + +// ── Types ── + +/** Fields accepted when upserting platform settings. */ +export interface UpsertPlatformSettingsData { + platform: Platform; + defaultFormatProfileId?: number | null; + checkInterval?: number; + concurrencyLimit?: number; + subtitleLanguages?: string | null; + grabAllEnabled?: boolean; + grabAllOrder?: 'newest' | 'oldest'; + scanLimit?: number; + rateLimitDelay?: number; +} + +type Db = LibSQLDatabase; + +// ── Repository Functions ── + +/** Get all platform settings, ordered by platform name. */ +export async function getAllPlatformSettings(db: Db): Promise { + const rows = await db.select().from(platformSettings).orderBy(platformSettings.platform); + return rows.map(mapRow); +} + +/** Get platform settings for a specific platform. Returns null if not found. */ +export async function getPlatformSettings( + db: Db, + platform: string +): Promise { + const rows = await db + .select() + .from(platformSettings) + .where(eq(platformSettings.platform, platform)) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +/** + * Upsert platform settings — INSERT or UPDATE on conflict. + * Uses INSERT ... ON CONFLICT DO UPDATE for atomic upsert semantics. + */ +export async function upsertPlatformSettings( + db: Db, + data: UpsertPlatformSettingsData +): Promise { + const now = sql`(datetime('now'))`; + + const result = await db + .insert(platformSettings) + .values({ + platform: data.platform, + defaultFormatProfileId: data.defaultFormatProfileId ?? null, + checkInterval: data.checkInterval ?? 360, + concurrencyLimit: data.concurrencyLimit ?? 2, + subtitleLanguages: data.subtitleLanguages ?? null, + grabAllEnabled: data.grabAllEnabled ?? false, + grabAllOrder: data.grabAllOrder ?? 'newest', + scanLimit: data.scanLimit ?? 100, + rateLimitDelay: data.rateLimitDelay ?? 
1000, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: platformSettings.platform, + set: { + defaultFormatProfileId: data.defaultFormatProfileId ?? null, + checkInterval: data.checkInterval ?? 360, + concurrencyLimit: data.concurrencyLimit ?? 2, + subtitleLanguages: data.subtitleLanguages ?? null, + grabAllEnabled: data.grabAllEnabled ?? false, + grabAllOrder: data.grabAllOrder ?? 'newest', + scanLimit: data.scanLimit ?? 100, + rateLimitDelay: data.rateLimitDelay ?? 1000, + updatedAt: now, + }, + }) + .returning(); + + return mapRow(result[0]); +} + +/** Delete platform settings for a specific platform. Returns true if a row was deleted. */ +export async function deletePlatformSettings( + db: Db, + platform: string +): Promise { + const result = await db + .delete(platformSettings) + .where(eq(platformSettings.platform, platform)) + .returning({ platform: platformSettings.platform }); + + return result.length > 0; +} + +// ── Row Mapping ── + +function mapRow(row: typeof platformSettings.$inferSelect): PlatformSettings { + return { + platform: row.platform as Platform, + defaultFormatProfileId: row.defaultFormatProfileId, + checkInterval: row.checkInterval ?? 360, + concurrencyLimit: row.concurrencyLimit ?? 2, + subtitleLanguages: row.subtitleLanguages, + grabAllEnabled: row.grabAllEnabled, + grabAllOrder: row.grabAllOrder as 'newest' | 'oldest', + scanLimit: row.scanLimit ?? 100, + rateLimitDelay: row.rateLimitDelay ?? 
1000, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} diff --git a/src/db/repositories/playlist-repository.ts b/src/db/repositories/playlist-repository.ts new file mode 100644 index 0000000..1474abf --- /dev/null +++ b/src/db/repositories/playlist-repository.ts @@ -0,0 +1,172 @@ +import { eq, and, sql, inArray } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { playlists, contentPlaylist } from '../schema/index'; +import { contentItems } from '../schema/content'; +import type { Playlist, PlaylistDiscoveryResult } from '../../types/index'; + +type Db = LibSQLDatabase; + +// ── Repository Functions ── + +/** Get all playlists for a channel, ordered by position. */ +export async function getPlaylistsByChannelId( + db: Db, + channelId: number +): Promise { + const rows = await db + .select() + .from(playlists) + .where(eq(playlists.channelId, channelId)) + .orderBy(playlists.position); + + return rows.map(mapRow); +} + +/** + * Upsert playlists from discovery results. + * + * For each discovery result: + * 1. Insert or update the playlist row (match on channelId + platformPlaylistId). + * 2. Resolve videoIds (platformContentId strings) to actual content_items.id values. + * 3. Sync the content_playlist junction table entries. + * + * Uses a transaction for atomicity. Returns the upserted playlists. 
+ */ +export async function upsertPlaylists( + db: Db, + channelId: number, + discoveryResults: PlaylistDiscoveryResult[] +): Promise { + return await db.transaction(async (tx) => { + const upserted: Playlist[] = []; + + for (let i = 0; i < discoveryResults.length; i++) { + const result = discoveryResults[i]; + + // Check if playlist already exists for this channel + const existing = await tx + .select() + .from(playlists) + .where( + and( + eq(playlists.channelId, channelId), + eq(playlists.platformPlaylistId, result.platformPlaylistId) + ) + ) + .limit(1); + + let playlistRow: Playlist; + + if (existing.length > 0) { + // Update existing playlist + const updated = await tx + .update(playlists) + .set({ + title: result.title, + position: i, + updatedAt: sql`(datetime('now'))`, + }) + .where(eq(playlists.id, existing[0].id)) + .returning(); + playlistRow = mapRow(updated[0]); + } else { + // Insert new playlist + const inserted = await tx + .insert(playlists) + .values({ + channelId, + platformPlaylistId: result.platformPlaylistId, + title: result.title, + position: i, + }) + .returning(); + playlistRow = mapRow(inserted[0]); + } + + // Resolve videoIds (platformContentId strings) to content_items.id values + let contentItemIds: number[] = []; + if (result.videoIds.length > 0) { + const contentRows = await tx + .select({ id: contentItems.id, platformContentId: contentItems.platformContentId }) + .from(contentItems) + .where( + and( + eq(contentItems.channelId, channelId), + inArray(contentItems.platformContentId, result.videoIds) + ) + ); + contentItemIds = contentRows.map((r) => r.id); + } + + // Sync junction table: delete old entries, insert new ones + await tx + .delete(contentPlaylist) + .where(eq(contentPlaylist.playlistId, playlistRow.id)); + + if (contentItemIds.length > 0) { + await tx.insert(contentPlaylist).values( + contentItemIds.map((contentItemId) => ({ + contentItemId, + playlistId: playlistRow.id, + })) + ); + } + + upserted.push(playlistRow); + } 
+ + return upserted; + }); +} + +/** + * Get content-to-playlist mappings for a channel. + * Returns a map of playlistId → contentItemId[]. + */ +export async function getContentPlaylistMappings( + db: Db, + channelId: number +): Promise> { + // Join content_playlist with playlists to filter by channelId + const rows = await db + .select({ + playlistId: contentPlaylist.playlistId, + contentItemId: contentPlaylist.contentItemId, + }) + .from(contentPlaylist) + .innerJoin(playlists, eq(contentPlaylist.playlistId, playlists.id)) + .where(eq(playlists.channelId, channelId)); + + const mappings: Record = {}; + for (const row of rows) { + if (!mappings[row.playlistId]) { + mappings[row.playlistId] = []; + } + mappings[row.playlistId].push(row.contentItemId); + } + + return mappings; +} + +/** Delete all playlists for a channel. Cascade handles junction rows. */ +export async function deletePlaylistsByChannelId( + db: Db, + channelId: number +): Promise { + await db.delete(playlists).where(eq(playlists.channelId, channelId)); +} + +// ── Row Mapping ── + +function mapRow(row: typeof playlists.$inferSelect): Playlist { + return { + id: row.id, + channelId: row.channelId, + platformPlaylistId: row.platformPlaylistId, + title: row.title, + position: row.position, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} diff --git a/src/db/repositories/queue-repository.ts b/src/db/repositories/queue-repository.ts new file mode 100644 index 0000000..68b28be --- /dev/null +++ b/src/db/repositories/queue-repository.ts @@ -0,0 +1,283 @@ +import { eq, and, desc, asc, sql } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { queueItems, contentItems, channels } from '../schema/index'; +import type { QueueItem, QueueStatus } from '../../types/index'; + +// ── Types ── + +/** Fields needed to create a new queue item. 
*/ +export interface CreateQueueItemData { + contentItemId: number; + priority?: number; + maxAttempts?: number; +} + +/** Optional fields when updating queue item status. */ +export interface UpdateQueueItemFields { + error?: string | null; + startedAt?: string | null; + completedAt?: string | null; + attempts?: number; +} + +type Db = LibSQLDatabase; + +// ── Repository Functions ── + +/** + * Insert a new queue item. Returns the created row. + */ +export async function createQueueItem( + db: Db, + data: CreateQueueItemData +): Promise { + const result = await db + .insert(queueItems) + .values({ + contentItemId: data.contentItemId, + priority: data.priority ?? 0, + maxAttempts: data.maxAttempts ?? 3, + }) + .returning(); + + return mapRow(result[0]); +} + +/** Get a queue item by ID. Returns null if not found. */ +export async function getQueueItemById( + db: Db, + id: number +): Promise { + const rows = await db + .select() + .from(queueItems) + .where(eq(queueItems.id, id)) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +/** Get queue items by status, ordered by priority DESC then createdAt ASC. 
*/ +export async function getQueueItemsByStatus( + db: Db, + status: QueueStatus +): Promise { + const rows = await db + .select({ + id: queueItems.id, + contentItemId: queueItems.contentItemId, + status: queueItems.status, + priority: queueItems.priority, + attempts: queueItems.attempts, + maxAttempts: queueItems.maxAttempts, + error: queueItems.error, + startedAt: queueItems.startedAt, + completedAt: queueItems.completedAt, + createdAt: queueItems.createdAt, + updatedAt: queueItems.updatedAt, + contentTitle: contentItems.title, + channelName: channels.name, + }) + .from(queueItems) + .leftJoin(contentItems, eq(queueItems.contentItemId, contentItems.id)) + .leftJoin(channels, eq(contentItems.channelId, channels.id)) + .where(eq(queueItems.status, status)) + .orderBy(desc(queueItems.priority), asc(queueItems.createdAt)); + + return rows.map(mapJoinedRow); +} + +/** Get all queue items ordered by priority DESC then createdAt ASC. */ +export async function getAllQueueItems( + db: Db +): Promise { + const rows = await db + .select({ + id: queueItems.id, + contentItemId: queueItems.contentItemId, + status: queueItems.status, + priority: queueItems.priority, + attempts: queueItems.attempts, + maxAttempts: queueItems.maxAttempts, + error: queueItems.error, + startedAt: queueItems.startedAt, + completedAt: queueItems.completedAt, + createdAt: queueItems.createdAt, + updatedAt: queueItems.updatedAt, + contentTitle: contentItems.title, + channelName: channels.name, + }) + .from(queueItems) + .leftJoin(contentItems, eq(queueItems.contentItemId, contentItems.id)) + .leftJoin(channels, eq(contentItems.channelId, channels.id)) + .orderBy(desc(queueItems.priority), asc(queueItems.createdAt)); + + return rows.map(mapJoinedRow); +} + +/** + * Get pending queue items ordered by priority DESC then createdAt ASC. + * Optional limit to constrain the result set (for concurrency control). 
+ */ +export async function getPendingQueueItems( + db: Db, + limit?: number +): Promise { + let query = db + .select() + .from(queueItems) + .where(eq(queueItems.status, 'pending')) + .orderBy(desc(queueItems.priority), asc(queueItems.createdAt)); + + if (limit !== undefined) { + query = query.limit(limit) as typeof query; + } + + const rows = await query; + return rows.map(mapRow); +} + +/** + * Update a queue item's status and optional fields. Sets updatedAt to now. + * Returns updated item or null if not found. + */ +export async function updateQueueItemStatus( + db: Db, + id: number, + status: QueueStatus, + updates?: UpdateQueueItemFields +): Promise { + const setData: Record = { + status, + updatedAt: sql`(datetime('now'))`, + }; + + if (updates?.error !== undefined) setData.error = updates.error; + if (updates?.startedAt !== undefined) setData.startedAt = updates.startedAt; + if (updates?.completedAt !== undefined) setData.completedAt = updates.completedAt; + if (updates?.attempts !== undefined) setData.attempts = updates.attempts; + + const result = await db + .update(queueItems) + .set(setData) + .where(eq(queueItems.id, id)) + .returning(); + + return result.length > 0 ? mapRow(result[0]) : null; +} + +/** + * Count queue items grouped by status. + * Returns a record with counts for each QueueStatus value. + */ +export async function countQueueItemsByStatus( + db: Db +): Promise> { + const rows = await db + .select({ + status: queueItems.status, + count: sql`count(*)`, + }) + .from(queueItems) + .groupBy(queueItems.status); + + const counts: Record = { + pending: 0, + downloading: 0, + completed: 0, + failed: 0, + cancelled: 0, + }; + + for (const row of rows) { + counts[row.status] = Number(row.count); + } + + return counts as Record; +} + +/** Delete a queue item by ID. Returns true if a row was deleted. 
*/ +export async function deleteQueueItem( + db: Db, + id: number +): Promise { + const result = await db + .delete(queueItems) + .where(eq(queueItems.id, id)) + .returning({ id: queueItems.id }); + + return result.length > 0; +} + +/** + * Get a queue item by content item ID (for dedup checking before enqueue). + * Returns the queue item or null if no entry exists for this content item. + */ +export async function getQueueItemByContentItemId( + db: Db, + contentItemId: number +): Promise { + const rows = await db + .select() + .from(queueItems) + .where(eq(queueItems.contentItemId, contentItemId)) + .limit(1); + + return rows.length > 0 ? mapRow(rows[0]) : null; +} + +// ── Row Mapping ── + +/** Map a plain queue_items row (no JOINs). Used by CRUD operations that don't need display names. */ +function mapRow(row: typeof queueItems.$inferSelect): QueueItem { + return { + id: row.id, + contentItemId: row.contentItemId, + status: row.status as QueueStatus, + priority: row.priority, + attempts: row.attempts, + maxAttempts: row.maxAttempts, + error: row.error, + startedAt: row.startedAt, + completedAt: row.completedAt, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} + +/** Joined row shape from explicit `.select({...})` with LEFT JOIN content_items and channels. */ +interface JoinedQueueRow { + id: number; + contentItemId: number; + status: string; + priority: number; + attempts: number; + maxAttempts: number; + error: string | null; + startedAt: string | null; + completedAt: string | null; + createdAt: string; + updatedAt: string; + contentTitle: string | null; + channelName: string | null; +} + +/** Map a joined queue row (with content title and channel name from LEFT JOINs). 
*/ +function mapJoinedRow(row: JoinedQueueRow): QueueItem { + return { + id: row.id, + contentItemId: row.contentItemId, + status: row.status as QueueStatus, + priority: row.priority, + attempts: row.attempts, + maxAttempts: row.maxAttempts, + error: row.error, + startedAt: row.startedAt, + completedAt: row.completedAt, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + contentTitle: row.contentTitle ?? null, + channelName: row.channelName ?? null, + }; +} diff --git a/src/db/repositories/system-config-repository.ts b/src/db/repositories/system-config-repository.ts new file mode 100644 index 0000000..d9276cc --- /dev/null +++ b/src/db/repositories/system-config-repository.ts @@ -0,0 +1,101 @@ +import { eq, inArray } from 'drizzle-orm'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../schema/index'; +import { systemConfig } from '../schema/index'; +import { appConfig } from '../../config/index'; + +type Db = LibSQLDatabase; + +// ── Key Constants ── + +export const APP_CHECK_INTERVAL = 'app.check_interval'; +export const APP_CONCURRENT_DOWNLOADS = 'app.concurrent_downloads'; + +// ── Read / Write ── + +/** + * Read a single app setting by key. Returns null if the key doesn't exist. + */ +export async function getAppSetting(db: Db, key: string): Promise { + const rows = await db + .select({ value: systemConfig.value }) + .from(systemConfig) + .where(eq(systemConfig.key, key)) + .limit(1); + + return rows[0]?.value ?? null; +} + +/** + * Write a single app setting by key (upsert). + * If the key already exists, updates the value and updatedAt. + * If it doesn't exist, inserts a new row. 
+ */ +export async function setAppSetting(db: Db, key: string, value: string): Promise { + const existing = await db + .select({ key: systemConfig.key }) + .from(systemConfig) + .where(eq(systemConfig.key, key)) + .limit(1); + + if (existing.length > 0) { + await db + .update(systemConfig) + .set({ value, updatedAt: new Date().toISOString() }) + .where(eq(systemConfig.key, key)); + } else { + await db.insert(systemConfig).values({ key, value }); + } +} + +/** + * Read multiple app settings by key. Returns a Record where missing keys map to null. + */ +export async function getAppSettings( + db: Db, + keys: string[] +): Promise> { + const result: Record = {}; + + // Initialize all keys to null + for (const key of keys) { + result[key] = null; + } + + if (keys.length === 0) return result; + + const rows = await db + .select({ key: systemConfig.key, value: systemConfig.value }) + .from(systemConfig) + .where(inArray(systemConfig.key, keys)); + + for (const row of rows) { + result[row.key] = row.value; + } + + return result; +} + +/** + * Seed default app settings from environment config. + * Only inserts a value if the key does NOT already exist in the DB. + * This makes env vars the initial seed on first boot; DB values win on subsequent boots. 
+ */ +export async function seedAppDefaults(db: Db): Promise { + const defaults: Array<{ key: string; value: string }> = [ + { key: APP_CHECK_INTERVAL, value: appConfig.scheduler.defaultCheckInterval.toString() }, + { key: APP_CONCURRENT_DOWNLOADS, value: appConfig.concurrentDownloads.toString() }, + ]; + + for (const { key, value } of defaults) { + const existing = await db + .select({ key: systemConfig.key }) + .from(systemConfig) + .where(eq(systemConfig.key, key)) + .limit(1); + + if (existing.length === 0) { + await db.insert(systemConfig).values({ key, value }); + } + } +} diff --git a/src/db/schema/channels.ts b/src/db/schema/channels.ts new file mode 100644 index 0000000..dfc9620 --- /dev/null +++ b/src/db/schema/channels.ts @@ -0,0 +1,31 @@ +import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; +import { sql } from 'drizzle-orm'; +import { formatProfiles } from './content'; + +/** Monitored channels (YouTube channels, SoundCloud artists, etc.). */ +export const channels = sqliteTable('channels', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + platform: text('platform').notNull(), // 'youtube' | 'soundcloud' + platformId: text('platform_id').notNull(), + url: text('url').notNull(), + monitoringEnabled: integer('monitoring_enabled', { mode: 'boolean' }) + .notNull() + .default(true), + checkInterval: integer('check_interval').notNull().default(360), // minutes + imageUrl: text('image_url'), + metadata: text('metadata', { mode: 'json' }), // platform-specific extras + formatProfileId: integer('format_profile_id').references( + () => formatProfiles.id, + { onDelete: 'set null' } + ), + createdAt: text('created_at') + .notNull() + .default(sql`(datetime('now'))`), + updatedAt: text('updated_at') + .notNull() + .default(sql`(datetime('now'))`), + lastCheckedAt: text('last_checked_at'), // null until first monitoring check + lastCheckStatus: text('last_check_status'), // 'success' | 'error' | 
'rate_limited' + monitoringMode: text('monitoring_mode').notNull().default('all'), // 'all' | 'future' | 'existing' | 'none' +}); diff --git a/src/db/schema/content.ts b/src/db/schema/content.ts new file mode 100644 index 0000000..d5770fe --- /dev/null +++ b/src/db/schema/content.ts @@ -0,0 +1,50 @@ +import { sqliteTable, text, integer, real } from 'drizzle-orm/sqlite-core'; +import { sql } from 'drizzle-orm'; +import { channels } from './channels'; + +/** Individual content items (videos, audio tracks, livestreams). */ +export const contentItems = sqliteTable('content_items', { + id: integer('id').primaryKey({ autoIncrement: true }), + channelId: integer('channel_id') + .notNull() + .references(() => channels.id, { onDelete: 'cascade' }), + title: text('title').notNull(), + platformContentId: text('platform_content_id').notNull(), + url: text('url').notNull(), + contentType: text('content_type').notNull(), // 'video' | 'audio' | 'livestream' + duration: integer('duration'), // seconds + filePath: text('file_path'), + fileSize: integer('file_size'), // bytes + format: text('format'), // container format e.g. 'mp4', 'webm', 'mp3' + qualityMetadata: text('quality_metadata', { mode: 'json' }), // actual quality info post-download + status: text('status').notNull().default('monitored'), // monitored|queued|downloading|downloaded|failed|ignored + thumbnailUrl: text('thumbnail_url'), + publishedAt: text('published_at'), // ISO datetime from platform (nullable) + downloadedAt: text('downloaded_at'), // ISO datetime when download completed (nullable) + monitored: integer('monitored', { mode: 'boolean' }).notNull().default(true), // per-item monitoring toggle + createdAt: text('created_at') + .notNull() + .default(sql`(datetime('now'))`), + updatedAt: text('updated_at') + .notNull() + .default(sql`(datetime('now'))`), +}); + +/** Format profiles defining preferred download quality/format settings. 
*/ +export const formatProfiles = sqliteTable('format_profiles', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + videoResolution: text('video_resolution'), // e.g. '1080p', '720p', 'best' + audioCodec: text('audio_codec'), // e.g. 'opus', 'aac', 'mp3' + audioBitrate: text('audio_bitrate'), // e.g. '320k', '192k' + containerFormat: text('container_format'), // e.g. 'mp4', 'mkv', 'mp3' + isDefault: integer('is_default', { mode: 'boolean' }).notNull().default(false), + subtitleLanguages: text('subtitle_languages'), + embedSubtitles: integer('embed_subtitles', { mode: 'boolean' }).notNull().default(false), + createdAt: text('created_at') + .notNull() + .default(sql`(datetime('now'))`), + updatedAt: text('updated_at') + .notNull() + .default(sql`(datetime('now'))`), +}); diff --git a/src/db/schema/history.ts b/src/db/schema/history.ts new file mode 100644 index 0000000..c810143 --- /dev/null +++ b/src/db/schema/history.ts @@ -0,0 +1,21 @@ +import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; +import { sql } from 'drizzle-orm'; +import { contentItems } from './content'; +import { channels } from './channels'; + +/** Download history / activity log entries. 
*/ +export const downloadHistory = sqliteTable('download_history', { + id: integer('id').primaryKey({ autoIncrement: true }), + contentItemId: integer('content_item_id').references(() => contentItems.id, { + onDelete: 'set null', + }), + channelId: integer('channel_id').references(() => channels.id, { + onDelete: 'set null', + }), + eventType: text('event_type').notNull(), // 'grabbed' | 'downloaded' | 'failed' | 'imported' | 'deleted' + status: text('status').notNull(), + details: text('details', { mode: 'json' }), // arbitrary event metadata + createdAt: text('created_at') + .notNull() + .default(sql`(datetime('now'))`), +}); diff --git a/src/db/schema/index.ts b/src/db/schema/index.ts new file mode 100644 index 0000000..b283e8a --- /dev/null +++ b/src/db/schema/index.ts @@ -0,0 +1,8 @@ +export { systemConfig } from './system'; +export { channels } from './channels'; +export { contentItems, formatProfiles } from './content'; +export { queueItems } from './queue'; +export { downloadHistory } from './history'; +export { notificationSettings } from './notifications'; +export { platformSettings } from './platform-settings'; +export { playlists, contentPlaylist } from './playlists'; diff --git a/src/db/schema/notifications.ts b/src/db/schema/notifications.ts new file mode 100644 index 0000000..056be8d --- /dev/null +++ b/src/db/schema/notifications.ts @@ -0,0 +1,20 @@ +import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; +import { sql } from 'drizzle-orm'; + +/** Notification channel settings (Discord, email, Pushover, Telegram). 
*/ +export const notificationSettings = sqliteTable('notification_settings', { + id: integer('id').primaryKey({ autoIncrement: true }), + type: text('type').notNull(), // 'discord' | 'email' | 'pushover' | 'telegram' + name: text('name').notNull(), + enabled: integer('enabled', { mode: 'boolean' }).notNull().default(true), + config: text('config', { mode: 'json' }).notNull(), // channel-specific configuration + onGrab: integer('on_grab', { mode: 'boolean' }).notNull().default(true), + onDownload: integer('on_download', { mode: 'boolean' }).notNull().default(true), + onFailure: integer('on_failure', { mode: 'boolean' }).notNull().default(true), + createdAt: text('created_at') + .notNull() + .default(sql`(datetime('now'))`), + updatedAt: text('updated_at') + .notNull() + .default(sql`(datetime('now'))`), +}); diff --git a/src/db/schema/platform-settings.ts b/src/db/schema/platform-settings.ts new file mode 100644 index 0000000..5c6b5ea --- /dev/null +++ b/src/db/schema/platform-settings.ts @@ -0,0 +1,25 @@ +import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; +import { sql } from 'drizzle-orm'; +import { formatProfiles } from './content'; + +/** Per-platform default settings for monitoring, downloads, and back-catalog import. 
 */
+export const platformSettings = sqliteTable('platform_settings', {
+  platform: text('platform').primaryKey(), // natural key — one row per platform (e.g. 'youtube', 'soundcloud')
+  defaultFormatProfileId: integer('default_format_profile_id').references(
+    () => formatProfiles.id,
+    { onDelete: 'set null' } // deleting a profile clears the default instead of deleting this row
+  ),
+  checkInterval: integer('check_interval').default(360), // minutes between monitoring scans (360 = 6 h)
+  concurrencyLimit: integer('concurrency_limit').default(2), // max simultaneous downloads for this platform
+  subtitleLanguages: text('subtitle_languages'), // NOTE(review): presumably a delimited list of language codes — confirm format
+  grabAllEnabled: integer('grab_all_enabled', { mode: 'boolean' }).notNull().default(false), // back-catalog ("grab all") import is opt-in
+  grabAllOrder: text('grab_all_order').notNull().default('newest'), // 'newest' | 'oldest'
+  scanLimit: integer('scan_limit').default(100), // NOTE(review): presumed cap on items fetched per scan — verify against scanner
+  rateLimitDelay: integer('rate_limit_delay').default(1000), // NOTE(review): presumably milliseconds between platform requests — verify
+  createdAt: text('created_at')
+    .notNull()
+    .default(sql`(datetime('now'))`), // SQLite datetime('now') — UTC text timestamp
+  updatedAt: text('updated_at')
+    .notNull()
+    .default(sql`(datetime('now'))`), // default applies on insert only; updates must be set by application code
+});
diff --git a/src/db/schema/playlists.ts b/src/db/schema/playlists.ts
new file mode 100644
index 0000000..c1a80af
--- /dev/null
+++ b/src/db/schema/playlists.ts
@@ -0,0 +1,37 @@
+import { sqliteTable, text, integer, primaryKey } from 'drizzle-orm/sqlite-core';
+import { sql } from 'drizzle-orm';
+import { channels } from './channels';
+import { contentItems } from './content';
+
+/** Playlists belonging to a channel (e.g. YouTube playlists). */
+export const playlists = sqliteTable('playlists', {
+  id: integer('id').primaryKey({ autoIncrement: true }),
+  channelId: integer('channel_id')
+    .notNull()
+    .references(() => channels.id, { onDelete: 'cascade' }), // playlists are deleted with their channel
+  platformPlaylistId: text('platform_playlist_id').notNull(), // platform-native playlist identifier
+  title: text('title').notNull(),
+  position: integer('position').notNull().default(0), // sort order within the channel
+  createdAt: text('created_at')
+    .notNull()
+    .default(sql`(datetime('now'))`),
+  updatedAt: text('updated_at')
+    .notNull()
+    .default(sql`(datetime('now'))`),
+});
+
+/** Junction table mapping content items to playlists (many-to-many).
 */
+export const contentPlaylist = sqliteTable(
+  'content_playlist',
+  {
+    contentItemId: integer('content_item_id')
+      .notNull()
+      .references(() => contentItems.id, { onDelete: 'cascade' }), // mapping rows die with the content item
+    playlistId: integer('playlist_id')
+      .notNull()
+      .references(() => playlists.id, { onDelete: 'cascade' }), // mapping rows die with the playlist
+  },
+  (t) => ({
+    pk: primaryKey({ columns: [t.contentItemId, t.playlistId] }), // composite PK also prevents duplicate mappings
+  })
+);
diff --git a/src/db/schema/queue.ts b/src/db/schema/queue.ts
new file mode 100644
index 0000000..7820384
--- /dev/null
+++ b/src/db/schema/queue.ts
@@ -0,0 +1,24 @@
+import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core';
+import { sql } from 'drizzle-orm';
+import { contentItems } from './content';
+
+/** Download queue items with status tracking and retry logic. */
+export const queueItems = sqliteTable('queue_items', {
+  id: integer('id').primaryKey({ autoIncrement: true }),
+  contentItemId: integer('content_item_id')
+    .notNull()
+    .references(() => contentItems.id, { onDelete: 'cascade' }), // queue entries die with their content item
+  status: text('status').notNull().default('pending'), // pending|downloading|completed|failed|cancelled
+  priority: integer('priority').notNull().default(0), // NOTE(review): ordering direction (higher-first vs lower-first) not visible here — confirm against queue worker
+  attempts: integer('attempts').notNull().default(0), // attempts made so far
+  maxAttempts: integer('max_attempts').notNull().default(3), // give up after this many attempts
+  error: text('error'), // last failure message, if any
+  startedAt: text('started_at'), // NOTE(review): presumably set when the download starts — confirm against worker
+  completedAt: text('completed_at'), // NOTE(review): presumably set on terminal completion — confirm against worker
+  createdAt: text('created_at')
+    .notNull()
+    .default(sql`(datetime('now'))`),
+  updatedAt: text('updated_at')
+    .notNull()
+    .default(sql`(datetime('now'))`),
+});
diff --git a/src/db/schema/system.ts b/src/db/schema/system.ts
new file mode 100644
index 0000000..93a5010
--- /dev/null
+++ b/src/db/schema/system.ts
@@ -0,0 +1,14 @@
+import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core';
+import { sql } from 'drizzle-orm';
+
+/** Key-value system configuration (API key, app settings).
*/ +export const systemConfig = sqliteTable('system_config', { + key: text('key').primaryKey(), + value: text('value').notNull(), + createdAt: text('created_at') + .notNull() + .default(sql`(datetime('now'))`), + updatedAt: text('updated_at') + .notNull() + .default(sql`(datetime('now'))`), +}); diff --git a/src/frontend/index.html b/src/frontend/index.html new file mode 100644 index 0000000..492c4be --- /dev/null +++ b/src/frontend/index.html @@ -0,0 +1,13 @@ + + + + + + Tubearr + + + +
+ + + diff --git a/src/frontend/public/favicon.svg b/src/frontend/public/favicon.svg new file mode 100644 index 0000000..778be46 --- /dev/null +++ b/src/frontend/public/favicon.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/src/frontend/public/logo.svg b/src/frontend/public/logo.svg new file mode 100644 index 0000000..e81e52e --- /dev/null +++ b/src/frontend/public/logo.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/src/frontend/src/App.tsx b/src/frontend/src/App.tsx new file mode 100644 index 0000000..9c29038 --- /dev/null +++ b/src/frontend/src/App.tsx @@ -0,0 +1,44 @@ +import { Routes, Route, Navigate } from 'react-router-dom'; +import { Sidebar } from './components/Sidebar'; +import { Channels } from './pages/Channels'; +import { ChannelDetail } from './pages/ChannelDetail'; +import { Library } from './pages/Library'; +import { Queue } from './pages/Queue'; +import { ActivityPage } from './pages/Activity'; +import { SettingsPage } from './pages/Settings'; +import { SystemPage } from './pages/System'; + +function AuthenticatedLayout() { + return ( +
+ +
+ + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + +
+
+ ); +} + +export function App() { + return ( + + } /> + + ); +} diff --git a/src/frontend/src/api/client.ts b/src/frontend/src/api/client.ts new file mode 100644 index 0000000..5d202c1 --- /dev/null +++ b/src/frontend/src/api/client.ts @@ -0,0 +1,75 @@ +/** + * API client for same-origin browser requests. + * + * The backend auth middleware (T01) trusts same-origin requests from the web UI. + * No API key header is injected — the browser's Origin header is sufficient. + * + * Logs failed requests to browser console with status code and URL for + * diagnostics (visible in DevTools Network/Console tabs). + */ + +export class ApiClientError extends Error { + constructor( + message: string, + public statusCode: number, + public url: string, + public responseBody?: unknown, + ) { + super(message); + this.name = 'ApiClientError'; + } +} + +async function request( + method: string, + url: string, + body?: unknown, +): Promise { + const headers: Record = {}; + if (body !== undefined) { + headers['Content-Type'] = 'application/json'; + } + + const response = await fetch(url, { + method, + headers, + body: body !== undefined ? JSON.stringify(body) : undefined, + }); + + if (!response.ok) { + let errorBody: unknown; + try { + errorBody = await response.json(); + } catch { + errorBody = await response.text(); + } + + // Log failed requests to console for diagnostics + console.error( + `[API] ${method} ${url} → ${response.status}`, + errorBody, + ); + + const message = + typeof errorBody === 'object' && errorBody !== null && 'message' in errorBody + ? 
String((errorBody as { message: string }).message) + : `Request failed with status ${response.status}`; + + throw new ApiClientError(message, response.status, url, errorBody); + } + + // Handle 204 No Content + if (response.status === 204) { + return undefined as T; + } + + return response.json() as Promise; +} + +export const apiClient = { + get: (url: string) => request('GET', url), + post: (url: string, body?: unknown) => request('POST', url, body), + put: (url: string, body?: unknown) => request('PUT', url, body), + patch: (url: string, body?: unknown) => request('PATCH', url, body), + del: (url: string) => request('DELETE', url), +}; diff --git a/src/frontend/src/api/hooks/useActivity.ts b/src/frontend/src/api/hooks/useActivity.ts new file mode 100644 index 0000000..6dc3eaf --- /dev/null +++ b/src/frontend/src/api/hooks/useActivity.ts @@ -0,0 +1,61 @@ +import { useQuery } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { DownloadHistoryRecord } from '@shared/types/index'; +import type { PaginatedResponse } from '@shared/types/api'; + +// ── Types ── + +export interface HistoryFilters { + eventType?: string; + channelId?: number; + contentItemId?: number; +} + +// ── Query Keys ── + +export const activityKeys = { + all: ['activity'] as const, + history: (filters: HistoryFilters, page: number, pageSize: number) => + ['activity', 'history', filters, page, pageSize] as const, + recent: (limit: number) => ['activity', 'recent', limit] as const, +}; + +// ── Queries ── + +/** Fetch paginated history events with optional filters. 
 */
+export function useHistory(
+  filters: HistoryFilters = {},
+  page: number = 1,
+  pageSize: number = 20,
+) {
+  return useQuery({
+    queryKey: activityKeys.history(filters, page, pageSize), // filters object participates in the cache key via structural hashing
+    queryFn: async () => {
+      const params = new URLSearchParams();
+      params.set('page', String(page));
+      params.set('pageSize', String(pageSize));
+      if (filters.eventType) params.set('eventType', filters.eventType);
+      if (filters.channelId) params.set('channelId', String(filters.channelId)); // truthy check: id 0 would be skipped (autoincrement ids start at 1)
+      if (filters.contentItemId) params.set('contentItemId', String(filters.contentItemId));
+
+      return apiClient.get<PaginatedResponse<DownloadHistoryRecord>>( // NOTE(review): type argument reconstructed — the pasted source had it stripped; verify against original file
+        `/api/v1/history?${params.toString()}`,
+      );
+    },
+  });
+}
+
+/** Fetch recent activity feed (non-paginated, limited). */
+export function useActivity(limit: number = 50) {
+  return useQuery({
+    queryKey: activityKeys.recent(limit),
+    queryFn: async () => {
+      const response = await apiClient.get<{
+        success: boolean;
+        data: DownloadHistoryRecord[];
+      }>(`/api/v1/activity?limit=${limit}`);
+      return response.data; // unwrap the { success, data } envelope
+    },
+    refetchInterval: 10000, // Refresh recent activity every 10s
+  });
+}
diff --git a/src/frontend/src/api/hooks/useChannels.ts b/src/frontend/src/api/hooks/useChannels.ts
new file mode 100644
index 0000000..4d8dac6
--- /dev/null
+++ b/src/frontend/src/api/hooks/useChannels.ts
@@ -0,0 +1,145 @@
+import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
+import { apiClient } from '../client';
+import { contentKeys } from './useContent';
+import type { Channel } from '@shared/types/index';
+import type { ChannelWithCounts } from '@shared/types/api';
+
+// ── Query Keys ──
+
+export const channelKeys = {
+  all: ['channels'] as const,
+  detail: (id: number) => ['channels', id] as const,
+};
+
+// ── Queries ──
+
+/** Fetch all channels (includes contentCounts per channel).
*/ +export function useChannels() { + return useQuery({ + queryKey: channelKeys.all, + queryFn: () => apiClient.get('/api/v1/channel'), + }); +} + +/** Fetch a single channel by ID. */ +export function useChannel(id: number) { + return useQuery({ + queryKey: channelKeys.detail(id), + queryFn: () => apiClient.get(`/api/v1/channel/${id}`), + enabled: id > 0, + }); +} + +// ── Mutations ── + +/** Payload for creating a new channel. */ +interface CreateChannelInput { + url: string; + checkInterval?: number; + monitoringEnabled?: boolean; + formatProfileId?: number; + grabAll?: boolean; + grabAllOrder?: 'newest' | 'oldest'; +} + +/** Create a new channel by URL (resolves metadata via backend). */ +export function useCreateChannel() { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: (data: CreateChannelInput) => + apiClient.post('/api/v1/channel', data), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: channelKeys.all }); + }, + }); +} + +/** Update a channel's fields. */ +export function useUpdateChannel(id: number) { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: (data: { name?: string; checkInterval?: number; monitoringEnabled?: boolean; formatProfileId?: number | null }) => + apiClient.put(`/api/v1/channel/${id}`, data), + onSuccess: (updated) => { + queryClient.setQueryData(channelKeys.detail(id), updated); + queryClient.invalidateQueries({ queryKey: channelKeys.all }); + }, + }); +} + +/** Delete a channel. 
*/ +export function useDeleteChannel() { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: (id: number) => apiClient.del(`/api/v1/channel/${id}`), + onSuccess: (_data, id) => { + queryClient.removeQueries({ queryKey: channelKeys.detail(id) }); + queryClient.invalidateQueries({ queryKey: channelKeys.all }); + }, + }); +} + +// ── Scan Response Types ── + +export interface ScanChannelResult { + channelId: number; + channelName: string; + newItems: number; + totalFetched: number; + status: 'success' | 'error' | 'rate_limited' | 'already_running'; +} + +export interface ScanAllResult { + results: ScanChannelResult[]; + summary: { total: number; scanned: number; newItems: number; errors: number }; +} + +// ── Scan Mutations ── + +/** Trigger a manual scan for a single channel. */ +export function useScanChannel(id: number) { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: () => + apiClient.post(`/api/v1/channel/${id}/scan`), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: channelKeys.all }); + queryClient.invalidateQueries({ queryKey: channelKeys.detail(id) }); + queryClient.invalidateQueries({ queryKey: contentKeys.byChannel(id) }); + }, + }); +} + +/** Trigger a manual scan for all enabled channels. */ +export function useScanAllChannels() { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: () => + apiClient.post('/api/v1/channel/scan-all'), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: channelKeys.all }); + }, + }); +} + +/** Set the monitoring mode for a channel (cascades to content items). 
*/ +export function useSetMonitoringMode(channelId: number) { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: ({ monitoringMode }: { monitoringMode: string }) => + apiClient.put<{ success: boolean; data: Channel }>( + `/api/v1/channel/${channelId}/monitoring-mode`, + { monitoringMode }, + ), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: channelKeys.detail(channelId) }); + queryClient.invalidateQueries({ queryKey: contentKeys.byChannel(channelId) }); + }, + }); +} diff --git a/src/frontend/src/api/hooks/useContent.ts b/src/frontend/src/api/hooks/useContent.ts new file mode 100644 index 0000000..e94cca8 --- /dev/null +++ b/src/frontend/src/api/hooks/useContent.ts @@ -0,0 +1,76 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { ContentItem } from '@shared/types/index'; +import type { ApiResponse } from '@shared/types/api'; + +// ── Query Keys ── + +export const contentKeys = { + byChannel: (channelId: number) => ['content', 'channel', channelId] as const, +}; + +// ── Queries ── + +/** Fetch content items for a specific channel. */ +export function useChannelContent(channelId: number) { + return useQuery({ + queryKey: contentKeys.byChannel(channelId), + queryFn: async () => { + const response = await apiClient.get>( + `/api/v1/channel/${channelId}/content`, + ); + return response.data; + }, + enabled: channelId > 0, + }); +} + +// ── Mutations ── + +/** Enqueue a content item for download. Returns 202 with queue item. 
*/ +export function useDownloadContent() { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: (contentItemId: number) => + apiClient.post<{ success: boolean; data: unknown }>( + `/api/v1/download/${contentItemId}`, + ), + onSuccess: () => { + // Invalidate all content queries so status updates are visible + queryClient.invalidateQueries({ queryKey: ['content'] }); + }, + }); +} + +/** Toggle monitored state for a single content item via PATCH. */ +export function useToggleMonitored(channelId: number) { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: ({ contentId, monitored }: { contentId: number; monitored: boolean }) => + apiClient.patch>( + `/api/v1/content/${contentId}/monitored`, + { monitored }, + ), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: contentKeys.byChannel(channelId) }); + }, + }); +} + +/** Bulk-update monitored state for multiple content items via PATCH. */ +export function useBulkMonitored(channelId: number) { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: ({ ids, monitored }: { ids: number[]; monitored: boolean }) => + apiClient.patch>( + '/api/v1/content/bulk/monitored', + { ids, monitored }, + ), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: contentKeys.byChannel(channelId) }); + }, + }); +} diff --git a/src/frontend/src/api/hooks/useFormatProfiles.ts b/src/frontend/src/api/hooks/useFormatProfiles.ts new file mode 100644 index 0000000..90fedef --- /dev/null +++ b/src/frontend/src/api/hooks/useFormatProfiles.ts @@ -0,0 +1,81 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { FormatProfile } from '@shared/types/index'; + +// ── Query Keys ── + +export const formatProfileKeys = { + all: ['formatProfiles'] as const, +}; + +// ── Queries ── + +/** Fetch all format profiles. 
*/ +export function useFormatProfiles() { + return useQuery({ + queryKey: formatProfileKeys.all, + queryFn: () => apiClient.get('/api/v1/format-profile'), + }); +} + +// ── Mutation Payloads ── + +interface CreateFormatProfileInput { + name: string; + videoResolution?: string | null; + audioCodec?: string | null; + audioBitrate?: string | null; + containerFormat?: string | null; + isDefault?: boolean; + subtitleLanguages?: string | null; + embedSubtitles?: boolean; +} + +interface UpdateFormatProfileInput { + name?: string; + videoResolution?: string | null; + audioCodec?: string | null; + audioBitrate?: string | null; + containerFormat?: string | null; + isDefault?: boolean; + subtitleLanguages?: string | null; + embedSubtitles?: boolean; +} + +// ── Mutations ── + +/** Create a new format profile. Invalidates profile list on success. */ +export function useCreateFormatProfile() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (input: CreateFormatProfileInput) => + apiClient.post('/api/v1/format-profile', input), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: formatProfileKeys.all }); + }, + }); +} + +/** Update a format profile by ID. Invalidates profile list on success. */ +export function useUpdateFormatProfile() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: ({ id, ...input }: UpdateFormatProfileInput & { id: number }) => + apiClient.put(`/api/v1/format-profile/${id}`, input), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: formatProfileKeys.all }); + }, + }); +} + +/** Delete a format profile by ID. Invalidates profile list on success. 
*/ +export function useDeleteFormatProfile() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (id: number) => + apiClient.del(`/api/v1/format-profile/${id}`), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: formatProfileKeys.all }); + }, + }); +} diff --git a/src/frontend/src/api/hooks/useLibrary.ts b/src/frontend/src/api/hooks/useLibrary.ts new file mode 100644 index 0000000..befd206 --- /dev/null +++ b/src/frontend/src/api/hooks/useLibrary.ts @@ -0,0 +1,46 @@ +import { useQuery } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { ContentItem, ContentStatus, ContentType } from '@shared/types/index'; +import type { PaginatedResponse } from '@shared/types/api'; + +// ── Types ── + +export interface LibraryFilters { + page?: number; + pageSize?: number; + status?: ContentStatus | ''; + contentType?: ContentType | ''; + channelId?: number | ''; + search?: string; +} + +// ── Query Keys ── + +export const libraryKeys = { + all: ['library'] as const, + list: (filters: LibraryFilters) => ['library', 'list', filters] as const, +}; + +// ── Queries ── + +/** Fetch paginated content across all channels with filters. 
*/ +export function useLibraryContent(filters: LibraryFilters = {}) { + const { page = 1, pageSize = 20, status, contentType, channelId, search } = filters; + + return useQuery({ + queryKey: libraryKeys.list(filters), + queryFn: async () => { + const params = new URLSearchParams(); + params.set('page', String(page)); + params.set('pageSize', String(pageSize)); + if (status) params.set('status', status); + if (contentType) params.set('contentType', contentType); + if (channelId) params.set('channelId', String(channelId)); + if (search) params.set('search', search); + + return apiClient.get>( + `/api/v1/content?${params.toString()}`, + ); + }, + }); +} diff --git a/src/frontend/src/api/hooks/useNotifications.ts b/src/frontend/src/api/hooks/useNotifications.ts new file mode 100644 index 0000000..2dc253e --- /dev/null +++ b/src/frontend/src/api/hooks/useNotifications.ts @@ -0,0 +1,96 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { NotificationSetting } from '@shared/types/index'; + +// ── Query Keys ── + +export const notificationKeys = { + all: ['notifications'] as const, +}; + +// ── Re-export for convenience ── + +export type { NotificationSetting }; + +// ── Input Types ── + +export interface CreateNotificationInput { + type: 'discord'; + name: string; + config: { webhookUrl: string }; + enabled?: boolean; + onGrab?: boolean; + onDownload?: boolean; + onFailure?: boolean; +} + +export interface UpdateNotificationInput { + name?: string; + type?: 'discord'; + config?: { webhookUrl: string }; + enabled?: boolean; + onGrab?: boolean; + onDownload?: boolean; + onFailure?: boolean; +} + +interface TestNotificationResult { + success: boolean; + error?: string; +} + +// ── Queries ── + +/** Fetch all notification settings. 
*/ +export function useNotifications() { + return useQuery({ + queryKey: notificationKeys.all, + queryFn: () => apiClient.get('/api/v1/notification'), + }); +} + +// ── Mutations ── + +/** Create a new notification setting. Invalidates list on success. */ +export function useCreateNotification() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (input: CreateNotificationInput) => + apiClient.post('/api/v1/notification', input), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: notificationKeys.all }); + }, + }); +} + +/** Update a notification setting by ID. Invalidates list on success. */ +export function useUpdateNotification() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: ({ id, ...input }: UpdateNotificationInput & { id: number }) => + apiClient.put(`/api/v1/notification/${id}`, input), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: notificationKeys.all }); + }, + }); +} + +/** Delete a notification setting by ID. Invalidates list on success. */ +export function useDeleteNotification() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (id: number) => + apiClient.del(`/api/v1/notification/${id}`), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: notificationKeys.all }); + }, + }); +} + +/** Send a test notification for a given setting ID. 
*/ +export function useTestNotification() { + return useMutation({ + mutationFn: (id: number) => + apiClient.post(`/api/v1/notification/${id}/test`), + }); +} diff --git a/src/frontend/src/api/hooks/usePlatformSettings.ts b/src/frontend/src/api/hooks/usePlatformSettings.ts new file mode 100644 index 0000000..7a3fb13 --- /dev/null +++ b/src/frontend/src/api/hooks/usePlatformSettings.ts @@ -0,0 +1,58 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { PlatformSettings } from '@shared/types/index'; + +// ── Query Keys ── + +export const platformSettingsKeys = { + all: ['platformSettings'] as const, + detail: (platform: string) => ['platformSettings', platform] as const, +}; + +// ── Queries ── + +/** Fetch all platform settings. */ +export function usePlatformSettings() { + return useQuery({ + queryKey: platformSettingsKeys.all, + queryFn: () => apiClient.get('/api/v1/platform-settings'), + }); +} + +/** Fetch platform settings for a single platform. */ +export function usePlatformSetting(platform: string | null) { + return useQuery({ + queryKey: platformSettingsKeys.detail(platform ?? ''), + queryFn: () => apiClient.get(`/api/v1/platform-settings/${platform}`), + enabled: !!platform, + }); +} + +// ── Mutation Payload ── + +export interface UpdatePlatformSettingsInput { + platform: string; + defaultFormatProfileId?: number | null; + checkInterval?: number; + concurrencyLimit?: number; + subtitleLanguages?: string | null; + grabAllEnabled?: boolean; + grabAllOrder?: 'newest' | 'oldest'; + scanLimit?: number; + rateLimitDelay?: number; +} + +// ── Mutations ── + +/** Upsert platform settings. Invalidates list on success. 
*/ +export function useUpdatePlatformSettings() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: ({ platform, ...input }: UpdatePlatformSettingsInput) => + apiClient.put(`/api/v1/platform-settings/${platform}`, input), + onSuccess: (_data, variables) => { + queryClient.invalidateQueries({ queryKey: platformSettingsKeys.all }); + queryClient.invalidateQueries({ queryKey: platformSettingsKeys.detail(variables.platform) }); + }, + }); +} diff --git a/src/frontend/src/api/hooks/usePlaylists.ts b/src/frontend/src/api/hooks/usePlaylists.ts new file mode 100644 index 0000000..0292b9d --- /dev/null +++ b/src/frontend/src/api/hooks/usePlaylists.ts @@ -0,0 +1,50 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { Playlist } from '@shared/types/index'; +import type { ApiResponse } from '@shared/types/api'; + +// ── Query Keys ── + +export const playlistKeys = { + byChannel: (id: number) => ['playlists', 'channel', id] as const, +}; + +// ── Response Types ── + +export interface PlaylistsResponse { + playlists: Playlist[]; + mappings: Record; +} + +// ── Queries ── + +/** Fetch playlists and content-to-playlist mappings for a channel. */ +export function useChannelPlaylists(channelId: number) { + return useQuery({ + queryKey: playlistKeys.byChannel(channelId), + queryFn: async () => { + const response = await apiClient.get>( + `/api/v1/channel/${channelId}/playlists`, + ); + return response.data; + }, + enabled: channelId > 0, + }); +} + +// ── Mutations ── + +/** Refresh playlists from the platform source and persist to DB. 
*/ +export function useRefreshPlaylists(channelId: number) { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: () => + apiClient.post>( + `/api/v1/channel/${channelId}/playlists/refresh`, + ), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: playlistKeys.byChannel(channelId) }); + }, + }); +} diff --git a/src/frontend/src/api/hooks/useQueue.ts b/src/frontend/src/api/hooks/useQueue.ts new file mode 100644 index 0000000..95c7dbe --- /dev/null +++ b/src/frontend/src/api/hooks/useQueue.ts @@ -0,0 +1,59 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { QueueItem, QueueStatus } from '@shared/types/index'; + +// ── Query Keys ── + +export const queueKeys = { + all: ['queue'] as const, + list: (status?: QueueStatus | '') => ['queue', 'list', status ?? 'all'] as const, +}; + +// ── Queries ── + +/** Fetch queue items, optionally filtered by status. Auto-refreshes every 5s. */ +export function useQueue(statusFilter?: QueueStatus | '') { + return useQuery({ + queryKey: queueKeys.list(statusFilter), + queryFn: async () => { + const url = statusFilter + ? `/api/v1/queue?status=${statusFilter}` + : '/api/v1/queue'; + const response = await apiClient.get<{ success: boolean; data: QueueItem[] }>(url); + return response.data; + }, + refetchInterval: 5000, + }); +} + +// ── Mutations ── + +/** Retry a failed queue item. */ +export function useRetryQueueItem() { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: (id: number) => + apiClient.post<{ success: boolean; data: QueueItem }>( + `/api/v1/queue/${id}/retry`, + ), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: queueKeys.all }); + }, + }); +} + +/** Cancel a pending queue item. 
*/ +export function useCancelQueueItem() { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: (id: number) => + apiClient.del<{ success: boolean; data: QueueItem }>( + `/api/v1/queue/${id}`, + ), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: queueKeys.all }); + }, + }); +} diff --git a/src/frontend/src/api/hooks/useSystem.ts b/src/frontend/src/api/hooks/useSystem.ts new file mode 100644 index 0000000..eb1b01b --- /dev/null +++ b/src/frontend/src/api/hooks/useSystem.ts @@ -0,0 +1,72 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { apiClient } from '../client'; +import type { HealthResponse, SystemStatusResponse, ApiKeyResponse, AppSettingsResponse } from '@shared/types/api'; + +// ── Query Keys ── + +export const systemKeys = { + status: ['system', 'status'] as const, + health: ['system', 'health'] as const, + apiKey: ['system', 'apikey'] as const, + appSettings: ['system', 'appSettings'] as const, +}; + +// ── Queries ── + +/** Fetch system status (uptime, node version, memory, etc.). Auto-refreshes every 30s. */ +export function useSystemStatus() { + return useQuery({ + queryKey: systemKeys.status, + queryFn: () => apiClient.get('/api/v1/system/status'), + refetchInterval: 30_000, + }); +} + +/** Fetch health check results. Auto-refreshes every 30s. */ +export function useHealth() { + return useQuery({ + queryKey: systemKeys.health, + queryFn: () => apiClient.get('/api/v1/health'), + refetchInterval: 30_000, + }); +} + +/** Fetch the current API key. Does not auto-refresh (key is stable). */ +export function useApiKey() { + return useQuery({ + queryKey: systemKeys.apiKey, + queryFn: () => apiClient.get('/api/v1/system/apikey'), + staleTime: Infinity, + }); +} + +/** Regenerate the API key. Invalidates the apikey query cache on success. 
*/ +export function useRegenerateApiKey() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: () => apiClient.post('/api/v1/system/apikey/regenerate'), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: systemKeys.apiKey }); + }, + }); +} + +/** Fetch app-wide settings (check interval, concurrent downloads). */ +export function useAppSettings() { + return useQuery({ + queryKey: systemKeys.appSettings, + queryFn: () => apiClient.get('/api/v1/system/settings'), + }); +} + +/** Update app-wide settings. Accepts partial updates (only changed fields). */ +export function useUpdateAppSettings() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (data: Partial) => + apiClient.put('/api/v1/system/settings', data), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: systemKeys.appSettings }); + }, + }); +} diff --git a/src/frontend/src/components/AddChannelModal.tsx b/src/frontend/src/components/AddChannelModal.tsx new file mode 100644 index 0000000..b8cb8e4 --- /dev/null +++ b/src/frontend/src/components/AddChannelModal.tsx @@ -0,0 +1,380 @@ +import { useState, useEffect, useMemo } from 'react'; +import { Modal } from './Modal'; +import { useCreateChannel } from '../api/hooks/useChannels'; +import { usePlatformSettings } from '../api/hooks/usePlatformSettings'; +import { useFormatProfiles } from '../api/hooks/useFormatProfiles'; +import { Loader, CheckCircle } from 'lucide-react'; +import type { Platform } from '@shared/types/index'; + +// ── Platform Detection ── + +/** + * Client-side platform detection from URL. + * Simplified patterns matching the backend's platform-source.ts logic. 
+ */ +function detectPlatform(url: string): Platform | null { + if (!url) return null; + + // YouTube patterns + if ( + /youtube\.com\/@/.test(url) || + /youtube\.com\/channel\//.test(url) || + /youtube\.com\/c\//.test(url) || + /youtube\.com\/user\//.test(url) + ) { + return 'youtube'; + } + + // SoundCloud patterns — match artist pages, reject track/set URLs + if (/soundcloud\.com\/[^/]+\/?$/.test(url) && !/\/(tracks|sets)\//.test(url)) { + return 'soundcloud'; + } + + return null; +} + +const PLATFORM_LABELS: Record = { + youtube: 'YouTube', + soundcloud: 'SoundCloud', +}; + +// ── Component ── + +interface AddChannelModalProps { + open: boolean; + onClose: () => void; +} + +export function AddChannelModal({ open, onClose }: AddChannelModalProps) { + const [url, setUrl] = useState(''); + const [checkInterval, setCheckInterval] = useState(''); + const [formatProfileId, setFormatProfileId] = useState(undefined); + const [grabAll, setGrabAll] = useState(false); + const [grabAllOrder, setGrabAllOrder] = useState<'newest' | 'oldest'>('newest'); + + const createChannel = useCreateChannel(); + const { data: platformSettingsList } = usePlatformSettings(); + const { data: formatProfiles } = useFormatProfiles(); + + // Detect platform from URL + const detectedPlatform = useMemo(() => detectPlatform(url), [url]); + + // Load platform defaults when platform is detected + useEffect(() => { + if (!detectedPlatform || !platformSettingsList) return; + + const settings = platformSettingsList.find((s) => s.platform === detectedPlatform); + if (!settings) return; + + // Pre-fill check interval from platform defaults (only if user hasn't manually edited) + if (settings.checkInterval && !checkInterval) { + setCheckInterval(String(settings.checkInterval)); + } + + // Pre-fill format profile from platform defaults + if (settings.defaultFormatProfileId && !formatProfileId) { + setFormatProfileId(settings.defaultFormatProfileId); + } + + // Pre-fill grab-all defaults for YouTube + if 
(detectedPlatform === 'youtube') { + if (settings.grabAllEnabled) { + setGrabAll(true); + } + if (settings.grabAllOrder) { + setGrabAllOrder(settings.grabAllOrder); + } + } + }, [detectedPlatform, platformSettingsList]); // eslint-disable-line react-hooks/exhaustive-deps + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + if (!url.trim()) return; + + createChannel.mutate( + { + url: url.trim(), + checkInterval: checkInterval ? parseInt(checkInterval, 10) : undefined, + formatProfileId: formatProfileId ?? undefined, + grabAll: detectedPlatform === 'youtube' ? grabAll : undefined, + grabAllOrder: detectedPlatform === 'youtube' && grabAll ? grabAllOrder : undefined, + }, + { + onSuccess: () => { + resetForm(); + onClose(); + }, + }, + ); + }; + + const resetForm = () => { + setUrl(''); + setCheckInterval(''); + setFormatProfileId(undefined); + setGrabAll(false); + setGrabAllOrder('newest'); + createChannel.reset(); + }; + + const handleClose = () => { + if (!createChannel.isPending) { + resetForm(); + onClose(); + } + }; + + return ( + +
+ {/* URL input */} +
+ + setUrl(e.target.value)} + placeholder="https://www.youtube.com/@channel or https://soundcloud.com/artist" + required + disabled={createChannel.isPending} + style={{ width: '100%' }} + autoFocus + /> + + {/* Platform detection indicator */} + {detectedPlatform && ( +
+
+ )} +
+ + {/* Check interval (optional) */} +
+ + setCheckInterval(e.target.value)} + placeholder="360 (default: 6 hours)" + disabled={createChannel.isPending} + style={{ width: '100%' }} + /> +
+ + {/* Format Profile (optional, shown when platform detected) */} + {detectedPlatform && formatProfiles && formatProfiles.length > 0 && ( +
+ + +
+ )} + + {/* Grab All — YouTube only */} + {detectedPlatform === 'youtube' && ( + <> +
+ setGrabAll(e.target.checked)} + disabled={createChannel.isPending} + style={{ width: 'auto' }} + /> + +
+ + {/* Download order — shown when grab-all enabled */} + {grabAll && ( +
+ + +

+ Back-catalog items will be enqueued at low priority. +

+
+ )} + + )} + + {/* Error display */} + {createChannel.isError && ( +
+ {createChannel.error instanceof Error + ? createChannel.error.message + : 'Failed to add channel'} +
+ )} + + {/* Actions */} +
+ + +
+
+
+ ); +} diff --git a/src/frontend/src/components/FilterBar.tsx b/src/frontend/src/components/FilterBar.tsx new file mode 100644 index 0000000..6a0a556 --- /dev/null +++ b/src/frontend/src/components/FilterBar.tsx @@ -0,0 +1,80 @@ +// ── Types ── + +export interface FilterDefinition { + key: string; + label: string; + options: { value: string; label: string }[]; +} + +interface FilterBarProps { + filters: FilterDefinition[]; + values: Record; + onChange: (key: string, value: string) => void; +} + +// ── Component ── + +/** + * Row of dropdown/select filters. + * Styled to match *arr select elements. + */ +export function FilterBar({ filters, values, onChange }: FilterBarProps) { + if (filters.length === 0) return null; + + const selectStyle: React.CSSProperties = { + padding: 'var(--space-2) var(--space-3)', + paddingRight: 'var(--space-8)', + backgroundColor: 'var(--bg-input)', + border: '1px solid var(--border)', + borderRadius: 'var(--radius-md)', + color: 'var(--text-primary)', + fontSize: 'var(--font-size-sm)', + cursor: 'pointer', + outline: 'none', + appearance: 'none' as const, + backgroundImage: `url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 24 24' fill='none' stroke='%238b8d97' stroke-width='2'%3e%3cpath d='m6 9 6 6 6-6'/%3e%3c/svg%3e")`, + backgroundRepeat: 'no-repeat', + backgroundPosition: 'right var(--space-2) center', + transition: 'border-color var(--transition-fast)', + }; + + return ( +
+ {filters.map((filter) => ( +
+ + +
+ ))} +
+ ); +} diff --git a/src/frontend/src/components/FormatProfileForm.tsx b/src/frontend/src/components/FormatProfileForm.tsx new file mode 100644 index 0000000..052c8a8 --- /dev/null +++ b/src/frontend/src/components/FormatProfileForm.tsx @@ -0,0 +1,319 @@ +import { useState, useCallback, type FormEvent } from 'react'; +import { Loader } from 'lucide-react'; +import type { FormatProfile } from '@shared/types/index'; + +// ── Dropdown options ── + +const RESOLUTION_OPTIONS = ['Any', 'Best', '2160p', '1080p', '720p', '480p'] as const; +const CODEC_OPTIONS = ['Any', 'AAC', 'MP3', 'OPUS', 'FLAC'] as const; +const BITRATE_OPTIONS = ['Any', 'Best', '320k', '256k', '192k', '128k'] as const; +const CONTAINER_OPTIONS = ['Any', 'MP4', 'MKV', 'WEBM', 'MP3'] as const; + +// ── Types ── + +export interface FormatProfileFormValues { + name: string; + videoResolution: string | null; + audioCodec: string | null; + audioBitrate: string | null; + containerFormat: string | null; + isDefault: boolean; + subtitleLanguages: string | null; + embedSubtitles: boolean; +} + +interface FormatProfileFormProps { + /** Existing profile for edit mode. Omit for create mode. */ + profile?: FormatProfile; + onSubmit: (values: FormatProfileFormValues) => void; + onCancel: () => void; + isPending?: boolean; + error?: string | null; +} + +// ── Helpers ── + +function toDropdownValue(val: string | null): string { + return val ?? 'Any'; +} + +function fromDropdownValue(val: string): string | null { + return val === 'Any' ? 
null : val; +} + +// ── Shared styles ── + +const labelStyle: React.CSSProperties = { + display: 'block', + fontSize: 'var(--font-size-sm)', + fontWeight: 500, + color: 'var(--text-secondary)', + marginBottom: 'var(--space-1)', +}; + +const inputStyle: React.CSSProperties = { + width: '100%', + padding: 'var(--space-2) var(--space-3)', + backgroundColor: 'var(--bg-input)', + border: '1px solid var(--border)', + borderRadius: 'var(--radius-md)', + color: 'var(--text-primary)', + fontSize: 'var(--font-size-base)', +}; + +const selectStyle: React.CSSProperties = { + ...inputStyle, + cursor: 'pointer', + appearance: 'none' as const, + backgroundImage: `url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 24 24' fill='none' stroke='%238b8d97' stroke-width='2' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpolyline points='6 9 12 15 18 9'%3E%3C/polyline%3E%3C/svg%3E")`, + backgroundRepeat: 'no-repeat', + backgroundPosition: 'right var(--space-3) center', + paddingRight: 'var(--space-8)', +}; + +const fieldGroupStyle: React.CSSProperties = { + marginBottom: 'var(--space-4)', +}; + +// ── Component ── + +export function FormatProfileForm({ + profile, + onSubmit, + onCancel, + isPending = false, + error, +}: FormatProfileFormProps) { + const [name, setName] = useState(profile?.name ?? ''); + const [videoResolution, setVideoResolution] = useState(toDropdownValue(profile?.videoResolution ?? null)); + const [audioCodec, setAudioCodec] = useState(toDropdownValue(profile?.audioCodec ?? null)); + const [audioBitrate, setAudioBitrate] = useState(toDropdownValue(profile?.audioBitrate ?? null)); + const [containerFormat, setContainerFormat] = useState(toDropdownValue(profile?.containerFormat ?? null)); + const [isDefault, setIsDefault] = useState(profile?.isDefault ?? false); + const [subtitleLanguages, setSubtitleLanguages] = useState(profile?.subtitleLanguages ?? 
''); + const [embedSubtitles, setEmbedSubtitles] = useState(profile?.embedSubtitles ?? false); + + const handleSubmit = useCallback( + (e: FormEvent) => { + e.preventDefault(); + if (!name.trim()) return; + onSubmit({ + name: name.trim(), + videoResolution: fromDropdownValue(videoResolution), + audioCodec: fromDropdownValue(audioCodec), + audioBitrate: fromDropdownValue(audioBitrate), + containerFormat: fromDropdownValue(containerFormat), + isDefault, + subtitleLanguages: subtitleLanguages.trim() || null, + embedSubtitles, + }); + }, + [name, videoResolution, audioCodec, audioBitrate, containerFormat, isDefault, subtitleLanguages, embedSubtitles, onSubmit], + ); + + return ( +
+ {error && ( +
+ {error} +
+ )} + + {/* Name */} +
+ + setName(e.target.value)} + placeholder="e.g. HD Video, Music Archive" + required + style={inputStyle} + autoFocus + /> +
+ + {/* Two-column grid for dropdowns */} +
+ {/* Video Resolution */} +
+ + +
+ + {/* Audio Codec */} +
+ + +
+ + {/* Audio Bitrate */} +
+ + +
+ + {/* Container Format */} +
+ + +
+
+ + {/* Subtitle Languages */} +
+ + setSubtitleLanguages(e.target.value)} + placeholder="e.g. en,es,fr" + style={inputStyle} + /> + + Comma-separated language codes. yt-dlp will download subtitles when available. + +
+ + {/* Embed Subtitles checkbox */} +
+ setEmbedSubtitles(e.target.checked)} + style={{ + width: 16, + height: 16, + accentColor: 'var(--accent)', + cursor: 'pointer', + }} + /> + +
+ + {/* Is Default checkbox */} +
+ setIsDefault(e.target.checked)} + style={{ + width: 16, + height: 16, + accentColor: 'var(--accent)', + cursor: 'pointer', + }} + /> + +
+ + {/* Action buttons */} +
+ + +
+
+ ); +} diff --git a/src/frontend/src/components/HealthStatus.tsx b/src/frontend/src/components/HealthStatus.tsx new file mode 100644 index 0000000..50cbc59 --- /dev/null +++ b/src/frontend/src/components/HealthStatus.tsx @@ -0,0 +1,351 @@ +import { AlertTriangle, CheckCircle2, HardDrive, Play, Square, Terminal } from 'lucide-react'; +import type { ComponentHealth } from '@shared/types/api'; +import { formatBytes } from '../utils/format'; + +// ── Status → color mapping ── + +const STATUS_COLORS: Record = { + healthy: { dot: 'var(--success)', text: 'var(--success)', bg: 'var(--success-bg)' }, + degraded: { dot: 'var(--warning)', text: 'var(--warning)', bg: 'var(--warning-bg)' }, + unhealthy: { dot: 'var(--danger)', text: 'var(--danger)', bg: 'var(--danger-bg)' }, +}; + +const DEFAULT_COLORS = { dot: 'var(--text-muted)', text: 'var(--text-muted)', bg: 'var(--bg-hover)' }; + +// ── Friendly display names for component keys ── + +const COMPONENT_LABELS: Record = { + scheduler: 'Scheduler', + ytDlp: 'yt-dlp', + diskSpace: 'Disk Space', + recentErrors: 'Recent Errors', + database: 'Database', +}; + +// ── Component ── + +interface HealthStatusProps { + components: ComponentHealth[]; + overallStatus: 'healthy' | 'degraded' | 'unhealthy'; +} + +export function HealthStatus({ components, overallStatus }: HealthStatusProps) { + const overallColors = STATUS_COLORS[overallStatus] ?? DEFAULT_COLORS; + const overallLabel = overallStatus.charAt(0).toUpperCase() + overallStatus.slice(1); + + return ( +
+ {/* Overall status banner */} +
+
+ + {/* Component cards */} +
+ {components.map((comp) => ( + + ))} +
+ + {components.length === 0 && ( +
+ No health components available. +
+ )} +
+ ); +} + +// ── Component Card ── + +function ComponentCard({ component }: { component: ComponentHealth }) { + const colors = STATUS_COLORS[component.status] ?? DEFAULT_COLORS; + const label = COMPONENT_LABELS[component.name] ?? component.name; + + return ( +
+ {/* Card header: name + status badge */} +
+ + {label} + + + +
+ + {/* Custom detail rendering per component type */} + +
+ ); +} + +// ── Detail Renderers ── + +function ComponentDetail({ component }: { component: ComponentHealth }) { + switch (component.name) { + case 'diskSpace': + return ; + case 'ytDlp': + return ; + case 'scheduler': + return ; + case 'recentErrors': + return ; + default: + return ; + } +} + +// ── Disk Space ── + +function DiskSpaceDetail({ component }: { component: ComponentHealth }) { + const details = component.details as + | { availableBytes?: number; totalBytes?: number; freePercent?: number } + | undefined; + + if (!details?.totalBytes || !details.availableBytes) { + return ; + } + + const usedPercent = 100 - (details.freePercent ?? 0); + + // Color: green <70%, yellow 70-90%, red >90% + let barColor = 'var(--success)'; + if (usedPercent > 90) barColor = 'var(--danger)'; + else if (usedPercent > 70) barColor = 'var(--warning)'; + + return ( +
+
+
+ + {/* Usage bar */} +
+
+
+ + + {usedPercent.toFixed(1)}% used + +
+ ); +} + +// ── yt-dlp ── + +function YtDlpDetail({ component }: { component: ComponentHealth }) { + const details = component.details as { version?: string } | undefined; + + return ( +
+
+ ); +} + +// ── Scheduler ── + +function SchedulerDetail({ component }: { component: ComponentHealth }) { + const details = component.details as { channelCount?: number; running?: boolean } | undefined; + const isRunning = component.status === 'healthy'; + + return ( +
+ {isRunning ? ( +
+ ); +} + +// ── Recent Errors ── + +function RecentErrorsDetail({ component }: { component: ComponentHealth }) { + const details = component.details as { errorCount?: number } | undefined; + const count = details?.errorCount ?? 0; + + return ( +
+ {count > 0 ? ( + <> +
+ ); +} + +// ── Generic Fallback ── + +function GenericDetail({ component }: { component: ComponentHealth }) { + return ( + + {component.message ?? '—'} + + ); +} diff --git a/src/frontend/src/components/Modal.tsx b/src/frontend/src/components/Modal.tsx new file mode 100644 index 0000000..2e2317b --- /dev/null +++ b/src/frontend/src/components/Modal.tsx @@ -0,0 +1,170 @@ +import { type ReactNode, useEffect, useRef, useCallback } from 'react'; +import { X } from 'lucide-react'; + +// ── Component ── + +interface ModalProps { + title: string; + open: boolean; + onClose: () => void; + children: ReactNode; + /** Width of the modal content. Defaults to 480px. */ + width?: number; +} + +/** + * Dark-themed overlay modal with accessible focus management. + * + * - Focus trap: tab cycles within modal + * - Escape to close + * - aria-modal and role="dialog" + * - Click overlay to close + */ +export function Modal({ title, open, onClose, children, width = 480 }: ModalProps) { + const modalRef = useRef(null); + const previousFocus = useRef(null); + + // Save and restore focus + useEffect(() => { + if (open) { + previousFocus.current = document.activeElement as HTMLElement | null; + // Focus the modal container after render + requestAnimationFrame(() => { + modalRef.current?.focus(); + }); + } else if (previousFocus.current) { + previousFocus.current.focus(); + previousFocus.current = null; + } + }, [open]); + + // Escape key handler + useEffect(() => { + if (!open) return; + const handleKey = (e: KeyboardEvent) => { + if (e.key === 'Escape') { + e.stopPropagation(); + onClose(); + } + }; + document.addEventListener('keydown', handleKey); + return () => document.removeEventListener('keydown', handleKey); + }, [open, onClose]); + + // Focus trap + const handleKeyDown = useCallback( + (e: React.KeyboardEvent) => { + if (e.key !== 'Tab' || !modalRef.current) return; + + const focusable = modalRef.current.querySelectorAll( + 'button, [href], input, select, textarea, 
[tabindex]:not([tabindex="-1"])', + ); + if (focusable.length === 0) return; + + const first = focusable[0]; + const last = focusable[focusable.length - 1]; + + if (e.shiftKey) { + if (document.activeElement === first) { + e.preventDefault(); + last.focus(); + } + } else { + if (document.activeElement === last) { + e.preventDefault(); + first.focus(); + } + } + }, + [], + ); + + if (!open) return null; + + return ( +
{ + if (e.target === e.currentTarget) onClose(); + }} + > +
+ {/* Header */} +
+

+ {title} +

+ +
+ + {/* Body */} +
+ {children} +
+
+
+ ); +} diff --git a/src/frontend/src/components/NotificationForm.tsx b/src/frontend/src/components/NotificationForm.tsx new file mode 100644 index 0000000..601bacc --- /dev/null +++ b/src/frontend/src/components/NotificationForm.tsx @@ -0,0 +1,280 @@ +import { useState, useCallback, type FormEvent } from 'react'; +import { Loader } from 'lucide-react'; + +// ── Types ── + +export interface NotificationFormValues { + name: string; + webhookUrl: string; + enabled: boolean; + onGrab: boolean; + onDownload: boolean; + onFailure: boolean; +} + +interface NotificationFormProps { + /** Existing values for edit mode. Omit for create mode. */ + initialValues?: Partial; + onSubmit: (values: NotificationFormValues) => void; + onCancel: () => void; + isPending?: boolean; + error?: string | null; +} + +// ── Shared styles (matching FormatProfileForm) ── + +const labelStyle: React.CSSProperties = { + display: 'block', + fontSize: 'var(--font-size-sm)', + fontWeight: 500, + color: 'var(--text-secondary)', + marginBottom: 'var(--space-1)', +}; + +const inputStyle: React.CSSProperties = { + width: '100%', + padding: 'var(--space-2) var(--space-3)', + backgroundColor: 'var(--bg-input)', + border: '1px solid var(--border)', + borderRadius: 'var(--radius-md)', + color: 'var(--text-primary)', + fontSize: 'var(--font-size-base)', +}; + +const fieldGroupStyle: React.CSSProperties = { + marginBottom: 'var(--space-4)', +}; + +const checkboxRowStyle: React.CSSProperties = { + display: 'flex', + alignItems: 'center', + gap: 'var(--space-2)', + marginBottom: 'var(--space-2)', +}; + +const checkboxStyle: React.CSSProperties = { + width: 16, + height: 16, + accentColor: 'var(--accent)', + cursor: 'pointer', +}; + +const checkboxLabelStyle: React.CSSProperties = { + fontSize: 'var(--font-size-sm)', + color: 'var(--text-secondary)', + cursor: 'pointer', +}; + +// ── Component ── + +export function NotificationForm({ + initialValues, + onSubmit, + onCancel, + isPending = false, + error, +}: 
NotificationFormProps) { + const [name, setName] = useState(initialValues?.name ?? ''); + const [webhookUrl, setWebhookUrl] = useState(initialValues?.webhookUrl ?? ''); + const [enabled, setEnabled] = useState(initialValues?.enabled ?? true); + const [onGrab, setOnGrab] = useState(initialValues?.onGrab ?? true); + const [onDownload, setOnDownload] = useState(initialValues?.onDownload ?? true); + const [onFailure, setOnFailure] = useState(initialValues?.onFailure ?? true); + + const [validationError, setValidationError] = useState(null); + + const isValid = name.trim().length > 0 && webhookUrl.trim().startsWith('https://'); + + const handleSubmit = useCallback( + (e: FormEvent) => { + e.preventDefault(); + setValidationError(null); + + if (!name.trim()) { + setValidationError('Name is required.'); + return; + } + if (!webhookUrl.trim().startsWith('https://')) { + setValidationError('Webhook URL must start with https://'); + return; + } + + onSubmit({ + name: name.trim(), + webhookUrl: webhookUrl.trim(), + enabled, + onGrab, + onDownload, + onFailure, + }); + }, + [name, webhookUrl, enabled, onGrab, onDownload, onFailure, onSubmit], + ); + + const displayError = validationError ?? error; + + return ( +
+ {displayError && ( +
+ {displayError} +
+ )} + + {/* Name */} +
+ + setName(e.target.value)} + placeholder="e.g. My Discord Server" + required + style={inputStyle} + autoFocus + /> +
+ + {/* Webhook URL */} +
+ + setWebhookUrl(e.target.value)} + placeholder="https://discord.com/api/webhooks/..." + required + style={inputStyle} + /> + + Create a webhook in Discord → Server Settings → Integrations → Webhooks + +
+ + {/* Enabled */} +
+ setEnabled(e.target.checked)} + style={checkboxStyle} + /> + +
+ + {/* Event Toggles */} +
+ +
+
+ setOnGrab(e.target.checked)} + style={checkboxStyle} + /> + +
+
+ setOnDownload(e.target.checked)} + style={checkboxStyle} + /> + +
+
+ setOnFailure(e.target.checked)} + style={checkboxStyle} + /> + +
+
+
+ + {/* Action buttons */} +
+ + +
+
+ ); +} diff --git a/src/frontend/src/components/Pagination.tsx b/src/frontend/src/components/Pagination.tsx new file mode 100644 index 0000000..65ce543 --- /dev/null +++ b/src/frontend/src/components/Pagination.tsx @@ -0,0 +1,146 @@ +import { ChevronLeft, ChevronRight } from 'lucide-react'; + +// ── Types ── + +interface PaginationProps { + page: number; + totalPages: number; + totalItems: number; + onPageChange: (page: number) => void; +} + +// ── Component ── + +/** + * Page navigation with previous/next buttons and page numbers. + * Shows "Page X of Y" and total items. *Arr-styled with dark buttons. + */ +export function Pagination({ page, totalPages, totalItems, onPageChange }: PaginationProps) { + if (totalPages <= 1) { + return ( +
+ {totalItems} item{totalItems !== 1 ? 's' : ''} +
+ ); + } + + // Build page number range — show at most 7 page buttons + const pageNumbers: number[] = []; + const maxVisible = 7; + + if (totalPages <= maxVisible) { + for (let i = 1; i <= totalPages; i++) pageNumbers.push(i); + } else { + pageNumbers.push(1); + let start = Math.max(2, page - 2); + let end = Math.min(totalPages - 1, page + 2); + + // Adjust if we're near the edges + if (page <= 3) { + end = Math.min(totalPages - 1, 5); + } else if (page >= totalPages - 2) { + start = Math.max(2, totalPages - 4); + } + + if (start > 2) pageNumbers.push(-1); // ellipsis + for (let i = start; i <= end; i++) pageNumbers.push(i); + if (end < totalPages - 1) pageNumbers.push(-2); // ellipsis + pageNumbers.push(totalPages); + } + + const buttonBase: React.CSSProperties = { + display: 'inline-flex', + alignItems: 'center', + justifyContent: 'center', + minWidth: 32, + height: 32, + padding: '0 var(--space-2)', + border: '1px solid var(--border)', + borderRadius: 'var(--radius-md)', + backgroundColor: 'var(--bg-input)', + color: 'var(--text-primary)', + fontSize: 'var(--font-size-sm)', + cursor: 'pointer', + transition: 'background-color var(--transition-fast)', + }; + + const disabledStyle: React.CSSProperties = { + ...buttonBase, + opacity: 0.4, + cursor: 'not-allowed', + }; + + const activeStyle: React.CSSProperties = { + ...buttonBase, + backgroundColor: 'var(--accent)', + color: 'var(--text-inverse)', + borderColor: 'var(--accent)', + fontWeight: 600, + }; + + return ( +
+ + {totalItems} item{totalItems !== 1 ? 's' : ''} · Page {page} of {totalPages} + + +
+ + + {pageNumbers.map((num, idx) => { + if (num < 0) { + return ( + + … + + ); + } + return ( + + ); + })} + + +
+
+ ); +} diff --git a/src/frontend/src/components/PlatformBadge.tsx b/src/frontend/src/components/PlatformBadge.tsx new file mode 100644 index 0000000..1a785b3 --- /dev/null +++ b/src/frontend/src/components/PlatformBadge.tsx @@ -0,0 +1,46 @@ +import type { Platform } from '@shared/types/index'; + +// ── Platform → Style mapping ── + +const PLATFORM_STYLES: Record = { + youtube: { color: '#ff0000', label: 'YouTube' }, + soundcloud: { color: '#ff7700', label: 'SoundCloud' }, +}; + +const DEFAULT_STYLE = { color: 'var(--text-secondary)', label: 'Unknown' }; + +// ── Component ── + +interface PlatformBadgeProps { + platform: Platform; +} + +export function PlatformBadge({ platform }: PlatformBadgeProps) { + const style = PLATFORM_STYLES[platform] ?? DEFAULT_STYLE; + + return ( + + + ); +} diff --git a/src/frontend/src/components/PlatformSettingsForm.tsx b/src/frontend/src/components/PlatformSettingsForm.tsx new file mode 100644 index 0000000..0d0a6e0 --- /dev/null +++ b/src/frontend/src/components/PlatformSettingsForm.tsx @@ -0,0 +1,314 @@ +import { useState, useCallback, type FormEvent } from 'react'; +import { Loader } from 'lucide-react'; +import type { PlatformSettings, FormatProfile } from '@shared/types/index'; + +// ── Types ── + +export interface PlatformSettingsFormValues { + defaultFormatProfileId: number | null; + checkInterval: number; + concurrencyLimit: number; + subtitleLanguages: string; + grabAllEnabled: boolean; + grabAllOrder: 'newest' | 'oldest'; + scanLimit: number; + rateLimitDelay: number; +} + +interface PlatformSettingsFormProps { + platform: string; + settings: PlatformSettings | null; + formatProfiles: FormatProfile[]; + onSubmit: (values: PlatformSettingsFormValues) => void; + onCancel: () => void; + isPending?: boolean; + error?: string | null; +} + +// ── Shared styles (match FormatProfileForm) ── + +const labelStyle: React.CSSProperties = { + display: 'block', + fontSize: 'var(--font-size-sm)', + fontWeight: 500, + color: 
'var(--text-secondary)', + marginBottom: 'var(--space-1)', +}; + +const inputStyle: React.CSSProperties = { + width: '100%', + padding: 'var(--space-2) var(--space-3)', + backgroundColor: 'var(--bg-input)', + border: '1px solid var(--border)', + borderRadius: 'var(--radius-md)', + color: 'var(--text-primary)', + fontSize: 'var(--font-size-base)', +}; + +const selectStyle: React.CSSProperties = { + ...inputStyle, + cursor: 'pointer', + appearance: 'none' as const, + backgroundImage: `url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 24 24' fill='none' stroke='%238b8d97' stroke-width='2' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpolyline points='6 9 12 15 18 9'%3E%3C/polyline%3E%3C/svg%3E")`, + backgroundRepeat: 'no-repeat', + backgroundPosition: 'right var(--space-3) center', + paddingRight: 'var(--space-8)', +}; + +const fieldGroupStyle: React.CSSProperties = { + marginBottom: 'var(--space-4)', +}; + +const hintStyle: React.CSSProperties = { + fontSize: 'var(--font-size-xs)', + color: 'var(--text-muted)', + marginTop: 'var(--space-1)', + display: 'block', +}; + +// ── Component ── + +export function PlatformSettingsForm({ + platform, + settings, + formatProfiles, + onSubmit, + onCancel, + isPending = false, + error, +}: PlatformSettingsFormProps) { + const [defaultFormatProfileId, setDefaultFormatProfileId] = useState( + settings?.defaultFormatProfileId ?? null, + ); + const [checkInterval, setCheckInterval] = useState(settings?.checkInterval ?? 360); + const [concurrencyLimit, setConcurrencyLimit] = useState(settings?.concurrencyLimit ?? 2); + const [subtitleLanguages, setSubtitleLanguages] = useState(settings?.subtitleLanguages ?? ''); + const [grabAllEnabled, setGrabAllEnabled] = useState(settings?.grabAllEnabled ?? false); + const [grabAllOrder, setGrabAllOrder] = useState<'newest' | 'oldest'>(settings?.grabAllOrder ?? 'newest'); + const [scanLimit, setScanLimit] = useState(settings?.scanLimit ?? 
100); + const [rateLimitDelay, setRateLimitDelay] = useState(settings?.rateLimitDelay ?? 1000); + + const handleSubmit = useCallback( + (e: FormEvent) => { + e.preventDefault(); + onSubmit({ + defaultFormatProfileId, + checkInterval, + concurrencyLimit, + subtitleLanguages: subtitleLanguages.trim(), + grabAllEnabled, + grabAllOrder, + scanLimit, + rateLimitDelay, + }); + }, + [defaultFormatProfileId, checkInterval, concurrencyLimit, subtitleLanguages, grabAllEnabled, grabAllOrder, scanLimit, rateLimitDelay, onSubmit], + ); + + const platformLabel = platform === 'youtube' ? 'YouTube' : platform === 'soundcloud' ? 'SoundCloud' : platform; + + return ( +
+ {error && ( +
+ {error} +
+ )} + +

+ Configure defaults for {platformLabel} channels. + These are applied automatically when adding new channels from this platform. +

+ + {/* Default Format Profile */} +
+ + +
+ + {/* Two-column grid for numeric inputs */} +
+ {/* Check Interval */} +
+ + setCheckInterval(Number(e.target.value) || 1)} + placeholder="360" + style={inputStyle} + /> + How often to check for new content +
+ + {/* Concurrency Limit */} +
+ + setConcurrencyLimit(Math.min(10, Math.max(1, Number(e.target.value) || 1)))} + placeholder="2" + style={inputStyle} + /> + Max simultaneous downloads (1–10) +
+ + {/* Scan Limit */} +
+ + setScanLimit(Math.min(1000, Math.max(10, Number(e.target.value) || 100)))} + placeholder="100" + style={inputStyle} + /> + Maximum videos to check per scan +
+ + {/* Rate Limit Delay */} +
+ + setRateLimitDelay(Math.min(10000, Math.max(0, Number(e.target.value) || 0)))} + placeholder="1000" + style={inputStyle} + /> + Milliseconds between YouTube API calls during enrichment +
+
+ + {/* Subtitle Languages */} +
+ + setSubtitleLanguages(e.target.value)} + placeholder="en,es,fr" + style={inputStyle} + /> + Comma-separated language codes. yt-dlp will download subtitles when available. +
+ + {/* Grab All Enabled */} +
+ setGrabAllEnabled(e.target.checked)} + style={{ + width: 16, + height: 16, + accentColor: 'var(--accent)', + cursor: 'pointer', + }} + /> + +
+ + {/* Grab All Order */} +
+ + + Order in which back-catalog items are queued when grabbing all content. +
+ + {/* Action buttons */} +
+ + +
+
+ ); +} diff --git a/src/frontend/src/components/ProgressBar.tsx b/src/frontend/src/components/ProgressBar.tsx new file mode 100644 index 0000000..d7760e3 --- /dev/null +++ b/src/frontend/src/components/ProgressBar.tsx @@ -0,0 +1,73 @@ +import { type CSSProperties, memo } from 'react'; + +interface ProgressBarProps { + /** Current value (e.g. downloaded count) */ + value: number; + /** Maximum value (e.g. total count) */ + max: number; +} + +const containerStyle: CSSProperties = { + display: 'flex', + alignItems: 'center', + gap: 'var(--space-2)', + minWidth: 0, +}; + +const trackStyle: CSSProperties = { + flex: 1, + height: 6, + backgroundColor: 'var(--bg-hover)', + borderRadius: 'var(--radius-sm)', + overflow: 'hidden', +}; + +const labelStyle: CSSProperties = { + fontSize: 'var(--font-size-xs)', + color: 'var(--text-muted)', + fontVariantNumeric: 'tabular-nums', + whiteSpace: 'nowrap', + flexShrink: 0, +}; + +function ProgressBarInner({ value, max }: ProgressBarProps) { + // No content — render a muted placeholder + if (max === 0) { + return ( +
+
+ +
+ ); + } + + const percentage = Math.min(Math.round((value / max) * 100), 100); + + const fillStyle: CSSProperties = { + width: `${percentage}%`, + height: '100%', + backgroundColor: 'var(--success)', + borderRadius: 'var(--radius-sm)', + transition: 'width 0.3s ease', + }; + + return ( +
+
+
+
+ + {value} / {max} + +
+ ); +} + +export const ProgressBar = memo(ProgressBarInner); diff --git a/src/frontend/src/components/QualityLabel.tsx b/src/frontend/src/components/QualityLabel.tsx new file mode 100644 index 0000000..5962a6d --- /dev/null +++ b/src/frontend/src/components/QualityLabel.tsx @@ -0,0 +1,110 @@ +import type { QualityInfo } from '@shared/types/index'; +import { AlertTriangle } from 'lucide-react'; +import { useState } from 'react'; + +// ── Component ── + +interface QualityLabelProps { + quality: QualityInfo | null; +} + +/** + * Displays actual quality metadata with warning indicators. + * Shows resolution, codec, and bitrate when available. + * Warnings render as a hover tooltip with a warning icon. + * Shows "—" when no quality metadata exists. + * + * Covers R003: quality honesty — no upscale deception. + */ +export function QualityLabel({ quality }: QualityLabelProps) { + const [showTooltip, setShowTooltip] = useState(false); + + if (!quality) { + return ( + + ); + } + + const parts: string[] = []; + if (quality.actualResolution) parts.push(quality.actualResolution); + if (quality.actualCodec) parts.push(quality.actualCodec); + if (quality.actualBitrate) parts.push(quality.actualBitrate); + if (quality.containerFormat) parts.push(quality.containerFormat); + + if (parts.length === 0) { + return ( + + ); + } + + const hasWarnings = quality.qualityWarnings && quality.qualityWarnings.length > 0; + + return ( + + + {parts.join(' · ')} + + {hasWarnings && ( + setShowTooltip(true)} + onMouseLeave={() => setShowTooltip(false)} + onFocus={() => setShowTooltip(true)} + onBlur={() => setShowTooltip(false)} + tabIndex={0} + role="button" + aria-label={`Quality warnings: ${quality.qualityWarnings.join(', ')}`} + > + + )} + + ); +} diff --git a/src/frontend/src/components/SearchBar.tsx b/src/frontend/src/components/SearchBar.tsx new file mode 100644 index 0000000..a33b690 --- /dev/null +++ b/src/frontend/src/components/SearchBar.tsx @@ -0,0 +1,96 @@ +import { Search } from 
'lucide-react'; +import { useState, useEffect, useRef } from 'react'; + +// ── Types ── + +interface SearchBarProps { + value: string; + onChange: (value: string) => void; + placeholder?: string; + /** Debounce delay in ms. Defaults to 300. */ + debounce?: number; +} + +// ── Component ── + +/** + * Text input with search icon and debounced onChange callback. + * Styled to match *arr input fields. + */ +export function SearchBar({ + value, + onChange, + placeholder = 'Search...', + debounce = 300, +}: SearchBarProps) { + const [localValue, setLocalValue] = useState(value); + const timerRef = useRef | null>(null); + + // Sync external value changes + useEffect(() => { + setLocalValue(value); + }, [value]); + + const handleChange = (e: React.ChangeEvent) => { + const next = e.target.value; + setLocalValue(next); + + if (timerRef.current) clearTimeout(timerRef.current); + timerRef.current = setTimeout(() => { + onChange(next); + }, debounce); + }; + + // Cleanup timer on unmount + useEffect(() => { + return () => { + if (timerRef.current) clearTimeout(timerRef.current); + }; + }, []); + + return ( +
+
+ ); +} diff --git a/src/frontend/src/components/Sidebar.tsx b/src/frontend/src/components/Sidebar.tsx new file mode 100644 index 0000000..a2d22d7 --- /dev/null +++ b/src/frontend/src/components/Sidebar.tsx @@ -0,0 +1,138 @@ +import { NavLink } from 'react-router-dom'; +import { + Radio, + BookOpen, + ListOrdered, + Activity, + Settings, + Server, + ChevronLeft, + ChevronRight, +} from 'lucide-react'; +import { useState, useEffect } from 'react'; +import { TubearrLogo } from './TubearrLogo'; + +const NAV_ITEMS = [ + { to: '/', icon: Radio, label: 'Channels' }, + { to: '/library', icon: BookOpen, label: 'Library' }, + { to: '/queue', icon: ListOrdered, label: 'Queue' }, + { to: '/activity', icon: Activity, label: 'Activity' }, + { to: '/settings', icon: Settings, label: 'Settings' }, + { to: '/system', icon: Server, label: 'System' }, +] as const; + +export function Sidebar() { + const [collapsed, setCollapsed] = useState(() => { + try { + return localStorage.getItem('tubearr-sidebar-collapsed') === 'true'; + } catch { + return false; + } + }); + + useEffect(() => { + try { + localStorage.setItem('tubearr-sidebar-collapsed', String(collapsed)); + } catch { + // Ignore — localStorage may be unavailable in privacy mode + } + }, [collapsed]); + + return ( + + ); +} diff --git a/src/frontend/src/components/StatusBadge.tsx b/src/frontend/src/components/StatusBadge.tsx new file mode 100644 index 0000000..3bf2246 --- /dev/null +++ b/src/frontend/src/components/StatusBadge.tsx @@ -0,0 +1,80 @@ +import type { ContentStatus, QueueStatus } from '@shared/types/index'; + +// ── Status → Color mapping ── + +type StatusValue = ContentStatus | QueueStatus | string; + +interface BadgeStyle { + color: string; + backgroundColor: string; +} + +const STATUS_STYLES: Record = { + // Content statuses + monitored: { color: 'var(--info)', backgroundColor: 'var(--info-bg)' }, + downloaded: { color: 'var(--success)', backgroundColor: 'var(--success-bg)' }, + downloading: { color: 
'var(--info)', backgroundColor: 'var(--info-bg)' }, + failed: { color: 'var(--danger)', backgroundColor: 'var(--danger-bg)' }, + queued: { color: 'var(--warning)', backgroundColor: 'var(--warning-bg)' }, + ignored: { color: 'var(--text-muted)', backgroundColor: 'var(--bg-hover)' }, + // Queue statuses + pending: { color: 'var(--warning)', backgroundColor: 'var(--warning-bg)' }, + completed: { color: 'var(--success)', backgroundColor: 'var(--success-bg)' }, + cancelled: { color: 'var(--text-muted)', backgroundColor: 'var(--bg-hover)' }, + // Check statuses + success: { color: 'var(--success)', backgroundColor: 'var(--success-bg)' }, + error: { color: 'var(--danger)', backgroundColor: 'var(--danger-bg)' }, + rate_limited: { color: 'var(--warning)', backgroundColor: 'var(--warning-bg)' }, +}; + +const DEFAULT_STYLE: BadgeStyle = { + color: 'var(--text-secondary)', + backgroundColor: 'var(--bg-hover)', +}; + +// ── Component ── + +interface StatusBadgeProps { + status: StatusValue; + /** Override the display label. Defaults to capitalized status. */ + label?: string; + /** Animate for in-progress states. */ + pulse?: boolean; +} + +export function StatusBadge({ status, label, pulse }: StatusBadgeProps) { + const style = STATUS_STYLES[status] ?? DEFAULT_STYLE; + const displayLabel = label ?? 
status.charAt(0).toUpperCase() + status.slice(1).replaceAll('_', ' '); + + return ( + + {(pulse || status === 'downloading') && ( + + ); +} diff --git a/src/frontend/src/components/Table.tsx b/src/frontend/src/components/Table.tsx new file mode 100644 index 0000000..8bcfda6 --- /dev/null +++ b/src/frontend/src/components/Table.tsx @@ -0,0 +1,151 @@ +import { type ReactNode, useCallback, useMemo, useState } from 'react'; +import { ChevronUp, ChevronDown } from 'lucide-react'; + +// ── Types ── + +export interface Column { + key: string; + label: string; + sortable?: boolean; + width?: string; + headerRender?: () => ReactNode; + render: (item: T) => ReactNode; +} + +interface TableProps { + columns: Column[]; + data: T[]; + keyExtractor: (item: T) => string | number; + onRowClick?: (item: T) => void; + emptyMessage?: string; + sortKey?: string; + sortDirection?: 'asc' | 'desc'; + onSort?: (key: string, direction: 'asc' | 'desc') => void; +} + +// ── Component ── + +export function Table({ + columns, + data, + keyExtractor, + onRowClick, + emptyMessage = 'No items found.', + sortKey: controlledSortKey, + sortDirection: controlledSortDirection, + onSort: controlledOnSort, +}: TableProps) { + // Internal sort state (used when not controlled) + const [internalSortKey, setInternalSortKey] = useState(null); + const [internalSortDir, setInternalSortDir] = useState<'asc' | 'desc'>('asc'); + + const activeSortKey = controlledSortKey ?? internalSortKey; + const activeSortDir = controlledSortDirection ?? internalSortDir; + + const handleSort = useCallback( + (key: string) => { + if (controlledOnSort) { + const nextDir = activeSortKey === key && activeSortDir === 'asc' ? 'desc' : 'asc'; + controlledOnSort(key, nextDir); + } else { + setInternalSortKey((prev) => { + if (prev === key) { + setInternalSortDir((d) => (d === 'asc' ? 
'desc' : 'asc')); + return prev; + } + setInternalSortDir('asc'); + return key; + }); + } + }, + [activeSortKey, activeSortDir, controlledOnSort], + ); + + // Memoize hover styles + const rowStyle = useMemo( + () => ({ + cursor: onRowClick ? 'pointer' : 'default', + transition: 'background-color var(--transition-fast)', + }), + [onRowClick], + ); + + if (data.length === 0) { + return ( +
+ {emptyMessage} +
+ ); + } + + return ( +
+ + + + {columns.map((col) => ( + + ))} + + + + {data.map((item) => ( + onRowClick(item) : undefined} + style={rowStyle} + tabIndex={onRowClick ? 0 : undefined} + onKeyDown={ + onRowClick + ? (e) => { + if (e.key === 'Enter' || e.key === ' ') { + e.preventDefault(); + onRowClick(item); + } + } + : undefined + } + role={onRowClick ? 'button' : undefined} + > + {columns.map((col) => ( + + ))} + + ))} + +
handleSort(col.key) : undefined} + aria-sort={ + activeSortKey === col.key + ? activeSortDir === 'asc' + ? 'ascending' + : 'descending' + : undefined + } + > + + {col.headerRender ? col.headerRender() : col.label} + {col.sortable && activeSortKey === col.key && ( + activeSortDir === 'asc' + ? + : + )} + +
{col.render(item)}
+
+ ); +} diff --git a/src/frontend/src/components/TubearrLogo.tsx b/src/frontend/src/components/TubearrLogo.tsx new file mode 100644 index 0000000..0563903 --- /dev/null +++ b/src/frontend/src/components/TubearrLogo.tsx @@ -0,0 +1,64 @@ +interface TubearrLogoProps { + collapsed: boolean; + size?: number; +} + +function LogoIcon({ size = 24 }: { size?: number }) { + return ( + + ); +} + +export function TubearrLogo({ collapsed, size = 24 }: TubearrLogoProps) { + return ( +
+ + {!collapsed && ( + + Tubearr + + )} +
+ ); +} diff --git a/src/frontend/src/main.tsx b/src/frontend/src/main.tsx new file mode 100644 index 0000000..96d4939 --- /dev/null +++ b/src/frontend/src/main.tsx @@ -0,0 +1,29 @@ +import { StrictMode } from 'react'; +import { createRoot } from 'react-dom/client'; +import { BrowserRouter } from 'react-router-dom'; +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; +import { App } from './App'; +import './styles/global.css'; + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + staleTime: 30_000, + retry: 1, + refetchOnWindowFocus: false, + }, + }, +}); + +const root = document.getElementById('root'); +if (!root) throw new Error('Root element not found'); + +createRoot(root).render( + + + + + + + , +); diff --git a/src/frontend/src/pages/Activity.tsx b/src/frontend/src/pages/Activity.tsx new file mode 100644 index 0000000..f803b11 --- /dev/null +++ b/src/frontend/src/pages/Activity.tsx @@ -0,0 +1,471 @@ +import { useState, useMemo, useCallback } from 'react'; +import { ActivityIcon, Clock, Loader, RefreshCw } from 'lucide-react'; +import { Table, type Column } from '../components/Table'; +import { StatusBadge } from '../components/StatusBadge'; +import { Pagination } from '../components/Pagination'; +import { FilterBar, type FilterDefinition } from '../components/FilterBar'; +import { useHistory, useActivity, type HistoryFilters } from '../api/hooks/useActivity'; +import type { DownloadHistoryRecord } from '@shared/types/index'; + +// ── Helpers ── + +function formatTimestamp(iso: string): string { + const d = new Date(iso); + return d.toLocaleString(undefined, { + month: 'short', + day: 'numeric', + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + }); +} + +function formatRelativeTime(iso: string): string { + const d = new Date(iso); + const now = new Date(); + const diffMs = now.getTime() - d.getTime(); + + if (diffMs < 60_000) return 'just now'; + if (diffMs < 3600_000) return `${Math.floor(diffMs / 
60_000)}m ago`; + if (diffMs < 86400_000) return `${Math.floor(diffMs / 3600_000)}h ago`; + if (diffMs < 604800_000) return `${Math.floor(diffMs / 86400_000)}d ago`; + return d.toLocaleDateString(undefined, { month: 'short', day: 'numeric' }); +} + +function formatEventType(type: string): string { + return type + .split('_') + .map((w) => w.charAt(0).toUpperCase() + w.slice(1)) + .join(' '); +} + +// ── Event type colors for badges ── + +const EVENT_TYPE_STYLES: Record = { + download_started: 'downloading', + download_completed: 'completed', + download_failed: 'failed', + content_added: 'monitored', + content_removed: 'ignored', + channel_added: 'success', + channel_removed: 'cancelled', + queue_retry: 'pending', +}; + +// ── Known event types for filter ── + +const EVENT_TYPES = [ + 'download_started', + 'download_completed', + 'download_failed', + 'content_added', + 'content_removed', + 'channel_added', + 'channel_removed', + 'queue_retry', +]; + +// ── Component ── + +export function ActivityPage() { + const [activeTab, setActiveTab] = useState<'history' | 'recent'>('history'); + const [page, setPage] = useState(1); + const [filterValues, setFilterValues] = useState>({ + eventType: '', + }); + + // History filters + const historyFilters: HistoryFilters = useMemo( + () => ({ + eventType: filterValues.eventType || undefined, + }), + [filterValues], + ); + + // Queries + const { data: historyData, isLoading: historyLoading, error: historyError, refetch: refetchHistory } = useHistory( + historyFilters, + page, + 20, + ); + const { data: recentData, isLoading: recentLoading, error: recentError, refetch: refetchRecent } = useActivity(50); + + const handleFilterChange = useCallback((key: string, value: string) => { + setFilterValues((prev) => ({ ...prev, [key]: value })); + setPage(1); + }, []); + + // Filter definitions + const filterDefs: FilterDefinition[] = useMemo( + () => [ + { + key: 'eventType', + label: 'Event Type', + options: [ + { value: '', label: 'All 
Events' }, + ...EVENT_TYPES.map((t) => ({ value: t, label: formatEventType(t) })), + ], + }, + ], + [], + ); + + // History table columns + const historyColumns: Column[] = useMemo( + () => [ + { + key: 'eventType', + label: 'Event', + width: '160px', + render: (item) => ( + + ), + }, + { + key: 'status', + label: 'Status', + width: '120px', + render: (item) => ( + + ), + }, + { + key: 'details', + label: 'Details', + render: (item) => { + if (!item.details) return ; + // Show a summary of details + const summary = Object.entries(item.details) + .filter(([, v]) => v != null && v !== '') + .map(([k, v]) => `${k}: ${typeof v === 'object' ? JSON.stringify(v) : String(v)}`) + .join(', '); + return ( + + {summary || '—'} + + ); + }, + }, + { + key: 'references', + label: 'References', + width: '140px', + render: (item) => ( + + {item.channelId && `Channel #${item.channelId}`} + {item.channelId && item.contentItemId && ' · '} + {item.contentItemId && `Content #${item.contentItemId}`} + {!item.channelId && !item.contentItemId && '—'} + + ), + }, + { + key: 'createdAt', + label: 'Time', + width: '170px', + sortable: true, + render: (item) => ( + + {formatTimestamp(item.createdAt)} + + ), + }, + ], + [], + ); + + const tabBase: React.CSSProperties = { + padding: 'var(--space-2) var(--space-4)', + border: '1px solid var(--border)', + borderBottom: 'none', + borderRadius: 'var(--radius-md) var(--radius-md) 0 0', + backgroundColor: 'var(--bg-input)', + color: 'var(--text-secondary)', + fontSize: 'var(--font-size-sm)', + fontWeight: 500, + cursor: 'pointer', + transition: 'background-color var(--transition-fast), color var(--transition-fast)', + }; + + const tabActive: React.CSSProperties = { + ...tabBase, + backgroundColor: 'var(--bg-card)', + color: 'var(--text-primary)', + borderColor: 'var(--accent)', + borderBottomColor: 'var(--bg-card)', + }; + + const historyPagination = historyData?.pagination; + const historyItems = historyData?.data ?? []; + + return ( +
+ {/* Header */} +
+ +

Activity

+
+ + {/* Tabs */} +
+ + +
+ + {/* History tab */} + {activeTab === 'history' && ( +
+ {/* Event type filter */} +
+ +
+ + {/* Error */} + {historyError && ( +
+ {historyError instanceof Error ? historyError.message : 'Failed to load history'} + +
+ )} + + {/* Loading */} + {historyLoading && ( +
+ + Loading history… +
+ )} + + {/* Table */} + {!historyLoading && ( + <> + + columns={historyColumns} + data={historyItems} + keyExtractor={(item) => item.id} + emptyMessage="No activity recorded yet." + /> + + {historyPagination && ( + + )} + + )} +
+ )} + + {/* Recent tab */} + {activeTab === 'recent' && ( +
+ {/* Error */} + {recentError && ( +
+ {recentError instanceof Error ? recentError.message : 'Failed to load activity'} + +
+ )} + + {/* Loading */} + {recentLoading && ( +
+ + Loading recent activity… +
+ )} + + {/* Activity feed */} + {!recentLoading && ( +
+ {(!recentData || recentData.length === 0) && ( +
+ No activity recorded yet. +
+ )} + + {recentData?.map((event) => ( +
+ {/* Event badge */} + + + {/* Status */} + + + {/* Details */} +
+ + {event.details + ? (Object.entries(event.details) + .filter(([, v]) => v != null && v !== '') + .map(([k, v]) => `${k}: ${typeof v === 'object' ? JSON.stringify(v) : String(v)}`) + .join(', ') || '—') + : '—'} + +
+ + {/* References */} + + {event.channelId && `C#${event.channelId}`} + {event.channelId && event.contentItemId && ' · '} + {event.contentItemId && `I#${event.contentItemId}`} + + + {/* Time */} + + +
+ ))} +
+ )} +
+ )} +
+ ); +} diff --git a/src/frontend/src/pages/ChannelDetail.tsx b/src/frontend/src/pages/ChannelDetail.tsx new file mode 100644 index 0000000..2f43476 --- /dev/null +++ b/src/frontend/src/pages/ChannelDetail.tsx @@ -0,0 +1,1291 @@ +import { useState, useEffect, useMemo, useCallback } from 'react'; +import { useParams, useNavigate, Link } from 'react-router-dom'; +import { + ArrowLeft, + Bookmark, + BookmarkPlus, + CheckCircle, + ChevronDown, + ChevronRight, + Download, + ExternalLink, + Film, + ListMusic, + Loader, + Music, + RefreshCw, + Save, + Trash2, +} from 'lucide-react'; +import { useChannel, useUpdateChannel, useDeleteChannel, useScanChannel, useSetMonitoringMode } from '../api/hooks/useChannels'; +import { useChannelContent, useDownloadContent, useToggleMonitored, useBulkMonitored } from '../api/hooks/useContent'; +import { useChannelPlaylists, useRefreshPlaylists } from '../api/hooks/usePlaylists'; +import { useFormatProfiles } from '../api/hooks/useFormatProfiles'; +import { Table, type Column } from '../components/Table'; +import { PlatformBadge } from '../components/PlatformBadge'; +import { StatusBadge } from '../components/StatusBadge'; +import { QualityLabel } from '../components/QualityLabel'; +import { Modal } from '../components/Modal'; +import type { ContentItem, MonitoringMode } from '@shared/types/index'; + +// ── Helpers ── + +function formatDuration(seconds: number | null): string { + if (seconds == null) return '—'; + const h = Math.floor(seconds / 3600); + const m = Math.floor((seconds % 3600) / 60); + const s = seconds % 60; + if (h > 0) return `${h}:${String(m).padStart(2, '0')}:${String(s).padStart(2, '0')}`; + return `${m}:${String(s).padStart(2, '0')}`; +} + +function formatFileSize(bytes: number | null): string { + if (bytes == null) return '—'; + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`; + if (bytes < 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(1)} 
MB`; + return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`; +} + +function formatRelativeTime(isoString: string | null): string { + if (!isoString) return '—'; + const delta = Date.now() - Date.parse(isoString); + if (delta < 0) return 'just now'; + const seconds = Math.floor(delta / 1000); + if (seconds < 60) return 'just now'; + const minutes = Math.floor(seconds / 60); + if (minutes < 60) return `${minutes}m ago`; + const hours = Math.floor(minutes / 60); + if (hours < 24) return `${hours}h ago`; + const days = Math.floor(hours / 24); + if (days < 30) return `${days}d ago`; + const months = Math.floor(days / 30); + if (months < 12) return `${months}mo ago`; + const years = Math.floor(months / 12); + return `${years}y ago`; +} + +const MONITORING_MODE_OPTIONS: { value: MonitoringMode; label: string }[] = [ + { value: 'all', label: 'All Content' }, + { value: 'future', label: 'Future Only' }, + { value: 'existing', label: 'Existing Only' }, + { value: 'none', label: 'None' }, +]; + +// ── Component ── + +export function ChannelDetail() { + const { id } = useParams<{ id: string }>(); + const navigate = useNavigate(); + const channelId = parseInt(id ?? 
'0', 10); + + // ── Data hooks ── + const { data: channel, isLoading: channelLoading, error: channelError } = useChannel(channelId); + const { data: content, isLoading: contentLoading, error: contentError, refetch: refetchContent } = useChannelContent(channelId); + const { data: formatProfiles } = useFormatProfiles(); + const { data: playlistData } = useChannelPlaylists(channelId); + + // ── Mutation hooks ── + const updateChannel = useUpdateChannel(channelId); + const deleteChannel = useDeleteChannel(); + const downloadContent = useDownloadContent(); + const scanChannel = useScanChannel(channelId); + const setMonitoringMode = useSetMonitoringMode(channelId); + const toggleMonitored = useToggleMonitored(channelId); + const refreshPlaylists = useRefreshPlaylists(channelId); + const bulkMonitored = useBulkMonitored(channelId); + + // ── Local state ── + const [showDeleteConfirm, setShowDeleteConfirm] = useState(false); + const [scanResult, setScanResult] = useState<{ message: string; isError: boolean } | null>(null); + const [sortKey, setSortKey] = useState(null); + const [sortDirection, setSortDirection] = useState<'asc' | 'desc'>('asc'); + const [expandedPlaylists, setExpandedPlaylists] = useState>(new Set()); + const [selectedIds, setSelectedIds] = useState>(new Set()); + const [localCheckInterval, setLocalCheckInterval] = useState(''); + const [checkIntervalSaved, setCheckIntervalSaved] = useState(false); + + // Sync local check interval from channel data + useEffect(() => { + if (channel?.checkInterval != null) { + setLocalCheckInterval(channel.checkInterval); + } + }, [channel?.checkInterval]); + + // Auto-dismiss scan result toast after 5 seconds + useEffect(() => { + if (!scanResult) return; + const timer = setTimeout(() => setScanResult(null), 5000); + return () => clearTimeout(timer); + }, [scanResult]); + + // ── Handlers ── + + const handleFormatProfileChange = useCallback( + (e: React.ChangeEvent) => { + const profileId = e.target.value ? 
parseInt(e.target.value, 10) : null; + updateChannel.mutate({ formatProfileId: profileId }); + }, + [updateChannel], + ); + + const handleCheckIntervalSave = useCallback(() => { + if (localCheckInterval === '' || Number(localCheckInterval) < 1) return; + updateChannel.mutate( + { checkInterval: Number(localCheckInterval) }, + { + onSuccess: () => { + setCheckIntervalSaved(true); + setTimeout(() => setCheckIntervalSaved(false), 2500); + }, + }, + ); + }, [localCheckInterval, updateChannel]); + + const handleMonitoringModeChange = useCallback( + (e: React.ChangeEvent) => { + setMonitoringMode.mutate({ monitoringMode: e.target.value }); + }, + [setMonitoringMode], + ); + + const handleScan = useCallback(() => { + scanChannel.mutate(undefined, { + onSuccess: (result) => { + if (result.status === 'already_running') { + setScanResult({ message: 'Scan already in progress', isError: false }); + } else if (result.status === 'rate_limited') { + setScanResult({ message: 'Rate limited — try again later', isError: false }); + } else if (result.status === 'error') { + setScanResult({ message: 'Scan failed — check server logs', isError: true }); + } else { + const msg = result.newItems > 0 + ? `Found ${result.newItems} new item${result.newItems === 1 ? '' : 's'}` + : 'No new content'; + setScanResult({ message: msg, isError: false }); + } + }, + onError: (err) => { + setScanResult({ + message: err instanceof Error ? err.message : 'Scan failed', + isError: true, + }); + }, + }); + }, [scanChannel]); + + const handleRefreshPlaylists = useCallback(() => { + refreshPlaylists.mutate(undefined, { + onSuccess: () => { + setScanResult({ message: 'Playlists refreshed', isError: false }); + }, + onError: (err) => { + setScanResult({ + message: err instanceof Error ? 
err.message : 'Failed to refresh playlists', + isError: true, + }); + }, + }); + }, [refreshPlaylists]); + + const handleDelete = useCallback(() => { + deleteChannel.mutate(channelId, { + onSuccess: () => { + navigate('/', { replace: true }); + }, + }); + }, [channelId, deleteChannel, navigate]); + + const handleSort = useCallback((key: string, direction: 'asc' | 'desc') => { + setSortKey(key); + setSortDirection(direction); + }, []); + + const togglePlaylist = useCallback((id: number | 'uncategorized') => { + setExpandedPlaylists((prev) => { + const next = new Set(prev); + if (next.has(id)) { + next.delete(id); + } else { + next.add(id); + } + return next; + }); + }, []); + + // ── Bulk selection handlers ── + + const toggleSelect = useCallback((id: number) => { + setSelectedIds((prev) => { + const next = new Set(prev); + if (next.has(id)) { + next.delete(id); + } else { + next.add(id); + } + return next; + }); + }, []); + + const clearSelection = useCallback(() => { + setSelectedIds(new Set()); + }, []); + + const toggleSelectAll = useCallback(() => { + const items = content ?? []; + if (items.length === 0) return; + setSelectedIds((prev) => { + if (prev.size === items.length) return new Set(); + return new Set(items.map((item) => item.id)); + }); + }, [content]); + + const isAllSelected = content != null && content.length > 0 && selectedIds.size === content.length; + + const handleBulkMonitor = useCallback( + (monitored: boolean) => { + bulkMonitored.mutate( + { ids: [...selectedIds], monitored }, + { onSuccess: () => clearSelection() }, + ); + }, + [bulkMonitored, selectedIds, clearSelection], + ); + + const handleBulkDownload = useCallback(() => { + for (const id of selectedIds) { + downloadContent.mutate(id); + } + clearSelection(); + }, [selectedIds, downloadContent, clearSelection]); + + // ── Sorted content ── + + const sortedContent = useMemo(() => { + const items = content ?? 
[]; + if (!sortKey) return items; + const sorted = [...items]; + sorted.sort((a, b) => { + let cmp = 0; + switch (sortKey) { + case 'title': + cmp = a.title.localeCompare(b.title); + break; + case 'publishedAt': { + const aDate = a.publishedAt ? Date.parse(a.publishedAt) : -Infinity; + const bDate = b.publishedAt ? Date.parse(b.publishedAt) : -Infinity; + cmp = aDate - bDate; + break; + } + case 'status': + cmp = a.status.localeCompare(b.status); + break; + case 'duration': { + const aDur = a.duration ?? -Infinity; + const bDur = b.duration ?? -Infinity; + cmp = aDur - bDur; + break; + } + case 'fileSize': + cmp = (a.fileSize ?? -Infinity) - (b.fileSize ?? -Infinity); + break; + case 'downloadedAt': { + const aDate2 = a.downloadedAt ? Date.parse(a.downloadedAt) : -Infinity; + const bDate2 = b.downloadedAt ? Date.parse(b.downloadedAt) : -Infinity; + cmp = aDate2 - bDate2; + break; + } + case 'quality': { + const aQ = a.qualityMetadata?.actualResolution ?? ''; + const bQ = b.qualityMetadata?.actualResolution ?? ''; + cmp = aQ.localeCompare(bQ); + break; + } + default: + return 0; + } + return sortDirection === 'desc' ? -cmp : cmp; + }); + return sorted; + }, [content, sortKey, sortDirection]); + + // ── Playlist grouping (YouTube only) ── + + const playlistGroups = useMemo(() => { + if (!channel || channel.platform !== 'youtube' || !playlistData) return null; + const { playlists, mappings } = playlistData; + if (playlists.length === 0) return null; + + // Build a set of content IDs that belong to some playlist + const categorizedIds = new Set(); + const groups: { id: number | 'uncategorized'; title: string; items: ContentItem[] }[] = []; + + // Build a Map from content ID to content item for O(1) lookups (js-index-maps) + const contentById = new Map(); + for (const item of sortedContent) { + contentById.set(item.id, item); + } + + for (const playlist of playlists) { + const contentIds = mappings[playlist.id] ?? 
[]; + const items: ContentItem[] = []; + for (const cid of contentIds) { + const item = contentById.get(cid); + if (item) { + items.push(item); + categorizedIds.add(cid); + } + } + if (items.length > 0) { + groups.push({ id: playlist.id, title: playlist.title, items }); + } + } + + // Uncategorized: items not in any playlist + const uncategorized = sortedContent.filter((item) => !categorizedIds.has(item.id)); + if (uncategorized.length > 0) { + groups.push({ id: 'uncategorized', title: 'Uncategorized', items: uncategorized }); + } + + return groups.length > 0 ? groups : null; + }, [channel, playlistData, sortedContent]); + + // ── Content table columns ── + + const contentColumns = useMemo[]>( + () => [ + { + key: 'select', + label: '', + width: '40px', + headerRender: () => ( + e.stopPropagation()} + aria-label="Select all content items" + /> + ), + render: (item) => ( + { + e.stopPropagation(); + toggleSelect(item.id); + }} + onClick={(e) => e.stopPropagation()} + aria-label={`Select ${item.title}`} + /> + ), + }, + { + key: 'monitored', + label: '', + width: '40px', + render: (item) => { + const isMonitored = item.monitored; + return ( + + ); + }, + }, + { + key: 'thumbnail', + label: '', + width: '70px', + render: (item) => + item.thumbnailUrl ? ( + + ) : ( +
+ {item.contentType === 'audio' ? : } +
+ ), + }, + { + key: 'title', + label: 'Title', + sortable: true, + render: (item) => ( + + + {item.title} + + + + ), + }, + { + key: 'contentType', + label: 'Type', + width: '90px', + render: (item) => ( + + ), + }, + { + key: 'status', + label: 'Status', + width: '120px', + sortable: true, + render: (item) => , + }, + { + key: 'quality', + label: 'Quality', + width: '200px', + sortable: true, + render: (item) => , + }, + { + key: 'publishedAt', + label: 'Published', + width: '120px', + sortable: true, + render: (item) => ( + + {formatRelativeTime(item.publishedAt)} + + ), + }, + { + key: 'downloadedAt', + label: 'Downloaded', + width: '120px', + sortable: true, + render: (item) => ( + + {formatRelativeTime(item.downloadedAt)} + + ), + }, + { + key: 'duration', + label: 'Duration', + width: '90px', + sortable: true, + render: (item) => ( + + {formatDuration(item.duration)} + + ), + }, + { + key: 'fileSize', + label: 'Size', + width: '90px', + sortable: true, + render: (item) => ( + + {formatFileSize(item.fileSize)} + + ), + }, + ], + [toggleMonitored, selectedIds, toggleSelect, isAllSelected, toggleSelectAll], + ); + + // ── Render helpers ── + + const renderTable = useCallback( + (items: ContentItem[]) => ( + item.id} + emptyMessage="No content found for this channel." + sortKey={sortKey ?? undefined} + sortDirection={sortDirection} + onSort={handleSort} + /> + ), + [contentColumns, sortKey, sortDirection, handleSort], + ); + + const renderPlaylistGroups = useCallback( + (groups: { id: number | 'uncategorized'; title: string; items: ContentItem[] }[]) => ( +
+ {groups.map((group) => { + const isExpanded = expandedPlaylists.has(group.id); + return ( +
+ + {isExpanded ? renderTable(group.items) : null} +
+ ); + })} +
+ ), + [expandedPlaylists, togglePlaylist, renderTable], + ); + + // ── Loading / Error states ── + + if (channelLoading) { + return ( +
+ + Loading channel... +
+ ); + } + + if (channelError || !channel) { + return ( +
+ + Back to Channels + +
+ {channelError instanceof Error + ? channelError.message + : `Channel with ID ${id} not found.`} +
+
+ ); + } + + const isYouTube = channel.platform === 'youtube'; + const hasPlaylistGroups = isYouTube && playlistGroups !== null; + + return ( +
+ {/* Back navigation */} + + Back to Channels + + + {/* Channel header */} +
+ {/* Avatar */} + {`${channel.name} + + {/* Info */} +
+
+

+ {channel.name} +

+ +
+ + + + {channel.url} + + + + + {/* Actions row */} +
+ {/* Monitoring mode dropdown */} + + + {/* Format profile selector */} + + + {/* Per-channel check interval */} +
+ setLocalCheckInterval(e.target.value === '' ? '' : Number(e.target.value))} + aria-label="Check interval in minutes" + title="Check interval (minutes)" + style={{ + width: 64, + padding: 'var(--space-2) var(--space-2)', + borderRadius: 'var(--radius-md)', + border: '1px solid var(--border)', + backgroundColor: 'var(--bg-main)', + color: 'var(--text-primary)', + fontSize: 'var(--font-size-sm)', + }} + /> + min + +
+ + {/* Scan Now button */} + + + {/* Refresh Playlists button (YouTube only) */} + {isYouTube ? ( + + ) : null} + + {/* Delete button */} + +
+
+
+ + {/* Content table / playlist groups */} +
+
+

+ Content +

+
+ {contentError ? ( +
+ + {contentError instanceof Error + ? contentError.message + : 'Failed to load content'} + + +
+ ) : null} + {contentLoading ? ( +
+ + Loading content... +
+ ) : hasPlaylistGroups ? ( + renderPlaylistGroups(playlistGroups!) + ) : ( + renderTable(sortedContent) + )} +
+ + {/* Floating bulk action bar */} + {selectedIds.size > 0 ? ( +
+ + {selectedIds.size} selected + +
+ + + + +
+ ) : null} + + {/* Download error toast */} + {downloadContent.isError ? ( +
+ {downloadContent.error instanceof Error + ? downloadContent.error.message + : 'Failed to enqueue download'} +
+ ) : null} + + {/* Scan result toast */} + {scanResult ? ( +
+ {scanResult.message} +
+ ) : null} + + {/* Delete confirmation modal */} + setShowDeleteConfirm(false)} + width={400} + > +

+ Are you sure you want to delete {channel.name}? + This action cannot be undone. +

+ {deleteChannel.isError ? ( +
+ {deleteChannel.error instanceof Error + ? deleteChannel.error.message + : 'Failed to delete channel'} +
+ ) : null} +
+ + +
+
+
+ ); +} diff --git a/src/frontend/src/pages/Channels.tsx b/src/frontend/src/pages/Channels.tsx new file mode 100644 index 0000000..b9e0473 --- /dev/null +++ b/src/frontend/src/pages/Channels.tsx @@ -0,0 +1,334 @@ +import { useState, useEffect, useMemo, useCallback } from 'react'; +import { useNavigate } from 'react-router-dom'; +import { Plus, Loader, RefreshCw } from 'lucide-react'; +import { useChannels, useScanAllChannels } from '../api/hooks/useChannels'; +import { Table, type Column } from '../components/Table'; +import { PlatformBadge } from '../components/PlatformBadge'; +import { StatusBadge } from '../components/StatusBadge'; +import { ProgressBar } from '../components/ProgressBar'; +import { AddChannelModal } from '../components/AddChannelModal'; +import type { ChannelWithCounts } from '@shared/types/api'; + +// ── Helpers ── + +function formatRelativeTime(dateStr: string | null): string { + if (!dateStr) return '—'; + const diff = Date.now() - new Date(dateStr).getTime(); + const seconds = Math.floor(diff / 1000); + if (seconds < 60) return 'just now'; + const minutes = Math.floor(seconds / 60); + if (minutes < 60) return `${minutes}m ago`; + const hours = Math.floor(minutes / 60); + if (hours < 24) return `${hours}h ago`; + const days = Math.floor(hours / 24); + return `${days}d ago`; +} + +// ── Component ── + +export function Channels() { + const navigate = useNavigate(); + const [showAddModal, setShowAddModal] = useState(false); + const [scanResult, setScanResult] = useState<{ message: string; isError: boolean } | null>(null); + + const { data: channels, isLoading, error, refetch } = useChannels(); + const scanAll = useScanAllChannels(); + + // Auto-dismiss scan result toast after 5 seconds + useEffect(() => { + if (!scanResult) return; + const timer = setTimeout(() => setScanResult(null), 5000); + return () => clearTimeout(timer); + }, [scanResult]); + + const handleScanAll = useCallback(() => { + scanAll.mutate(undefined, { + onSuccess: (result) => 
{ + let msg = `Scanned ${result.summary.scanned} channel${result.summary.scanned === 1 ? '' : 's'}, found ${result.summary.newItems} new item${result.summary.newItems === 1 ? '' : 's'}`; + if (result.summary.errors > 0) { + msg += ` (${result.summary.errors} error${result.summary.errors === 1 ? '' : 's'})`; + } + setScanResult({ message: msg, isError: result.summary.errors > 0 }); + }, + onError: (err) => { + setScanResult({ + message: err instanceof Error ? err.message : 'Scan failed', + isError: true, + }); + }, + }); + }, [scanAll]); + + const handleRowClick = useCallback( + (channel: ChannelWithCounts) => { + navigate(`/channel/${channel.id}`); + }, + [navigate], + ); + + const columns = useMemo[]>( + () => [ + { + key: 'avatar', + label: '', + width: '40px', + render: (c) => ( + {c.name} + ), + }, + { + key: 'name', + label: 'Name', + sortable: true, + render: (c) => ( + + {c.name} + + ), + }, + { + key: 'platform', + label: 'Platform', + width: '120px', + render: (c) => , + }, + { + key: 'monitoring', + label: 'Monitoring', + width: '110px', + render: (c) => ( + + ), + }, + { + key: 'progress', + label: 'Progress', + width: '160px', + render: (c) => ( + + ), + }, + { + key: 'lastChecked', + label: 'Last Check', + width: '120px', + sortable: true, + render: (c) => ( + + {formatRelativeTime(c.lastCheckedAt)} + + ), + }, + { + key: 'lastCheckStatus', + label: 'Status', + width: '110px', + render: (c) => + c.lastCheckStatus ? ( + + ) : ( + + ), + }, + ], + [], + ); + + if (isLoading) { + return ( +
+ + Loading channels... +
+ ); + } + + if (error) { + return ( +
+

Channels

+
+ Failed to load channels: {error instanceof Error ? error.message : 'Unknown error'} + +
+
+ ); + } + + return ( +
+ {/* Page header */} +
+

+ Channels +

+
+ {/* Scan All button */} + + {/* Add Channel button */} + +
+
+ + {/* Channel table */} +
+
c.id} + onRowClick={handleRowClick} + emptyMessage="No channels added yet. Add a YouTube channel or SoundCloud artist to get started." + /> + + + {/* Add Channel modal */} + setShowAddModal(false)} /> + + {/* Scan result toast */} + {scanResult && ( +
+ {scanResult.message} +
+ )} + + ); +} diff --git a/src/frontend/src/pages/Library.tsx b/src/frontend/src/pages/Library.tsx new file mode 100644 index 0000000..d8e7329 --- /dev/null +++ b/src/frontend/src/pages/Library.tsx @@ -0,0 +1,388 @@ +import { useState, useCallback, useMemo } from 'react'; +import { useNavigate } from 'react-router-dom'; +import { Library as LibraryIcon, Loader, RefreshCw, Film, Music } from 'lucide-react'; +import { Table, type Column } from '../components/Table'; +import { StatusBadge } from '../components/StatusBadge'; +import { QualityLabel } from '../components/QualityLabel'; +import { PlatformBadge } from '../components/PlatformBadge'; +import { Pagination } from '../components/Pagination'; +import { SearchBar } from '../components/SearchBar'; +import { FilterBar, type FilterDefinition } from '../components/FilterBar'; +import { useLibraryContent, type LibraryFilters } from '../api/hooks/useLibrary'; +import { useChannels } from '../api/hooks/useChannels'; +import type { ContentItem, ContentStatus, ContentType } from '@shared/types/index'; + +// ── Helpers ── + +function formatDuration(seconds: number | null): string { + if (seconds == null) return '—'; + const h = Math.floor(seconds / 3600); + const m = Math.floor((seconds % 3600) / 60); + const s = seconds % 60; + if (h > 0) return `${h}:${String(m).padStart(2, '0')}:${String(s).padStart(2, '0')}`; + return `${m}:${String(s).padStart(2, '0')}`; +} + +function formatFileSize(bytes: number | null): string { + if (bytes == null) return '—'; + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`; + if (bytes < 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(1)} MB`; + return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`; +} + +// ── Component ── + +export function Library() { + const navigate = useNavigate(); + + // Filter state + const [page, setPage] = useState(1); + const [search, setSearch] = useState(''); + const [filterValues, 
setFilterValues] = useState>({ + status: '', + contentType: '', + channelId: '', + }); + + // Build query filters + const filters: LibraryFilters = useMemo( + () => ({ + page, + pageSize: 20, + search: search || undefined, + status: (filterValues.status as ContentStatus) || undefined, + contentType: (filterValues.contentType as ContentType) || undefined, + channelId: filterValues.channelId ? Number(filterValues.channelId) : undefined, + }), + [page, search, filterValues], + ); + + // Queries + const { data, isLoading, error, refetch } = useLibraryContent(filters); + const { data: channels } = useChannels(); + + // Reset to page 1 when filters change + const handleSearchChange = useCallback((value: string) => { + setSearch(value); + setPage(1); + }, []); + + const handleFilterChange = useCallback((key: string, value: string) => { + setFilterValues((prev) => ({ ...prev, [key]: value })); + setPage(1); + }, []); + + // Build channel options for filter dropdown + const channelOptions = useMemo(() => { + const opts = [{ value: '', label: 'All Channels' }]; + if (channels && Array.isArray(channels)) { + for (const c of channels) { + opts.push({ value: String(c.id), label: c.name }); + } + } + return opts; + }, [channels]); + + // Filter definitions + const filterDefs: FilterDefinition[] = useMemo( + () => [ + { + key: 'status', + label: 'Status', + options: [ + { value: '', label: 'All' }, + { value: 'monitored', label: 'Monitored' }, + { value: 'queued', label: 'Queued' }, + { value: 'downloading', label: 'Downloading' }, + { value: 'downloaded', label: 'Downloaded' }, + { value: 'failed', label: 'Failed' }, + { value: 'ignored', label: 'Ignored' }, + ], + }, + { + key: 'contentType', + label: 'Type', + options: [ + { value: '', label: 'All' }, + { value: 'video', label: 'Video' }, + { value: 'audio', label: 'Audio' }, + { value: 'livestream', label: 'Livestream' }, + ], + }, + { + key: 'channelId', + label: 'Channel', + options: channelOptions, + }, + ], + 
[channelOptions], + ); + + // Table columns + const columns: Column[] = useMemo( + () => [ + { + key: 'thumbnail', + label: '', + width: '70px', + render: (item) => + item.thumbnailUrl ? ( + + ) : ( +
+ {item.contentType === 'audio' ? : } +
+ ), + }, + { + key: 'title', + label: 'Title', + sortable: true, + render: (item) => ( + + {item.title} + + ), + }, + { + key: 'channel', + label: 'Channel', + render: (item) => { + // Find channel name from loaded channels list + const channel = channels?.find?.((c: { id: number }) => c.id === item.channelId); + return ( + + ); + }, + }, + { + key: 'platform', + label: 'Platform', + width: '110px', + render: (item) => { + const channel = channels?.find?.((c: { id: number }) => c.id === item.channelId); + return channel ? : ; + }, + }, + { + key: 'status', + label: 'Status', + width: '120px', + sortable: true, + render: (item) => ( + + ), + }, + { + key: 'quality', + label: 'Quality', + width: '180px', + render: (item) => , + }, + { + key: 'contentType', + label: 'Type', + width: '90px', + render: (item) => ( + + {item.contentType} + + ), + }, + { + key: 'duration', + label: 'Duration', + width: '90px', + render: (item) => ( + + {formatDuration(item.duration)} + + ), + }, + { + key: 'fileSize', + label: 'Size', + width: '90px', + render: (item) => ( + + {formatFileSize(item.fileSize)} + + ), + }, + ], + [channels, navigate], + ); + + // Extract pagination from response + const pagination = data?.pagination; + const items = data?.data ?? []; + + return ( +
+ {/* Header */} +
+ +

Library

+
+ + {/* Toolbar: search + filters */} +
+ + +
+ + {/* Error state */} + {error && ( +
+ {error instanceof Error ? error.message : 'Failed to load library content'} + +
+ )} + + {/* Loading state */} + {isLoading && ( +
+ + Loading library… +
+ )} + + {/* Content table */} + {!isLoading && !error && ( + <> + + columns={columns} + data={items} + keyExtractor={(item) => item.id} + emptyMessage="No content found." + /> + + {/* Pagination */} + {pagination && ( + + )} + + )} +
+ ); +} diff --git a/src/frontend/src/pages/Login.tsx b/src/frontend/src/pages/Login.tsx new file mode 100644 index 0000000..cbbb60d --- /dev/null +++ b/src/frontend/src/pages/Login.tsx @@ -0,0 +1,184 @@ +/** + * Login page — currently unused. + * + * The web UI is a trusted same-origin client (auth model rework, M002/S01). + * This page is kept for potential future optional Forms-based auth. + */ +import { useState, type FormEvent } from 'react'; + +interface LoginProps { + onLoginSuccess: () => void; +} + +export function Login({ onLoginSuccess }: LoginProps) { + const [key, setKey] = useState(''); + const [error, setError] = useState(null); + const [loading, setLoading] = useState(false); + + async function handleSubmit(e: FormEvent) { + e.preventDefault(); + const trimmedKey = key.trim(); + + if (!trimmedKey) { + setError('Please enter an API key'); + return; + } + + setError(null); + setLoading(true); + + try { + // Validate the API key by hitting the system status endpoint + const response = await fetch('/api/v1/system/status', { + headers: { 'X-Api-Key': trimmedKey }, + }); + + if (response.status === 401) { + setError('Invalid API key'); + setLoading(false); + return; + } + + if (!response.ok) { + setError(`Server error (${response.status})`); + setLoading(false); + return; + } + + // Key is valid — store it and redirect + localStorage.setItem('tubearr-api-key', trimmedKey); + onLoginSuccess(); + } catch (err) { + console.error('[Login] Validation request failed:', err); + setError('Unable to connect to the server'); + setLoading(false); + } + } + + return ( +
+
+ {/* Brand */} +
+

+ Tubearr +

+

+ Enter your API key to continue +

+
+ + {/* Form */} +
+
+ + setKey(e.target.value)} + placeholder="Enter your API key" + autoFocus + disabled={loading} + style={{ + width: '100%', + padding: 'var(--space-3)', + fontSize: 'var(--font-size-base)', + }} + /> +
+ + {error && ( +
+ {error} +
+ )} + + + + +

+ Find your API key in Settings → General +

+
+
+ ); +} diff --git a/src/frontend/src/pages/Queue.tsx b/src/frontend/src/pages/Queue.tsx new file mode 100644 index 0000000..2f0b68b --- /dev/null +++ b/src/frontend/src/pages/Queue.tsx @@ -0,0 +1,357 @@ +import { useState, useMemo } from 'react'; +import { ListOrdered, RotateCcw, X, Loader, RefreshCw } from 'lucide-react'; +import { Table, type Column } from '../components/Table'; +import { StatusBadge } from '../components/StatusBadge'; +import { useQueue, useRetryQueueItem, useCancelQueueItem } from '../api/hooks/useQueue'; +import type { QueueItem, QueueStatus } from '@shared/types/index'; + +// ── Helpers ── + +function formatTime(iso: string | null): string { + if (!iso) return '—'; + const d = new Date(iso); + const now = new Date(); + const diffMs = now.getTime() - d.getTime(); + + if (diffMs < 60_000) return 'just now'; + if (diffMs < 3600_000) return `${Math.floor(diffMs / 60_000)}m ago`; + if (diffMs < 86400_000) return `${Math.floor(diffMs / 3600_000)}h ago`; + return d.toLocaleDateString(undefined, { month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit' }); +} + +// ── Status Tab Options ── + +const STATUS_TABS: { value: QueueStatus | ''; label: string }[] = [ + { value: '', label: 'All' }, + { value: 'pending', label: 'Pending' }, + { value: 'downloading', label: 'Downloading' }, + { value: 'completed', label: 'Completed' }, + { value: 'failed', label: 'Failed' }, +]; + +// ── Component ── + +export function Queue() { + const [statusFilter, setStatusFilter] = useState(''); + + // Query with 5s auto-refresh + const { data: items, isLoading, error, refetch } = useQueue(statusFilter); + const retryMutation = useRetryQueueItem(); + const cancelMutation = useCancelQueueItem(); + + // Table columns + const columns: Column[] = useMemo( + () => [ + { + key: 'contentTitle', + label: 'Content', + render: (item) => { + let displayText: string; + if (item.channelName && item.contentTitle) { + displayText = `${item.channelName} — 
${item.contentTitle}`; + } else if (item.contentTitle) { + displayText = item.contentTitle; + } else { + displayText = `Content #${item.contentItemId}`; + } + return ( + + {displayText} + + ); + }, + }, + { + key: 'status', + label: 'Status', + width: '130px', + sortable: true, + render: (item) => ( + + ), + }, + { + key: 'priority', + label: 'Priority', + width: '80px', + sortable: true, + render: (item) => ( + + {item.priority} + + ), + }, + { + key: 'attempts', + label: 'Attempts', + width: '90px', + render: (item) => ( + + {item.attempts}/{item.maxAttempts} + + ), + }, + { + key: 'error', + label: 'Error', + render: (item) => + item.error ? ( + + {item.error} + + ) : ( + + ), + }, + { + key: 'startedAt', + label: 'Started', + width: '110px', + render: (item) => ( + + {formatTime(item.startedAt)} + + ), + }, + { + key: 'completedAt', + label: 'Completed', + width: '110px', + render: (item) => ( + + {formatTime(item.completedAt)} + + ), + }, + { + key: 'actions', + label: 'Actions', + width: '100px', + render: (item) => ( +
+ {item.status === 'failed' && ( + + )} + {item.status === 'pending' && ( + + )} +
+ ), + }, + ], + [retryMutation, cancelMutation], + ); + + const tabBase: React.CSSProperties = { + padding: 'var(--space-2) var(--space-4)', + border: '1px solid var(--border)', + borderBottom: 'none', + borderRadius: 'var(--radius-md) var(--radius-md) 0 0', + backgroundColor: 'var(--bg-input)', + color: 'var(--text-secondary)', + fontSize: 'var(--font-size-sm)', + fontWeight: 500, + cursor: 'pointer', + transition: 'background-color var(--transition-fast), color var(--transition-fast)', + }; + + const tabActive: React.CSSProperties = { + ...tabBase, + backgroundColor: 'var(--bg-card)', + color: 'var(--text-primary)', + borderColor: 'var(--accent)', + borderBottomColor: 'var(--bg-card)', + }; + + return ( +
+ {/* Header */} +
+ +

Queue

+ + Auto-refreshes every 5s + +
+ + {/* Status filter tabs */} +
+ {STATUS_TABS.map((tab) => ( + + ))} +
+ + {/* Error state */} + {error && ( +
+ {error instanceof Error ? error.message : 'Failed to load queue'} + +
+ )} + + {/* Mutation errors */} + {(retryMutation.error || cancelMutation.error) && ( +
+ {retryMutation.error instanceof Error + ? retryMutation.error.message + : cancelMutation.error instanceof Error + ? cancelMutation.error.message + : 'Action failed'} +
+ )} + + {/* Loading state */} + {isLoading && ( +
+ + Loading queue… +
+ )} + + {/* Queue table */} + {!isLoading && ( + + columns={columns} + data={items ?? []} + keyExtractor={(item) => item.id} + emptyMessage="Queue is empty." + /> + )} +
+ ); +} diff --git a/src/frontend/src/pages/Settings.tsx b/src/frontend/src/pages/Settings.tsx new file mode 100644 index 0000000..7a2ba08 --- /dev/null +++ b/src/frontend/src/pages/Settings.tsx @@ -0,0 +1,1397 @@ +import { useState, useCallback, useMemo, useEffect } from 'react'; +import { Plus, Pencil, Trash2, Loader, RefreshCw, Star, Bell, Send, CheckCircle, XCircle, Eye, EyeOff, Copy, RotateCw, Key, Globe, Save } from 'lucide-react'; +import { + useFormatProfiles, + useCreateFormatProfile, + useUpdateFormatProfile, + useDeleteFormatProfile, +} from '../api/hooks/useFormatProfiles'; +import { + usePlatformSettings, + useUpdatePlatformSettings, + type UpdatePlatformSettingsInput, +} from '../api/hooks/usePlatformSettings'; +import { + useNotifications, + useCreateNotification, + useUpdateNotification, + useDeleteNotification, + useTestNotification, + type NotificationSetting, +} from '../api/hooks/useNotifications'; +import { useApiKey, useRegenerateApiKey, useAppSettings, useUpdateAppSettings } from '../api/hooks/useSystem'; +import { Table, type Column } from '../components/Table'; +import { Modal } from '../components/Modal'; +import { FormatProfileForm, type FormatProfileFormValues } from '../components/FormatProfileForm'; +import { PlatformSettingsForm, type PlatformSettingsFormValues } from '../components/PlatformSettingsForm'; +import { NotificationForm, type NotificationFormValues } from '../components/NotificationForm'; +import type { FormatProfile, PlatformSettings } from '@shared/types/index'; + +// ── Badge styles ── + +const badgeBase: React.CSSProperties = { + display: 'inline-flex', + alignItems: 'center', + padding: '1px var(--space-2)', + borderRadius: 'var(--radius-sm)', + fontSize: 'var(--font-size-xs)', + fontWeight: 600, + textTransform: 'uppercase', + letterSpacing: '0.04em', +}; + +const iconButtonBase: React.CSSProperties = { + display: 'inline-flex', + alignItems: 'center', + justifyContent: 'center', + width: 28, + height: 28, + 
borderRadius: 'var(--radius-sm)', + color: 'var(--text-muted)', + transition: 'color var(--transition-fast), background-color var(--transition-fast)', +}; + +// ── Component ── + +export function SettingsPage() { + // ── Format Profiles state ── + const { data: profiles, isLoading: profilesLoading, error: profilesError, refetch: refetchProfiles } = useFormatProfiles(); + const createProfileMutation = useCreateFormatProfile(); + const updateProfileMutation = useUpdateFormatProfile(); + const deleteProfileMutation = useDeleteFormatProfile(); + + const [showCreateProfileModal, setShowCreateProfileModal] = useState(false); + const [editingProfile, setEditingProfile] = useState(null); + const [deletingProfile, setDeletingProfile] = useState(null); + + // ── Platform Settings state ── + const { data: platformSettings } = usePlatformSettings(); + const updatePlatformSettingsMutation = useUpdatePlatformSettings(); + const [editingPlatform, setEditingPlatform] = useState(null); + + // ── Notifications state ── + const { data: notifications, isLoading: notificationsLoading } = useNotifications(); + const createNotifMutation = useCreateNotification(); + const updateNotifMutation = useUpdateNotification(); + const deleteNotifMutation = useDeleteNotification(); + const testNotifMutation = useTestNotification(); + + const [showCreateNotifModal, setShowCreateNotifModal] = useState(false); + const [editingNotification, setEditingNotification] = useState(null); + const [deletingNotification, setDeletingNotification] = useState(null); + const [testResults, setTestResults] = useState>({}); + + // ── API Key state ── + const { data: apiKeyData, isLoading: apiKeyLoading, error: apiKeyError } = useApiKey(); + const regenerateApiKeyMutation = useRegenerateApiKey(); + const [showApiKey, setShowApiKey] = useState(false); + const [showRegenerateConfirm, setShowRegenerateConfirm] = useState(false); + const [copySuccess, setCopySuccess] = useState(false); + + // ── App Settings state ── + 
const { data: appSettings, isLoading: appSettingsLoading } = useAppSettings(); + const updateAppSettingsMutation = useUpdateAppSettings(); + const [checkInterval, setCheckInterval] = useState(''); + const [concurrentDownloads, setConcurrentDownloads] = useState(''); + const [settingsSaveFlash, setSettingsSaveFlash] = useState(false); + + // Initialize local state from fetched app settings + useEffect(() => { + if (appSettings) { + setCheckInterval(appSettings.checkInterval); + setConcurrentDownloads(appSettings.concurrentDownloads); + } + }, [appSettings]); + + const settingsDirty = + checkInterval !== '' && + concurrentDownloads !== '' && + appSettings != null && + (Number(checkInterval) !== appSettings.checkInterval || + Number(concurrentDownloads) !== appSettings.concurrentDownloads); + + const settingsValid = + checkInterval !== '' && + concurrentDownloads !== '' && + Number(checkInterval) >= 1 && + Number(concurrentDownloads) >= 1 && + Number(concurrentDownloads) <= 10; + + // ── App Settings handlers ── + + const handleSaveSettings = useCallback(() => { + if (!settingsDirty || !settingsValid) return; + updateAppSettingsMutation.mutate( + { + checkInterval: Number(checkInterval), + concurrentDownloads: Number(concurrentDownloads), + }, + { + onSuccess: () => { + setSettingsSaveFlash(true); + setTimeout(() => setSettingsSaveFlash(false), 2500); + }, + }, + ); + }, [settingsDirty, settingsValid, checkInterval, concurrentDownloads, updateAppSettingsMutation]); + + // ── Format Profile handlers ── + + const handleCreateProfile = useCallback( + (values: FormatProfileFormValues) => { + createProfileMutation.mutate(values, { + onSuccess: () => setShowCreateProfileModal(false), + }); + }, + [createProfileMutation], + ); + + const handleUpdateProfile = useCallback( + (values: FormatProfileFormValues) => { + if (!editingProfile) return; + updateProfileMutation.mutate( + { id: editingProfile.id, ...values }, + { onSuccess: () => setEditingProfile(null) }, + ); + }, + 
[editingProfile, updateProfileMutation], + ); + + const handleDeleteProfile = useCallback(() => { + if (!deletingProfile) return; + deleteProfileMutation.mutate(deletingProfile.id, { + onSuccess: () => setDeletingProfile(null), + }); + }, [deletingProfile, deleteProfileMutation]); + + // ── Platform Settings handlers ── + + const KNOWN_PLATFORMS = ['youtube', 'soundcloud'] as const; + + const platformSettingsMap = useMemo(() => { + const map = new Map(); + if (platformSettings) { + for (const ps of platformSettings) { + map.set(ps.platform, ps); + } + } + return map; + }, [platformSettings]); + + const editingPlatformSettings = editingPlatform ? platformSettingsMap.get(editingPlatform) ?? null : null; + + const handleUpdatePlatformSettings = useCallback( + (values: PlatformSettingsFormValues) => { + if (!editingPlatform) return; + const input: UpdatePlatformSettingsInput = { + platform: editingPlatform, + defaultFormatProfileId: values.defaultFormatProfileId, + checkInterval: values.checkInterval, + concurrencyLimit: values.concurrencyLimit, + subtitleLanguages: values.subtitleLanguages || null, + grabAllEnabled: values.grabAllEnabled, + grabAllOrder: values.grabAllOrder, + }; + updatePlatformSettingsMutation.mutate(input, { + onSuccess: () => setEditingPlatform(null), + }); + }, + [editingPlatform, updatePlatformSettingsMutation], + ); + + // ── Platform Settings table data ── + + type PlatformRow = { + platform: string; + label: string; + settings: PlatformSettings | null; + }; + + const platformRows = useMemo( + () => + KNOWN_PLATFORMS.map((platform) => ({ + platform, + label: platform === 'youtube' ? 'YouTube' : 'SoundCloud', + settings: platformSettingsMap.get(platform) ?? 
null, + })), + [platformSettingsMap], + ); + + const platformColumns = useMemo[]>( + () => [ + { + key: 'platform', + label: 'Platform', + width: '130px', + render: (row) => ( + + {row.label} + + ), + }, + { + key: 'formatProfile', + label: 'Format Profile', + render: (row) => { + const profileId = row.settings?.defaultFormatProfileId; + const profile = profileId ? profiles?.find((p) => p.id === profileId) : null; + return ( + + {profile ? profile.name : 'System default'} + + ); + }, + }, + { + key: 'checkInterval', + label: 'Interval', + width: '90px', + render: (row) => ( + + {row.settings?.checkInterval ?? 360}m + + ), + }, + { + key: 'concurrency', + label: 'Concurrency', + width: '100px', + render: (row) => ( + + {row.settings?.concurrencyLimit ?? 2} + + ), + }, + { + key: 'grabAll', + label: 'Grab All', + width: '90px', + render: (row) => { + const enabled = row.settings?.grabAllEnabled ?? false; + return ( + + {enabled ? 'On' : 'Off'} + + ); + }, + }, + { + key: 'actions', + label: '', + width: '50px', + render: (row) => ( +
+ +
+ ), + }, + ], + [profiles], + ); + + // ── Notification handlers ── + + const handleCreateNotification = useCallback( + (values: NotificationFormValues) => { + createNotifMutation.mutate( + { + type: 'discord', + name: values.name, + config: { webhookUrl: values.webhookUrl }, + enabled: values.enabled, + onGrab: values.onGrab, + onDownload: values.onDownload, + onFailure: values.onFailure, + }, + { onSuccess: () => setShowCreateNotifModal(false) }, + ); + }, + [createNotifMutation], + ); + + const handleUpdateNotification = useCallback( + (values: NotificationFormValues) => { + if (!editingNotification) return; + updateNotifMutation.mutate( + { + id: editingNotification.id, + name: values.name, + config: { webhookUrl: values.webhookUrl }, + enabled: values.enabled, + onGrab: values.onGrab, + onDownload: values.onDownload, + onFailure: values.onFailure, + }, + { onSuccess: () => setEditingNotification(null) }, + ); + }, + [editingNotification, updateNotifMutation], + ); + + const handleDeleteNotification = useCallback(() => { + if (!deletingNotification) return; + deleteNotifMutation.mutate(deletingNotification.id, { + onSuccess: () => setDeletingNotification(null), + }); + }, [deletingNotification, deleteNotifMutation]); + + const handleTestNotification = useCallback( + (id: number) => { + setTestResults((prev) => ({ ...prev, [id]: 'loading' })); + testNotifMutation.mutate(id, { + onSuccess: (data) => { + setTestResults((prev) => ({ ...prev, [id]: data.success ? 
'success' : 'error' })); + // Clear result after 4 seconds + setTimeout(() => { + setTestResults((prev) => ({ ...prev, [id]: null })); + }, 4000); + }, + onError: () => { + setTestResults((prev) => ({ ...prev, [id]: 'error' })); + setTimeout(() => { + setTestResults((prev) => ({ ...prev, [id]: null })); + }, 4000); + }, + }); + }, + [testNotifMutation], + ); + + // ── API Key handlers ── + + const handleCopyApiKey = useCallback(() => { + if (!apiKeyData?.apiKey) return; + navigator.clipboard.writeText(apiKeyData.apiKey).then(() => { + setCopySuccess(true); + setTimeout(() => setCopySuccess(false), 2000); + }); + }, [apiKeyData]); + + const handleRegenerateApiKey = useCallback(() => { + regenerateApiKeyMutation.mutate(undefined, { + onSuccess: () => { + setShowRegenerateConfirm(false); + setShowApiKey(false); + }, + }); + }, [regenerateApiKeyMutation]); + + const maskedApiKey = useMemo(() => { + if (!apiKeyData?.apiKey) return '••••••••-••••-••••-••••-••••••••••••'; + const key = apiKeyData.apiKey; + const last4 = key.slice(-4); + return `••••••••-••••-••••-••••-••••••••${last4}`; + }, [apiKeyData]); + + // ── Format Profile columns ── + + const profileColumns = useMemo[]>( + () => [ + { + key: 'name', + label: 'Name', + render: (p) => ( + + {p.name} + {p.isDefault && ( + + + Default + + )} + + ), + }, + { + key: 'videoResolution', + label: 'Resolution', + width: '100px', + render: (p) => ( + + {p.videoResolution ?? 'Any'} + + ), + }, + { + key: 'audioCodec', + label: 'Codec', + width: '80px', + render: (p) => ( + + {p.audioCodec ?? 'Any'} + + ), + }, + { + key: 'audioBitrate', + label: 'Bitrate', + width: '80px', + render: (p) => ( + + {p.audioBitrate ?? 'Any'} + + ), + }, + { + key: 'containerFormat', + label: 'Container', + width: '90px', + render: (p) => ( + + {p.containerFormat ?? 'Any'} + + ), + }, + { + key: 'actions', + label: '', + width: '80px', + render: (p) => ( +
+ + {!p.isDefault && ( + + )} +
+ ), + }, + ], + [], + ); + + // ── Notification columns ── + + const notificationColumns = useMemo[]>( + () => [ + { + key: 'name', + label: 'Name', + render: (n) => ( + {n.name} + ), + }, + { + key: 'type', + label: 'Type', + width: '100px', + render: (n) => ( + + {n.type === 'discord' ? 'Discord' : n.type} + + ), + }, + { + key: 'events', + label: 'Events', + width: '180px', + render: (n) => { + const events: string[] = []; + if (n.onGrab) events.push('Grab'); + if (n.onDownload) events.push('Download'); + if (n.onFailure) events.push('Failure'); + return ( + + {events.length > 0 ? events.join(', ') : 'None'} + + ); + }, + }, + { + key: 'enabled', + label: 'Status', + width: '90px', + render: (n) => ( + + {n.enabled ? 'Active' : 'Disabled'} + + ), + }, + { + key: 'actions', + label: '', + width: '130px', + render: (n) => { + const result = testResults[n.id]; + return ( +
+ {/* Test result indicator */} + {result === 'success' && } + {result === 'error' && } + + {/* Test button */} + + + {/* Edit button */} + + + {/* Delete button */} + +
+ ); + }, + }, + ], + [testResults, handleTestNotification], + ); + + // ── Loading state ── + + if (profilesLoading) { + return ( +
+ + Loading settings... +
+ ); + } + + // ── Error state ── + + if (profilesError) { + return ( +
+

Settings

+
+ Failed to load settings: {profilesError instanceof Error ? profilesError.message : 'Unknown error'} + +
+
+ ); + } + + return ( +
+ {/* Page header */} +

+ Settings +

+ + {/* ── General section (API Key + server info) ── */} +
+
+

+ + General +

+

+ API key is used for external integrations (Homepage, Organizr, scripts). The web UI does not require it. +

+
+ +
+
+ + {/* API Key row */} + + + + + + {/* Editable Check Interval */} + + + + + {/* Editable Concurrent Downloads */} + + + + + +
+ API Key + + {apiKeyLoading ? ( + + + Loading… + + ) : apiKeyError ? ( + + Failed to load API key + + ) : ( +
+ + {showApiKey ? apiKeyData?.apiKey : maskedApiKey} + + + {/* Show/Hide toggle */} + + + {/* Copy button */} + + + {/* Regenerate button */} + +
+ )} +
+ Check Interval + + {appSettingsLoading ? ( + + + Loading… + + ) : ( +
+ setCheckInterval(e.target.value === '' ? '' : Number(e.target.value))} + aria-label="Check interval in minutes" + style={{ + width: 80, + padding: 'var(--space-2) var(--space-3)', + borderRadius: 'var(--radius-md)', + border: '1px solid var(--border)', + backgroundColor: 'var(--bg-main)', + color: 'var(--text-primary)', + fontSize: 'var(--font-size-sm)', + }} + /> + minutes +
+ )} +
+ Concurrent Downloads + + {appSettingsLoading ? ( + + + Loading… + + ) : ( +
+ setConcurrentDownloads(e.target.value === '' ? '' : Number(e.target.value))} + aria-label="Concurrent downloads" + style={{ + width: 80, + padding: 'var(--space-2) var(--space-3)', + borderRadius: 'var(--radius-md)', + border: '1px solid var(--border)', + backgroundColor: 'var(--bg-main)', + color: 'var(--text-primary)', + fontSize: 'var(--font-size-sm)', + }} + /> + max 10 +
+ )} +
+
+ {/* Save button + feedback for editable settings */} +
+ + {updateAppSettingsMutation.isError && ( + + {updateAppSettingsMutation.error instanceof Error + ? updateAppSettingsMutation.error.message + : 'Failed to save settings'} + + )} +
+

+ Media path is configured via environment variable and cannot be changed from the UI. +

+ + + {/* ── Platform Settings section ── */} +
+
+

+ + Platform Settings +

+

+ Configure default settings for each platform. These defaults are applied when adding new channels. +

+
+ +
+ row.platform} + emptyMessage="No platforms available." + /> + + + + {/* ── Format Profiles section ── */} +
+
+

+ Format Profiles +

+ +
+ +
+
p.id} + emptyMessage="No format profiles defined. Create one to control download quality preferences." + /> + + + + {/* ── Notifications section ── */} +
+
+

+ + Notifications +

+ +
+ +
+ {notificationsLoading ? ( +
+ + Loading notifications... +
+ ) : ( +
n.id} + emptyMessage="No notification channels configured. Add one to receive Discord alerts on download events." + /> + )} + + + + {/* ── Format Profile: Create modal ── */} + setShowCreateProfileModal(false)} width={520}> + setShowCreateProfileModal(false)} + isPending={createProfileMutation.isPending} + error={createProfileMutation.error instanceof Error ? createProfileMutation.error.message : null} + /> + + + {/* ── Format Profile: Edit modal ── */} + setEditingProfile(null)} + width={520} + > + {editingProfile && ( + setEditingProfile(null)} + isPending={updateProfileMutation.isPending} + error={updateProfileMutation.error instanceof Error ? updateProfileMutation.error.message : null} + /> + )} + + + {/* ── Format Profile: Delete confirmation ── */} + setDeletingProfile(null)} + width={400} + > +

+ Are you sure you want to delete {deletingProfile?.name}? + This action cannot be undone. +

+ {deleteProfileMutation.error && ( +
+ {deleteProfileMutation.error instanceof Error ? deleteProfileMutation.error.message : 'Delete failed'} +
+ )} +
+ + +
+
+ + {/* ── Platform Settings: Edit modal ── */} + setEditingPlatform(null)} + width={520} + > + {editingPlatform && ( + setEditingPlatform(null)} + isPending={updatePlatformSettingsMutation.isPending} + error={updatePlatformSettingsMutation.error instanceof Error ? updatePlatformSettingsMutation.error.message : null} + /> + )} + + + {/* ── Notification: Create modal ── */} + setShowCreateNotifModal(false)} width={520}> + setShowCreateNotifModal(false)} + isPending={createNotifMutation.isPending} + error={createNotifMutation.error instanceof Error ? createNotifMutation.error.message : null} + /> + + + {/* ── Notification: Edit modal ── */} + setEditingNotification(null)} + width={520} + > + {editingNotification && ( + setEditingNotification(null)} + isPending={updateNotifMutation.isPending} + error={updateNotifMutation.error instanceof Error ? updateNotifMutation.error.message : null} + /> + )} + + + {/* ── Notification: Delete confirmation ── */} + setDeletingNotification(null)} + width={400} + > +

+ Are you sure you want to delete {deletingNotification?.name}? + This action cannot be undone. +

+ {deleteNotifMutation.error && ( +
+ {deleteNotifMutation.error instanceof Error ? deleteNotifMutation.error.message : 'Delete failed'} +
+ )} +
+ + +
+
+ + {/* ── API Key: Regenerate confirmation ── */} + setShowRegenerateConfirm(false)} + width={400} + > +

+ This will invalidate the current key for all external integrations. Any tools using the old key will need to be updated. Continue? +

+ {regenerateApiKeyMutation.error && ( +
+ {regenerateApiKeyMutation.error instanceof Error ? regenerateApiKeyMutation.error.message : 'Regeneration failed'} +
+ )} +
+ + +
+
+ + ); +} + +// ── Helper component for General section ── + +function GeneralInfoRow({ label, value, description }: { label: string; value: string; description: string }) { + return ( + + + + + ); +} diff --git a/src/frontend/src/pages/System.tsx b/src/frontend/src/pages/System.tsx new file mode 100644 index 0000000..73d2549 --- /dev/null +++ b/src/frontend/src/pages/System.tsx @@ -0,0 +1,261 @@ +import { Loader, RefreshCw, Server, Activity, Cpu, HardDrive } from 'lucide-react'; +import { useSystemStatus, useHealth } from '../api/hooks/useSystem'; +import { HealthStatus } from '../components/HealthStatus'; +import { formatBytes } from '../utils/format'; + +// ── Helpers ── + +function formatUptime(seconds: number): string { + const days = Math.floor(seconds / 86400); + const hours = Math.floor((seconds % 86400) / 3600); + const minutes = Math.floor((seconds % 3600) / 60); + + const parts: string[] = []; + if (days > 0) parts.push(`${days}d`); + if (hours > 0) parts.push(`${hours}h`); + parts.push(`${minutes}m`); + return parts.join(' '); +} + +// ── Component ── + +export function SystemPage() { + const { data: health, isLoading: healthLoading, error: healthError, refetch: refetchHealth } = useHealth(); + const { data: status, isLoading: statusLoading, error: statusError, refetch: refetchStatus } = useSystemStatus(); + + const isLoading = healthLoading || statusLoading; + + if (isLoading) { + return ( +
+ + Loading system info... +
+ ); + } + + return ( +
+ {/* Page header */} +

+ System +

+ + {/* ── Health section ── */} +
+
+

+ + Health +

+ +
+ + {healthError ? ( +
+ Failed to load health status: {healthError instanceof Error ? healthError.message : 'Unknown error'} + +
+ ) : health ? ( + + ) : null} +
+ + {/* ── System Status section ── */} +
+
+

+ + Status +

+ +
+ + {statusError ? ( +
+ Failed to load system status: {statusError instanceof Error ? statusError.message : 'Unknown error'} + +
+ ) : status ? ( +
+
{label} +
+ {value} +
+ {description} +
+
+
+ + } + label="Application" + value={`${status.appName} v${status.version}`} + /> + } + label="Uptime" + value={formatUptime(status.uptime)} + /> + } + label="Node.js" + value={status.nodeVersion} + /> + } + label="Platform" + value={`${status.platform} / ${status.arch}`} + /> + } + label="Memory Usage" + value={(() => { + const used = formatBytes(status.memoryUsage.heapUsed); + const total = formatBytes(status.memoryUsage.heapTotal); + const pct = ((status.memoryUsage.heapUsed / status.memoryUsage.heapTotal) * 100).toFixed(1); + return `${used} / ${total} (${pct}%)`; + })()} + /> + +
+
+ ) : null} + +

+ Auto-refreshes every 30 seconds. +

+
+
+ ); +} + +// ── Helper component ── + +function SystemInfoRow({ icon, label, value }: { icon: React.ReactNode; label: string; value: string }) { + return ( + + + + {icon} + {label} + + + + + {value} + + + + ); +} diff --git a/src/frontend/src/styles/global.css b/src/frontend/src/styles/global.css new file mode 100644 index 0000000..c6c8658 --- /dev/null +++ b/src/frontend/src/styles/global.css @@ -0,0 +1,148 @@ +/* ── Global Reset & Base Styles ── */ +@import './theme.css'; + +*, +*::before, +*::after { + box-sizing: border-box; + margin: 0; + padding: 0; +} + +html { + font-size: 16px; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +body { + font-family: var(--font-family); + font-size: var(--font-size-base); + color: var(--text-primary); + background-color: var(--bg-main); + line-height: 1.5; + min-height: 100vh; +} + +#root { + min-height: 100vh; +} + +/* ── Links ── */ +a { + color: var(--text-link); + text-decoration: none; +} + +a:hover { + color: var(--accent-hover); +} + +/* ── Focus outline ── */ +:focus-visible { + outline: 2px solid var(--accent); + outline-offset: 2px; +} + +/* ── Scrollbar styling ── */ +::-webkit-scrollbar { + width: 8px; + height: 8px; +} + +::-webkit-scrollbar-track { + background: var(--bg-main); +} + +::-webkit-scrollbar-thumb { + background: var(--border-light); + border-radius: 4px; +} + +::-webkit-scrollbar-thumb:hover { + background: var(--text-muted); +} + +/* ── Buttons base ── */ +button { + font-family: inherit; + font-size: inherit; + cursor: pointer; + border: none; + background: none; + color: inherit; +} + +/* ── Inputs base ── */ +input, +textarea, +select { + font-family: inherit; + font-size: inherit; + color: var(--text-primary); + background-color: var(--bg-input); + border: 1px solid var(--border); + border-radius: var(--radius-md); + padding: var(--space-2) var(--space-3); +} + +input:focus, +textarea:focus, +select:focus { + border-color: var(--accent); + outline: none; +} + 
+input::placeholder { + color: var(--text-muted); +} + +/* ── Tables ── */ +table { + width: 100%; + border-collapse: collapse; +} + +th { + text-align: left; + font-weight: 600; + font-size: var(--font-size-sm); + color: var(--text-secondary); + text-transform: uppercase; + letter-spacing: 0.03em; + padding: var(--space-3) var(--space-4); + border-bottom: 1px solid var(--border); +} + +td { + padding: var(--space-3) var(--space-4); + border-bottom: 1px solid var(--border); +} + +tr:hover { + background-color: var(--bg-hover); +} + +/* ── Utility classes ── */ +.sr-only { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + white-space: nowrap; + border-width: 0; +} + +/* ── Animations ── */ +@keyframes pulse { + 0%, 100% { opacity: 1; } + 50% { opacity: 0.4; } +} + +@keyframes spin { + from { transform: rotate(0deg); } + to { transform: rotate(360deg); } +} diff --git a/src/frontend/src/styles/theme.css b/src/frontend/src/styles/theme.css new file mode 100644 index 0000000..bf5cb31 --- /dev/null +++ b/src/frontend/src/styles/theme.css @@ -0,0 +1,85 @@ +/* ── *arr Dark Theme ── + * Color palette matching Sonarr/Radarr aesthetic. + * All UI components reference these custom properties. 
+ */ + +:root { + /* ── Backgrounds ── */ + --bg-main: #1a1d23; + --bg-sidebar: #14161a; + --bg-card: #242731; + --bg-input: #2a2e38; + --bg-hover: #2f3341; + --bg-selected: #35394a; + --bg-header: #1e2029; + --bg-toolbar: #1e2129; + --bg-modal-overlay: rgba(0, 0, 0, 0.6); + + /* ── Accent ── */ + --accent: #e05d44; + --accent-hover: #c94e38; + --accent-subtle: rgba(224, 93, 68, 0.12); + + /* ── Text ── */ + --text-primary: #e1e2e6; + --text-secondary: #8b8d97; + --text-muted: #5d5f69; + --text-inverse: #14161a; + --text-link: #e05d44; + + /* ── Status colors ── */ + --success: #27c24c; + --success-bg: rgba(39, 194, 76, 0.12); + --warning: #ff902b; + --warning-bg: rgba(255, 144, 43, 0.12); + --danger: #f05050; + --danger-bg: rgba(240, 80, 80, 0.12); + --info: #e05d44; + --info-bg: rgba(224, 93, 68, 0.12); + + /* ── Borders ── */ + --border: #2d3040; + --border-light: #373b4e; + + /* ── Typography ── */ + --font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif; + --font-mono: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, monospace; + --font-size-xs: 0.75rem; + --font-size-sm: 0.8125rem; + --font-size-base: 0.875rem; + --font-size-md: 1rem; + --font-size-lg: 1.125rem; + --font-size-xl: 1.25rem; + --font-size-2xl: 1.5rem; + + /* ── Spacing ── */ + --space-1: 0.25rem; + --space-2: 0.5rem; + --space-3: 0.75rem; + --space-4: 1rem; + --space-5: 1.25rem; + --space-6: 1.5rem; + --space-8: 2rem; + --space-10: 2.5rem; + --space-12: 3rem; + + /* ── Border Radius ── */ + --radius-sm: 3px; + --radius-md: 4px; + --radius-lg: 6px; + --radius-xl: 8px; + + /* ── Shadows ── */ + --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.3); + --shadow-md: 0 2px 8px rgba(0, 0, 0, 0.3); + --shadow-lg: 0 4px 16px rgba(0, 0, 0, 0.4); + + /* ── Layout ── */ + --sidebar-width: 210px; + --sidebar-collapsed: 50px; + --header-height: 55px; + + /* ── Transitions ── */ + --transition-fast: 150ms ease; + --transition-normal: 250ms ease; +} diff --git 
a/src/frontend/src/utils/format.ts b/src/frontend/src/utils/format.ts new file mode 100644 index 0000000..bfc8c45 --- /dev/null +++ b/src/frontend/src/utils/format.ts @@ -0,0 +1,10 @@ +/** + * Format a byte count into a human-readable string (B, KB, MB, GB, TB). + */ +export function formatBytes(bytes: number): string { + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`; + if (bytes < 1024 ** 3) return `${(bytes / 1024 ** 2).toFixed(1)} MB`; + if (bytes < 1024 ** 4) return `${(bytes / 1024 ** 3).toFixed(1)} GB`; + return `${(bytes / 1024 ** 4).toFixed(1)} TB`; +} diff --git a/src/frontend/tsconfig.json b/src/frontend/tsconfig.json new file mode 100644 index 0000000..c06dd75 --- /dev/null +++ b/src/frontend/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "jsx": "react-jsx", + "lib": ["ES2022", "DOM", "DOM.Iterable"], + "module": "ESNext", + "moduleResolution": "bundler", + "outDir": "../../dist/frontend", + "rootDir": ".", + "baseUrl": ".", + "paths": { + "@shared/*": ["../types/*"] + }, + "declaration": false, + "declarationMap": false, + "noEmit": true + }, + "include": ["src/**/*.ts", "src/**/*.tsx", "../types/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/src/frontend/vite.config.ts b/src/frontend/vite.config.ts new file mode 100644 index 0000000..bfba457 --- /dev/null +++ b/src/frontend/vite.config.ts @@ -0,0 +1,24 @@ +import { defineConfig } from 'vite'; +import react from '@vitejs/plugin-react'; +import { resolve } from 'node:path'; + +export default defineConfig({ + plugins: [react()], + root: resolve(__dirname), + build: { + outDir: resolve(__dirname, '../../dist/frontend'), + emptyOutDir: true, + }, + resolve: { + alias: { + '@shared': resolve(__dirname, '../types'), + }, + }, + server: { + port: 3000, + proxy: { + '/api': 'http://localhost:8989', + '/ping': 'http://localhost:8989', + }, + }, +}); diff --git a/src/index.ts b/src/index.ts new 
file mode 100644 index 0000000..b5a031b --- /dev/null +++ b/src/index.ts @@ -0,0 +1,190 @@ +import { config as loadEnv } from 'dotenv'; + +// Load environment variables before anything else +loadEnv(); + +import { appConfig } from './config/index'; +import { initDatabaseAsync, closeDatabase } from './db/index'; +import { runMigrations } from './db/migrate'; +import { buildServer } from './server/index'; +import { ensureDefaultFormatProfile } from './db/repositories/format-profile-repository'; +import { seedAppDefaults } from './db/repositories/system-config-repository'; +import { RateLimiter } from './services/rate-limiter'; +import { SchedulerService } from './services/scheduler'; +import { FileOrganizer } from './services/file-organizer'; +import { CookieManager } from './services/cookie-manager'; +import { QualityAnalyzer } from './services/quality-analyzer'; +import { DownloadService } from './services/download'; +import { QueueService } from './services/queue'; +import { NotificationService } from './services/notification'; +import { HealthService } from './services/health'; +import { PlatformRegistry } from './sources/platform-source'; +import { YouTubeSource } from './sources/youtube'; +import { SoundCloudSource } from './sources/soundcloud'; +import { Platform } from './types/index'; + +const APP_NAME = 'Tubearr'; + +async function main(): Promise { + console.log(`[${APP_NAME}] Starting...`); + + // 1. Initialize database with WAL mode + const db = await initDatabaseAsync(appConfig.dbPath); + + // 2. Run migrations (idempotent — skips already-applied) + await runMigrations(appConfig.dbPath); + + // 2b. Seed default format profile (idempotent) + await ensureDefaultFormatProfile(db); + console.log(`[${APP_NAME}] Default format profile ensured`); + + // 2c. Seed app settings (idempotent — env vars seed on first boot, DB wins after) + await seedAppDefaults(db); + console.log(`[${APP_NAME}] App settings seeded`); + + // 3. 
Build and configure Fastify server + const server = await buildServer({ db }); + + // 4. Set up shared services + const rateLimiter = new RateLimiter({ + [Platform.YouTube]: appConfig.rateLimiter.youtube, + [Platform.SoundCloud]: appConfig.rateLimiter.soundcloud, + }); + + const fileOrganizer = new FileOrganizer(appConfig.mediaPath); + const cookieManager = new CookieManager(appConfig.cookiePath); + const qualityAnalyzer = new QualityAnalyzer(); + const downloadService = new DownloadService( + db, + rateLimiter, + fileOrganizer, + qualityAnalyzer, + cookieManager + ); + + // Attach download service to server for route access + (server as { downloadService: DownloadService | null }).downloadService = downloadService; + + // 4b. Set up notification and queue services + const notificationService = new NotificationService(db); + + const queueService = new QueueService(db, downloadService, { + concurrency: appConfig.concurrentDownloads, + onDownloadComplete: (event) => { + notificationService + .notifyDownload(event.contentTitle, event.channelName, event.platform, event.url, event.filePath) + .catch((err) => { + console.log(`[notification] onDownloadComplete error: ${err instanceof Error ? err.message : String(err)}`); + }); + }, + onDownloadFailed: (event) => { + notificationService + .notifyFailure( + event.contentTitle, + event.channelName, + event.platform, + event.error ?? 'Unknown error', + event.attempt ?? 0, + event.maxAttempts ?? 0 + ) + .catch((err) => { + console.log(`[notification] onDownloadFailed error: ${err instanceof Error ? err.message : String(err)}`); + }); + }, + }); + (server as { queueService: QueueService | null }).queueService = queueService; + + // 5. 
Set up scheduler (if enabled) + let scheduler: SchedulerService | null = null; + + if (appConfig.scheduler.enabled) { + const platformRegistry = new PlatformRegistry(); + platformRegistry.register(Platform.YouTube, new YouTubeSource()); + platformRegistry.register(Platform.SoundCloud, new SoundCloudSource()); + + scheduler = new SchedulerService(db, platformRegistry, rateLimiter, { + onNewContent: (contentItemId: number) => { + queueService.enqueue(contentItemId).catch((err) => { + console.error( + `[scheduler] auto-enqueue failed for contentItemId=${contentItemId}:`, + err instanceof Error ? err.message : err + ); + }); + }, + }); + + // Attach scheduler to server so routes can notify it + (server as { scheduler: SchedulerService | null }).scheduler = scheduler; + } + + // 5b. Set up health service + const healthService = new HealthService( + db, + () => scheduler?.getState() ?? null, + appConfig.mediaPath + ); + (server as { healthService: HealthService | null }).healthService = healthService; + + // 6. Graceful shutdown handler + const shutdown = async (signal: string) => { + console.log(`[${APP_NAME}] ${signal} received — shutting down gracefully...`); + try { + // Stop queue service and scheduler before closing server + queueService.stop(); + if (scheduler) { + scheduler.stop(); + } + await server.close(); + console.log(`[${APP_NAME}] Server closed.`); + } catch (err) { + console.error(`[${APP_NAME}] Error closing server:`, err); + } + closeDatabase(); + console.log(`[${APP_NAME}] Shutdown complete.`); + process.exit(0); + }; + + process.on('SIGINT', () => void shutdown('SIGINT')); + process.on('SIGTERM', () => void shutdown('SIGTERM')); + + // 7. Start listening + try { + const address = await server.listen({ + port: appConfig.port, + host: '0.0.0.0', + }); + console.log(`[${APP_NAME}] Server listening on ${address}`); + } catch (err) { + console.error(`[${APP_NAME}] Failed to start server:`, err); + closeDatabase(); + process.exit(1); + } + + // 8. 
Start queue service and scheduler after server is listening + try { + const recovered = await queueService.recoverOnStartup(); + if (recovered > 0) { + console.log(`[${APP_NAME}] Queue recovered ${recovered} interrupted item(s)`); + } + queueService.start(); + console.log(`[${APP_NAME}] Queue service started`); + } catch (err) { + console.error(`[${APP_NAME}] Failed to start queue service:`, err); + // Non-fatal — server is still running + } + + if (scheduler) { + try { + await scheduler.start(); + } catch (err) { + console.error(`[${APP_NAME}] Failed to start scheduler:`, err); + // Non-fatal — server is still running, scheduler can be retried + } + } +} + +main().catch((err) => { + console.error(`[${APP_NAME}] Fatal startup error:`, err); + closeDatabase(); + process.exit(1); +}); diff --git a/src/server/index.ts b/src/server/index.ts new file mode 100644 index 0000000..48e4960 --- /dev/null +++ b/src/server/index.ts @@ -0,0 +1,155 @@ +import Fastify, { type FastifyInstance } from 'fastify'; +import cors from '@fastify/cors'; +import fastifyStatic from '@fastify/static'; +import { existsSync } from 'node:fs'; +import { join } from 'node:path'; +import { type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { appConfig } from '../config/index'; +import { authPlugin } from './middleware/auth'; +import { errorHandlerPlugin } from './middleware/error-handler'; +import { healthRoutes } from './routes/health'; +import { systemRoutes } from './routes/system'; +import { channelRoutes } from './routes/channel'; +import { formatProfileRoutes } from './routes/format-profile'; +import { downloadRoutes } from './routes/download'; +import { queueRoutes } from './routes/queue'; +import { historyRoutes } from './routes/history'; +import { contentRoutes } from './routes/content'; +import { notificationRoutes } from './routes/notification'; +import { platformSettingsRoutes } from './routes/platform-settings'; +import { 
scanRoutes } from './routes/scan'; +import { playlistRoutes } from './routes/playlist'; +import type { SchedulerService } from '../services/scheduler'; +import type { DownloadService } from '../services/download'; +import type { QueueService } from '../services/queue'; +import type { HealthService } from '../services/health'; + +// Extend Fastify's type system so routes can access the database and scheduler +declare module 'fastify' { + interface FastifyInstance { + db: LibSQLDatabase; + scheduler: SchedulerService | null; + downloadService: DownloadService | null; + queueService: QueueService | null; + healthService: HealthService | null; + } +} + +export interface BuildServerOptions { + db: LibSQLDatabase; +} + +/** + * Create and configure the Fastify server instance. + * Registers CORS, auth middleware, error handler, and all route plugins. + * The database is decorated onto the instance so routes can access it via `fastify.db`. + */ +export async function buildServer(opts: BuildServerOptions): Promise { + const server = Fastify({ + logger: { + level: appConfig.logLevel, + // Redact API key from request logs + serializers: { + req(request) { + return { + method: request.method, + url: sanitizeUrl(request.url), + hostname: request.hostname, + remoteAddress: request.ip, + }; + }, + }, + }, + }); + + // Decorate with database instance for route access + server.decorate('db', opts.db); + + // Decorate with scheduler (null until set by startup code) + server.decorate('scheduler', null); + + // Decorate with download service (null until set by startup code) + server.decorate('downloadService', null); + + // Decorate with queue service (null until set by startup code) + server.decorate('queueService', null); + + // Decorate with health service (null until set by startup code) + server.decorate('healthService', null); + + // Register CORS — permissive for development, tightened later + await server.register(cors, { origin: true }); + + // Register centralized error 
handler + await server.register(errorHandlerPlugin); + + // Register API key authentication + await server.register(authPlugin); + + // Register route modules + await server.register(healthRoutes); + await server.register(systemRoutes); + await server.register(channelRoutes); + await server.register(formatProfileRoutes); + await server.register(downloadRoutes); + await server.register(queueRoutes); + await server.register(historyRoutes); + await server.register(contentRoutes); + await server.register(notificationRoutes); + await server.register(platformSettingsRoutes); + await server.register(scanRoutes); + await server.register(playlistRoutes); + + // ── Static file serving for the frontend SPA ── + const frontendDir = join(process.cwd(), 'dist', 'frontend'); + if (existsSync(frontendDir)) { + await server.register(fastifyStatic, { + root: frontendDir, + prefix: '/', + wildcard: false, + }); + + // SPA catch-all: serve index.html for any GET request that isn't an API route, + // /ping, or a static file. API and non-GET requests get a standard 404 JSON. + server.setNotFoundHandler(async (request, reply) => { + if ( + request.method === 'GET' && + !request.url.startsWith('/api/') && + request.url !== '/ping' + ) { + return reply.sendFile('index.html'); + } + + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Route ${request.method}:${request.url} not found`, + }); + }); + } else { + // No frontend build — standard 404 for all unknown routes + server.setNotFoundHandler(async (_request, reply) => { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: 'Route not found', + }); + }); + } + + return server; +} + +/** + * Strip API key query parameters from URLs before logging. + * Prevents accidental credential leakage in log output. 
+ */ +function sanitizeUrl(url: string): string { + try { + // Replace apikey query param value with [REDACTED] + return url.replace(/([?&])apikey=[^&]*/gi, '$1apikey=[REDACTED]'); + } catch { + return url; + } +} diff --git a/src/server/middleware/auth.ts b/src/server/middleware/auth.ts new file mode 100644 index 0000000..59ddf42 --- /dev/null +++ b/src/server/middleware/auth.ts @@ -0,0 +1,176 @@ +import { type FastifyInstance, type FastifyRequest, type FastifyReply } from 'fastify'; +import fp from 'fastify-plugin'; +import { randomUUID } from 'node:crypto'; +import { eq } from 'drizzle-orm'; +import { systemConfig } from '../../db/schema/index'; + +export const API_KEY_DB_KEY = 'api_key'; + +/** + * Mutable container for the current API key. + * Using an object (not a primitive) as the Fastify decoration value ensures + * that all plugin scopes share the same reference — mutations propagate + * across encapsulated contexts (auth middleware, route plugins, etc.). + */ +export interface ApiKeyHolder { + value: string; +} + +// Extend Fastify's type system for the cached API key holder +declare module 'fastify' { + interface FastifyInstance { + /** Mutable holder for the currently active API key. */ + apiKeyHolder: ApiKeyHolder; + } +} + +/** + * Resolve the API key — read from DB, or generate and store on first run. + * Returns the active API key string. 
+ */ +export async function resolveApiKey(fastify: FastifyInstance): Promise { + const db = fastify.db; + + // Try to read existing key from system_config + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, API_KEY_DB_KEY)) + .limit(1); + + if (rows.length > 0 && rows[0].value) { + return rows[0].value; + } + + // First run — generate a new API key + const newKey = randomUUID(); + await db.insert(systemConfig).values({ + key: API_KEY_DB_KEY, + value: newKey, + }); + + // Log the generated key to stdout ONCE so the user can retrieve it + // This is the only time the key is ever logged. + console.log(''); + console.log('='.repeat(60)); + console.log(' API Key generated (save this — it will not be shown again):'); + console.log(` ${newKey}`); + console.log('='.repeat(60)); + console.log(''); + + return newKey; +} + +/** + * Check if a request originates from the same host (browser UI). + * + * Compares the hostname from the Origin or Referer header against the server's + * own hostname. Port is not compared because the browser might be connecting + * via a proxy or the default port might be implicit/explicit differently. + * Browser requests from the Tubearr UI naturally include these headers. + * External tools (curl, scripts) don't, so they fall through to API key check. + */ +function isSameOriginRequest(request: FastifyRequest): boolean { + const serverHostname = request.hostname; // e.g. 
"localhost" or "192.168.1.10" (no port) + + // Check Origin header first (set by fetch/XHR from browser) + const origin = request.headers.origin; + if (origin) { + try { + const originUrl = new URL(origin); + if (originUrl.hostname === serverHostname) { + return true; + } + } catch { + // Malformed Origin header — ignore + } + } + + // Fallback to Referer header (set by browser navigation and some fetch requests) + const referer = request.headers.referer; + if (referer) { + try { + const refererUrl = new URL(referer); + if (refererUrl.hostname === serverHostname) { + return true; + } + } catch { + // Malformed Referer header — ignore + } + } + + return false; +} + +/** + * Dual-mode API authentication plugin. + * + * Wrapped with fastify-plugin so it applies globally (not encapsulated). + * Adds an onRequest hook that checks all routes under /api/* using two strategies: + * + * 1. **Same-origin bypass**: Browser requests from the Tubearr UI include Origin + * or Referer headers matching the server host. These are trusted and allowed + * through without an API key — matching the Sonarr/Radarr auth model where + * the UI is a trusted internal client. + * + * 2. **API key authentication**: External requests (curl, scripts, integrations) + * must provide a valid API key via: + * - `X-Api-Key` request header + * - `apikey` query parameter + * + * Routes not under /api/ (like /ping) are excluded from authentication. + * + * The resolved API key is decorated onto `fastify.apiKey` so that the + * regeneration endpoint can update the cached value without restarting. 
+ */ +async function authPluginHandler(fastify: FastifyInstance): Promise { + // Resolve the API key at plugin registration time (server startup) + const initialKey = await resolveApiKey(fastify); + + // Decorate with a mutable holder object — mutations propagate across all plugin scopes + fastify.decorate('apiKeyHolder', { value: initialKey }); + + fastify.addHook('onRequest', async (request: FastifyRequest, reply: FastifyReply) => { + // Skip auth for non-API routes (e.g., /ping) + if (!request.url.startsWith('/api/')) { + return; + } + + // Same-origin bypass: browser UI requests are trusted internal clients + if (isSameOriginRequest(request)) { + request.log.debug(`[auth] same-origin bypass for ${request.url}`); + return; + } + + // External request — check X-Api-Key header first, then apikey query parameter + const headerKey = request.headers['x-api-key'] as string | undefined; + const queryKey = (request.query as Record)?.apikey; + + const providedKey = headerKey || queryKey; + + if (!providedKey) { + request.log.debug(`[auth] rejected: missing API key for ${request.url}`); + return reply.status(401).send({ + statusCode: 401, + error: 'Unauthorized', + message: 'Invalid or missing API key', + }); + } + + // Compare against the current (possibly regenerated) API key + if (providedKey !== fastify.apiKeyHolder.value) { + request.log.debug(`[auth] rejected: invalid API key for ${request.url}`); + return reply.status(401).send({ + statusCode: 401, + error: 'Unauthorized', + message: 'Invalid API key', + }); + } + }); +} + +export const authPlugin = fp(authPluginHandler, { + name: 'auth', + // Auth plugin needs the database decoration to be present + dependencies: [], +}); diff --git a/src/server/middleware/error-handler.ts b/src/server/middleware/error-handler.ts new file mode 100644 index 0000000..4bc8a41 --- /dev/null +++ b/src/server/middleware/error-handler.ts @@ -0,0 +1,103 @@ +import { type FastifyInstance, type FastifyError, type FastifyRequest, type 
FastifyReply } from 'fastify'; +import fp from 'fastify-plugin'; +import { appConfig } from '../../config/index'; +import type { ApiError } from '../../types/api'; + +/** + * Centralized error handler plugin. + * + * All errors are caught and returned as structured JSON: + * { statusCode, error, message } + * + * In development mode, stack traces are included. + * API keys are never leaked in error responses. + */ +async function errorHandlerPluginHandler(fastify: FastifyInstance): Promise { + fastify.setErrorHandler((error: FastifyError, request: FastifyRequest, reply: FastifyReply) => { + const statusCode = error.statusCode ?? 500; + const isDev = appConfig.nodeEnv === 'development'; + + // Build error response name from status code + const errorName = statusCodeToName(statusCode); + + // Handle Fastify validation errors (400) + if (error.validation && error.validation.length > 0) { + const validationDetails = error.validation.map((v) => ({ + field: v.instancePath || v.params?.missingProperty || 'unknown', + message: v.message || 'Validation failed', + })); + + const response: ApiError = { + statusCode: 400, + error: 'Bad Request', + message: `Validation failed: ${validationDetails.map((d) => `${d.field} ${d.message}`).join(', ')}`, + }; + + if (isDev && error.stack) { + response.stack = sanitizeStack(error.stack); + } + + return reply.status(400).send(response); + } + + // General error response + const response: ApiError = { + statusCode, + error: errorName, + message: sanitizeMessage(error.message), + }; + + if (isDev && error.stack) { + response.stack = sanitizeStack(error.stack); + } + + // Log server errors (5xx) + if (statusCode >= 500) { + request.log.error({ err: error, statusCode }, 'Server error'); + } + + return reply.status(statusCode).send(response); + }); + + // Handle 404 for undefined routes — SPA fallback is configured in server/index.ts + // when the frontend build directory exists +} + +/** + * Remove any API key values from error messages and 
stack traces. + */ +function sanitizeMessage(message: string): string { + // Redact anything that looks like a UUID API key in error messages + return message.replace( + /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/gi, + '[REDACTED]' + ); +} + +function sanitizeStack(stack: string): string { + return sanitizeMessage(stack); +} + +/** + * Map HTTP status codes to standard error names. + */ +function statusCodeToName(code: number): string { + const names: Record = { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 405: 'Method Not Allowed', + 409: 'Conflict', + 422: 'Unprocessable Entity', + 429: 'Too Many Requests', + 500: 'Internal Server Error', + 502: 'Bad Gateway', + 503: 'Service Unavailable', + }; + return names[code] || 'Error'; +} + +export const errorHandlerPlugin = fp(errorHandlerPluginHandler, { + name: 'error-handler', +}); diff --git a/src/server/routes/channel.ts b/src/server/routes/channel.ts new file mode 100644 index 0000000..99f47bc --- /dev/null +++ b/src/server/routes/channel.ts @@ -0,0 +1,353 @@ +import { type FastifyInstance } from 'fastify'; +import { PlatformRegistry } from '../../sources/platform-source'; +import { YouTubeSource } from '../../sources/youtube'; +import { SoundCloudSource } from '../../sources/soundcloud'; +import { YtDlpError } from '../../sources/yt-dlp'; +import { Platform } from '../../types/index'; +import type { MonitoringMode } from '../../types/index'; +import { + createChannel, + getChannelById, + getAllChannels, + updateChannel, + deleteChannel, + getChannelByPlatformId, + setMonitoringMode, +} from '../../db/repositories/channel-repository'; +import { getContentCountsByChannelIds } from '../../db/repositories/content-repository'; +import { BackCatalogImportService } from '../../services/back-catalog-import'; +import { getAppSetting, APP_CHECK_INTERVAL } from '../../db/repositories/system-config-repository'; + +// ── Default Registry ── + +function 
// ── Default Registry ──

/**
 * Build the platform registry used by the channel routes: one source
 * adapter per supported platform (YouTube, SoundCloud).
 */
function buildDefaultRegistry(): PlatformRegistry {
  const registry = new PlatformRegistry();
  registry.register(Platform.YouTube, new YouTubeSource());
  registry.register(Platform.SoundCloud, new SoundCloudSource());
  return registry;
}

// ── JSON Schemas for Fastify Validation ──

// Body schema for POST /api/v1/channel. Only `url` is required; all other
// fields fall back to server-side defaults at insert time.
const createChannelBodySchema = {
  type: 'object' as const,
  required: ['url'],
  properties: {
    url: { type: 'string' as const },
    // checkInterval is in minutes; must be at least 1.
    checkInterval: { type: 'number' as const, minimum: 1 },
    monitoringEnabled: { type: 'boolean' as const },
    formatProfileId: { type: 'number' as const },
    // grabAll triggers a fire-and-forget back-catalog import after creation.
    grabAll: { type: 'boolean' as const },
    grabAllOrder: { type: 'string' as const, enum: ['newest', 'oldest'] },
  },
  additionalProperties: false,
};

// Body schema for PUT /api/v1/channel/:id. All fields optional; nullable
// formatProfileId allows clearing the profile back to the default.
const updateChannelBodySchema = {
  type: 'object' as const,
  properties: {
    name: { type: 'string' as const, minLength: 1 },
    checkInterval: { type: 'number' as const, minimum: 1 },
    monitoringEnabled: { type: 'boolean' as const },
    formatProfileId: { type: 'number' as const, nullable: true },
  },
  additionalProperties: false,
};

// Body schema for PUT /api/v1/channel/:id/monitoring-mode. The enum here
// must stay in sync with the MonitoringMode type — the handler casts the
// validated string straight to MonitoringMode.
const monitoringModeBodySchema = {
  type: 'object' as const,
  required: ['monitoringMode'],
  properties: {
    monitoringMode: {
      type: 'string' as const,
      enum: ['all', 'future', 'existing', 'none'],
    },
  },
  additionalProperties: false,
};
+ * + * Registers: + * POST /api/v1/channel — add a new channel by URL (resolves metadata via yt-dlp) + * GET /api/v1/channel — list all channels + * GET /api/v1/channel/:id — get a single channel + * PUT /api/v1/channel/:id — update channel fields + * PUT /api/v1/channel/:id/monitoring-mode — change monitoring mode with cascade + * DELETE /api/v1/channel/:id — delete a channel + */ +export async function channelRoutes(fastify: FastifyInstance): Promise { + const registry = buildDefaultRegistry(); + + // ── POST /api/v1/channel ── + + fastify.post<{ + Body: { + url: string; + checkInterval?: number; + monitoringEnabled?: boolean; + formatProfileId?: number; + grabAll?: boolean; + grabAllOrder?: 'newest' | 'oldest'; + }; + }>( + '/api/v1/channel', + { + schema: { body: createChannelBodySchema }, + }, + async (request, reply) => { + const { url, checkInterval, monitoringEnabled, formatProfileId, grabAll, grabAllOrder } = request.body; + + // Validate URL against registered platforms + const match = registry.getForUrl(url); + if (!match) { + return reply.status(422).send({ + statusCode: 422, + error: 'Unprocessable Entity', + message: `Unsupported URL: no registered platform matches "${url}"`, + }); + } + + // Resolve channel metadata via yt-dlp + let metadata; + try { + metadata = await match.source.resolveChannel(url); + } catch (err) { + if (err instanceof YtDlpError) { + request.log.error( + { err, url, platform: match.platform }, + '[channel] yt-dlp failed to resolve channel' + ); + return reply.status(502).send({ + statusCode: 502, + error: 'Bad Gateway', + message: `Failed to resolve channel from URL: ${err.message}`, + }); + } + throw err; // Let error handler deal with unexpected errors + } + + // Check for duplicate channel + const existing = await getChannelByPlatformId( + fastify.db, + metadata.platform, + metadata.platformId + ); + if (existing) { + return reply.status(409).send({ + statusCode: 409, + error: 'Conflict', + message: `Channel already exists: 
"${existing.name}" (${existing.platform}:${existing.platformId})`, + }); + } + + // Read default check interval from DB (seeded from env on first boot) + const defaultInterval = parseInt( + await getAppSetting(fastify.db, APP_CHECK_INTERVAL) ?? '360', + 10 + ); + + // Insert channel + const channel = await createChannel(fastify.db, { + name: metadata.name, + platform: metadata.platform, + platformId: metadata.platformId, + url: metadata.url, + monitoringEnabled: monitoringEnabled ?? true, + checkInterval: checkInterval ?? defaultInterval, + imageUrl: metadata.imageUrl, + metadata: null, + formatProfileId: formatProfileId ?? null, + }); + + // Notify scheduler of new channel + fastify.scheduler?.addChannel(channel); + + // Handle grab-all request — fire-and-forget async import + if (grabAll && fastify.queueService) { + const order = grabAllOrder ?? 'newest'; + request.log.info( + { channelId: channel.id, platform: channel.platform, order }, + `[channel] Grab-all requested for channel ${channel.id} (${order})` + ); + + const importService = new BackCatalogImportService( + fastify.db, + registry, + fastify.queueService + ); + + // Fire-and-forget — the POST response returns immediately + importService.importChannel(channel.id, order).catch((err) => { + request.log.error( + { err, channelId: channel.id, platform: channel.platform }, + `[import] Back-catalog import failed for channel ${channel.id}` + ); + }); + } + + return reply.status(201).send(channel); + } + ); + + // ── GET /api/v1/channel ── + + fastify.get('/api/v1/channel', async (_request, _reply) => { + const channels = await getAllChannels(fastify.db); + const channelIds = channels.map((c) => c.id); + const countsMap = await getContentCountsByChannelIds(fastify.db, channelIds); + + const defaultCounts = { total: 0, monitored: 0, downloaded: 0 }; + return channels.map((channel) => ({ + ...channel, + contentCounts: countsMap.get(channel.id) ?? 
defaultCounts, + })); + }); + + // ── GET /api/v1/channel/:id ── + + fastify.get<{ Params: { id: string } }>( + '/api/v1/channel/:id', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Channel ID must be a number', + }); + } + + const channel = await getChannelById(fastify.db, id); + if (!channel) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Channel with ID ${id} not found`, + }); + } + + return channel; + } + ); + + // ── PUT /api/v1/channel/:id ── + + fastify.put<{ + Params: { id: string }; + Body: { name?: string; checkInterval?: number; monitoringEnabled?: boolean; formatProfileId?: number | null }; + }>( + '/api/v1/channel/:id', + { + schema: { body: updateChannelBodySchema }, + }, + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Channel ID must be a number', + }); + } + + const updated = await updateChannel(fastify.db, id, request.body); + if (!updated) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Channel with ID ${id} not found`, + }); + } + + // Notify scheduler of updated channel + fastify.scheduler?.updateChannel(updated); + + return updated; + } + ); + + // ── PUT /api/v1/channel/:id/monitoring-mode ── + + fastify.put<{ + Params: { id: string }; + Body: { monitoringMode: string }; + }>( + '/api/v1/channel/:id/monitoring-mode', + { schema: { body: monitoringModeBodySchema } }, + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Channel ID must be a number', + }); + } + + try { + const result = await setMonitoringMode( + fastify.db, + id, + request.body.monitoringMode as 
MonitoringMode + ); + + if (!result) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Channel with ID ${id} not found`, + }); + } + + return { success: true, data: result }; + } catch (err) { + request.log.error( + { err, id, monitoringMode: request.body.monitoringMode }, + '[channel] Failed to update monitoring mode' + ); + return reply.status(500).send({ + statusCode: 500, + error: 'Internal Server Error', + message: 'Failed to update monitoring mode', + }); + } + } + ); + + // ── DELETE /api/v1/channel/:id ── + + fastify.delete<{ Params: { id: string } }>( + '/api/v1/channel/:id', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Channel ID must be a number', + }); + } + + // Verify channel exists before deleting + const existing = await getChannelById(fastify.db, id); + if (!existing) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Channel with ID ${id} not found`, + }); + } + + await deleteChannel(fastify.db, id); + + // Notify scheduler of deleted channel + fastify.scheduler?.removeChannel(id); + + return reply.status(204).send(); + } + ); +} diff --git a/src/server/routes/content.ts b/src/server/routes/content.ts new file mode 100644 index 0000000..b2bc480 --- /dev/null +++ b/src/server/routes/content.ts @@ -0,0 +1,237 @@ +import { type FastifyInstance } from 'fastify'; +import { + getAllContentItems, + getContentByChannelId, + setMonitored, + bulkSetMonitored, +} from '../../db/repositories/content-repository'; +import type { PaginatedResponse, ApiResponse } from '../../types/api'; +import type { ContentItem, ContentStatus, ContentType } from '../../types/index'; + +// ── JSON Schemas for Fastify Validation ── + +const bulkMonitoredBodySchema = { + type: 'object' as const, + required: ['ids', 'monitored'], + properties: { + ids: { type: 'array' 
as const, items: { type: 'number' as const }, minItems: 1 }, + monitored: { type: 'boolean' as const }, + }, + additionalProperties: false, +}; + +const toggleMonitoredBodySchema = { + type: 'object' as const, + required: ['monitored'], + properties: { + monitored: { type: 'boolean' as const }, + }, + additionalProperties: false, +}; + +// ── Route Plugin ── + +/** + * Content route plugin. + * + * Registers: + * GET /api/v1/content — paginated content listing with optional filters + * PATCH /api/v1/content/bulk/monitored — bulk toggle monitored state + * PATCH /api/v1/content/:id/monitored — toggle single item monitored state + * GET /api/v1/channel/:id/content — content items for a specific channel + */ +export async function contentRoutes(fastify: FastifyInstance): Promise { + // ── GET /api/v1/content ── + + fastify.get<{ + Querystring: { + page?: string; + pageSize?: string; + status?: string; + contentType?: string; + channelId?: string; + search?: string; + }; + }>('/api/v1/content', async (request, _reply) => { + const page = Math.max(1, parseInt(request.query.page ?? '1', 10) || 1); + const pageSize = Math.min( + 100, + Math.max(1, parseInt(request.query.pageSize ?? 
'20', 10) || 20) + ); + + const filters: { + status?: ContentStatus; + contentType?: ContentType; + channelId?: number; + search?: string; + } = {}; + + if (request.query.status) { + filters.status = request.query.status as ContentStatus; + } + if (request.query.contentType) { + filters.contentType = request.query.contentType as ContentType; + } + if (request.query.channelId) { + const channelId = parseInt(request.query.channelId, 10); + if (!isNaN(channelId)) filters.channelId = channelId; + } + if (request.query.search) { + filters.search = request.query.search; + } + + try { + const result = await getAllContentItems(fastify.db, filters, page, pageSize); + + const response: PaginatedResponse = { + success: true, + data: result.items, + pagination: { + page, + pageSize, + totalItems: result.total, + totalPages: Math.ceil(result.total / pageSize), + }, + }; + + return response; + } catch (err) { + request.log.error( + { err, filters, page, pageSize }, + '[content] Failed to fetch paginated content items' + ); + return _reply.status(500).send({ + statusCode: 500, + error: 'Internal Server Error', + message: 'Failed to retrieve content items', + }); + } + }); + + // ── PATCH /api/v1/content/bulk/monitored ── + // NOTE: Must be registered BEFORE /api/v1/content/:id/* routes + // to prevent Fastify from matching "bulk" as an :id param. 
+ + fastify.patch<{ + Body: { ids: number[]; monitored: boolean }; + }>( + '/api/v1/content/bulk/monitored', + { schema: { body: bulkMonitoredBodySchema } }, + async (request, reply) => { + try { + const count = await bulkSetMonitored( + fastify.db, + request.body.ids, + request.body.monitored + ); + + const response: ApiResponse<{ updated: number }> = { + success: true, + data: { updated: count }, + }; + + return response; + } catch (err) { + request.log.error( + { err, ids: request.body.ids }, + '[content] Failed to bulk update monitored state' + ); + return reply.status(500).send({ + statusCode: 500, + error: 'Internal Server Error', + message: 'Failed to bulk update monitored state', + }); + } + } + ); + + // ── PATCH /api/v1/content/:id/monitored ── + + fastify.patch<{ + Params: { id: string }; + Body: { monitored: boolean }; + }>( + '/api/v1/content/:id/monitored', + { schema: { body: toggleMonitoredBodySchema } }, + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Content item ID must be a number', + }); + } + + try { + const result = await setMonitored( + fastify.db, + id, + request.body.monitored + ); + + if (!result) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: 'Content item not found', + }); + } + + const response: ApiResponse = { + success: true, + data: result, + }; + + return response; + } catch (err) { + request.log.error( + { err, id }, + '[content] Failed to update monitored state' + ); + return reply.status(500).send({ + statusCode: 500, + error: 'Internal Server Error', + message: 'Failed to update monitored state', + }); + } + } + ); + + // ── GET /api/v1/channel/:id/content ── + + fastify.get<{ + Params: { id: string }; + }>('/api/v1/channel/:id/content', async (request, reply) => { + const channelId = parseInt(request.params.id, 10); + + if (isNaN(channelId)) { + return 
reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Invalid channel ID', + }); + } + + try { + const items = await getContentByChannelId(fastify.db, channelId); + + const response: ApiResponse = { + success: true, + data: items, + }; + + return response; + } catch (err) { + request.log.error( + { err, channelId }, + '[content] Failed to fetch content for channel' + ); + return reply.status(500).send({ + statusCode: 500, + error: 'Internal Server Error', + message: `Failed to retrieve content for channel ${channelId}`, + }); + } + }); +} diff --git a/src/server/routes/download.ts b/src/server/routes/download.ts new file mode 100644 index 0000000..a55a295 --- /dev/null +++ b/src/server/routes/download.ts @@ -0,0 +1,80 @@ +import { type FastifyInstance } from 'fastify'; +import { + getContentItemById, +} from '../../db/repositories/content-repository'; + +// ── Route Plugin ── + +/** + * Download trigger route plugin. + * + * Registers: + * POST /api/v1/download/:contentItemId — enqueue a content item for download + * + * Downloads now flow through the queue instead of calling DownloadService + * directly. The response is 202 Accepted with the queue item. 
+ */ +export async function downloadRoutes(fastify: FastifyInstance): Promise { + // ── POST /api/v1/download/:contentItemId ── + + fastify.post<{ Params: { contentItemId: string } }>( + '/api/v1/download/:contentItemId', + async (request, reply) => { + const contentItemId = parseInt(request.params.contentItemId, 10); + if (isNaN(contentItemId)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Content item ID must be a number', + }); + } + + // Look up content item + const contentItem = await getContentItemById(fastify.db, contentItemId); + if (!contentItem) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Content item with ID ${contentItemId} not found`, + }); + } + + // Reject if already downloading or downloaded + if (contentItem.status === 'downloading' || contentItem.status === 'downloaded') { + return reply.status(409).send({ + statusCode: 409, + error: 'Conflict', + message: `Content item is already ${contentItem.status}`, + }); + } + + // Ensure queue service is available + if (!fastify.queueService) { + return reply.status(503).send({ + statusCode: 503, + error: 'Service Unavailable', + message: 'Queue service is not initialized', + }); + } + + // Enqueue for download + try { + const queueItem = await fastify.queueService.enqueue(contentItemId); + return reply.status(202).send({ success: true, data: queueItem }); + } catch (err: unknown) { + const message = err instanceof Error ? 
err.message : String(err); + + // Already queued → 409 Conflict + if (message.includes('already in the queue')) { + return reply.status(409).send({ + statusCode: 409, + error: 'Conflict', + message, + }); + } + + throw err; // Let error handler deal with unexpected errors + } + } + ); +} diff --git a/src/server/routes/format-profile.ts b/src/server/routes/format-profile.ts new file mode 100644 index 0000000..74e7bc7 --- /dev/null +++ b/src/server/routes/format-profile.ts @@ -0,0 +1,207 @@ +import { type FastifyInstance } from 'fastify'; +import { + createFormatProfile, + getAllFormatProfiles, + getFormatProfileById, + updateFormatProfile, + deleteFormatProfile, +} from '../../db/repositories/format-profile-repository'; + +// ── JSON Schemas for Fastify Validation ── + +const createFormatProfileBodySchema = { + type: 'object' as const, + required: ['name'], + properties: { + name: { type: 'string' as const, minLength: 1 }, + videoResolution: { type: 'string' as const, nullable: true }, + audioCodec: { type: 'string' as const, nullable: true }, + audioBitrate: { type: 'string' as const, nullable: true }, + containerFormat: { type: 'string' as const, nullable: true }, + isDefault: { type: 'boolean' as const }, + subtitleLanguages: { type: 'string' as const, nullable: true }, + embedSubtitles: { type: 'boolean' as const }, + }, + additionalProperties: false, +}; + +const updateFormatProfileBodySchema = { + type: 'object' as const, + properties: { + name: { type: 'string' as const, minLength: 1 }, + videoResolution: { type: 'string' as const, nullable: true }, + audioCodec: { type: 'string' as const, nullable: true }, + audioBitrate: { type: 'string' as const, nullable: true }, + containerFormat: { type: 'string' as const, nullable: true }, + isDefault: { type: 'boolean' as const }, + subtitleLanguages: { type: 'string' as const, nullable: true }, + embedSubtitles: { type: 'boolean' as const }, + }, + additionalProperties: false, +}; + +// ── Route Plugin ── + +/** + * 
Format profile CRUD route plugin. + * + * Registers: + * POST /api/v1/format-profile — create a new format profile + * GET /api/v1/format-profile — list all format profiles + * GET /api/v1/format-profile/:id — get a single format profile + * PUT /api/v1/format-profile/:id — update format profile fields + * DELETE /api/v1/format-profile/:id — delete a format profile + */ +export async function formatProfileRoutes(fastify: FastifyInstance): Promise { + // ── POST /api/v1/format-profile ── + + fastify.post<{ + Body: { + name: string; + videoResolution?: string | null; + audioCodec?: string | null; + audioBitrate?: string | null; + containerFormat?: string | null; + isDefault?: boolean; + subtitleLanguages?: string | null; + embedSubtitles?: boolean; + }; + }>( + '/api/v1/format-profile', + { + schema: { body: createFormatProfileBodySchema }, + }, + async (request, reply) => { + const profile = await createFormatProfile(fastify.db, request.body); + return reply.status(201).send(profile); + } + ); + + // ── GET /api/v1/format-profile ── + + fastify.get('/api/v1/format-profile', async (_request, _reply) => { + return getAllFormatProfiles(fastify.db); + }); + + // ── GET /api/v1/format-profile/:id ── + + fastify.get<{ Params: { id: string } }>( + '/api/v1/format-profile/:id', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Format profile ID must be a number', + }); + } + + const profile = await getFormatProfileById(fastify.db, id); + if (!profile) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Format profile with ID ${id} not found`, + }); + } + + return profile; + } + ); + + // ── PUT /api/v1/format-profile/:id ── + + fastify.put<{ + Params: { id: string }; + Body: { + name?: string; + videoResolution?: string | null; + audioCodec?: string | null; + audioBitrate?: string | null; + 
containerFormat?: string | null; + isDefault?: boolean; + subtitleLanguages?: string | null; + embedSubtitles?: boolean; + }; + }>( + '/api/v1/format-profile/:id', + { + schema: { body: updateFormatProfileBodySchema }, + }, + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Format profile ID must be a number', + }); + } + + // Guard: prevent unsetting isDefault on the default profile + const existing = await getFormatProfileById(fastify.db, id); + if (!existing) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Format profile with ID ${id} not found`, + }); + } + if (existing.isDefault && request.body.isDefault === false) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Cannot unset isDefault on the default format profile', + }); + } + + const updated = await updateFormatProfile(fastify.db, id, request.body); + if (!updated) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Format profile with ID ${id} not found`, + }); + } + + return updated; + } + ); + + // ── DELETE /api/v1/format-profile/:id ── + + fastify.delete<{ Params: { id: string } }>( + '/api/v1/format-profile/:id', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Format profile ID must be a number', + }); + } + + // Guard: prevent deleting the default profile + const profile = await getFormatProfileById(fastify.db, id); + if (!profile) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Format profile with ID ${id} not found`, + }); + } + if (profile.isDefault) { + return reply.status(403).send({ + statusCode: 403, + error: 'Forbidden', + message: 'Cannot delete the default format 
profile', + }); + } + + await deleteFormatProfile(fastify.db, id); + return reply.status(204).send(); + } + ); +} diff --git a/src/server/routes/health.ts b/src/server/routes/health.ts new file mode 100644 index 0000000..bf163ac --- /dev/null +++ b/src/server/routes/health.ts @@ -0,0 +1,86 @@ +import { type FastifyInstance } from 'fastify'; +import { getRawClient } from '../../db/index'; +import type { HealthResponse, ComponentHealth } from '../../types/api'; + +const serverStartTime = Date.now(); + +/** + * Health route plugin. + * + * Registers: + * GET /ping — unauthenticated liveness probe (Docker health checks) + * GET /api/v1/health — authenticated component health status + */ +export async function healthRoutes(fastify: FastifyInstance): Promise { + // Unauthenticated liveness probe + fastify.get('/ping', async (_request, _reply) => { + return { status: 'ok' }; + }); + + // Authenticated component health + fastify.get('/api/v1/health', async (_request, _reply) => { + const components: ComponentHealth[] = []; + + // Database health check + const dbHealth = await checkDatabaseHealth(); + components.push(dbHealth); + + // Server health (always healthy if we're responding) + components.push({ + name: 'server', + status: 'healthy', + message: 'Fastify server is running', + }); + + // Health service components (scheduler, yt-dlp, disk space, recent errors) + if (fastify.healthService) { + const serviceComponents = await fastify.healthService.getComponentHealth(); + components.push(...serviceComponents); + } + + // Overall status: degraded if any component is degraded, unhealthy if any is unhealthy + let overallStatus: HealthResponse['status'] = 'healthy'; + if (components.some((c) => c.status === 'unhealthy')) { + overallStatus = 'unhealthy'; + } else if (components.some((c) => c.status === 'degraded')) { + overallStatus = 'degraded'; + } + + const uptimeSeconds = Math.floor((Date.now() - serverStartTime) / 1000); + + const response: HealthResponse = { + status: 
overallStatus, + components, + uptime: uptimeSeconds, + }; + + return response; + }); +} + +/** + * Check database connectivity by running a simple query. + */ +async function checkDatabaseHealth(): Promise { + const start = Date.now(); + try { + const client = getRawClient(); + await client.execute('SELECT 1'); + const elapsed = Date.now() - start; + + return { + name: 'database', + status: 'healthy', + message: 'SQLite connection is active', + responseTime: elapsed, + }; + } catch (err) { + const elapsed = Date.now() - start; + return { + name: 'database', + status: 'unhealthy', + message: err instanceof Error ? err.message : 'Database check failed', + responseTime: elapsed, + }; + } +} diff --git a/src/server/routes/history.ts b/src/server/routes/history.ts new file mode 100644 index 0000000..bee4592 --- /dev/null +++ b/src/server/routes/history.ts @@ -0,0 +1,85 @@ +import { type FastifyInstance } from 'fastify'; +import { + getHistoryEvents, + getRecentActivity, +} from '../../db/repositories/history-repository'; +import type { PaginatedResponse } from '../../types/api'; +import type { DownloadHistoryRecord } from '../../types/index'; + +// ── Route Plugin ── + +/** + * History and activity route plugin. + * + * Registers: + * GET /api/v1/history — paginated history events with optional filters + * GET /api/v1/activity — recent activity feed + */ +export async function historyRoutes(fastify: FastifyInstance): Promise { + // ── GET /api/v1/history ── + + fastify.get<{ + Querystring: { + page?: string; + pageSize?: string; + eventType?: string; + channelId?: string; + contentItemId?: string; + }; + }>('/api/v1/history', async (request, _reply) => { + const page = Math.max(1, parseInt(request.query.page ?? '1', 10) || 1); + const pageSize = Math.min( + 100, + Math.max(1, parseInt(request.query.pageSize ?? 
'20', 10) || 20) + ); + + const filters: { + eventType?: string; + channelId?: number; + contentItemId?: number; + } = {}; + + if (request.query.eventType) { + filters.eventType = request.query.eventType; + } + if (request.query.channelId) { + const channelId = parseInt(request.query.channelId, 10); + if (!isNaN(channelId)) filters.channelId = channelId; + } + if (request.query.contentItemId) { + const contentItemId = parseInt(request.query.contentItemId, 10); + if (!isNaN(contentItemId)) filters.contentItemId = contentItemId; + } + + const result = await getHistoryEvents(fastify.db, filters, page, pageSize); + + const response: PaginatedResponse = { + success: true, + data: result.items, + pagination: { + page, + pageSize, + totalItems: result.total, + totalPages: Math.ceil(result.total / pageSize), + }, + }; + + return response; + }); + + // ── GET /api/v1/activity ── + + fastify.get<{ Querystring: { limit?: string } }>( + '/api/v1/activity', + async (request, _reply) => { + const limit = Math.min( + 200, + Math.max(1, parseInt(request.query.limit ?? 
// ── JSON Schemas for Fastify Validation ──

// Body schema for POST /api/v1/notification. `type`, `name`, and a config
// with a webhookUrl are mandatory; the three on* flags choose which events
// trigger the notification.
const createNotificationBodySchema = {
  type: 'object' as const,
  required: ['type', 'name', 'config'],
  properties: {
    // Currently Discord is the only supported provider.
    type: { type: 'string' as const, enum: ['discord'] },
    name: { type: 'string' as const, minLength: 1 },
    config: {
      type: 'object' as const,
      required: ['webhookUrl'],
      properties: {
        webhookUrl: { type: 'string' as const, minLength: 1 },
      },
    },
    enabled: { type: 'boolean' as const },
    onGrab: { type: 'boolean' as const },
    onDownload: { type: 'boolean' as const },
    onFailure: { type: 'boolean' as const },
  },
  additionalProperties: false,
};

// Body schema for PUT /api/v1/notification/:id — same shape, everything
// optional (partial update), including the nested webhookUrl.
const updateNotificationBodySchema = {
  type: 'object' as const,
  properties: {
    name: { type: 'string' as const, minLength: 1 },
    type: { type: 'string' as const, enum: ['discord'] },
    config: {
      type: 'object' as const,
      properties: {
        webhookUrl: { type: 'string' as const, minLength: 1 },
      },
    },
    enabled: { type: 'boolean' as const },
    onGrab: { type: 'boolean' as const },
    onDownload: { type: 'boolean' as const },
    onFailure: { type: 'boolean' as const },
  },
  additionalProperties: false,
};
'...'. + * Never expose full webhook URLs in API responses (they contain auth tokens). + */ +function redactWebhookUrl(url: string): string { + if (url.length <= 20) return url; + return url.slice(0, 20) + '...'; +} + +/** + * Return a copy of the notification setting with the webhook URL redacted. + */ +function redactSetting(setting: NotificationSetting): NotificationSetting { + const config = { ...setting.config }; + if (typeof config.webhookUrl === 'string') { + config.webhookUrl = redactWebhookUrl(config.webhookUrl); + } + return { ...setting, config }; +} + +// ── Route Plugin ── + +/** + * Notification setting CRUD route plugin. + * + * Registers: + * POST /api/v1/notification — create a notification setting + * GET /api/v1/notification — list all notification settings (redacted) + * GET /api/v1/notification/:id — get a single setting (redacted) + * PUT /api/v1/notification/:id — update setting fields + * DELETE /api/v1/notification/:id — delete a setting + * POST /api/v1/notification/:id/test — send a test notification + */ +export async function notificationRoutes(fastify: FastifyInstance): Promise { + // ── POST /api/v1/notification ── + + fastify.post<{ + Body: { + type: 'discord'; + name: string; + config: { webhookUrl: string }; + enabled?: boolean; + onGrab?: boolean; + onDownload?: boolean; + onFailure?: boolean; + }; + }>( + '/api/v1/notification', + { + schema: { body: createNotificationBodySchema }, + }, + async (request, reply) => { + const setting = await createNotificationSetting(fastify.db, request.body); + return reply.status(201).send(redactSetting(setting)); + } + ); + + // ── GET /api/v1/notification ── + + fastify.get('/api/v1/notification', async (_request, _reply) => { + const settings = await getAllNotificationSettings(fastify.db); + return settings.map(redactSetting); + }); + + // ── GET /api/v1/notification/:id ── + + fastify.get<{ Params: { id: string } }>( + '/api/v1/notification/:id', + async (request, reply) => { + const id = 
// ── Route Plugin ──

/**
 * Notification setting CRUD route plugin.
 *
 * Registers:
 *   POST   /api/v1/notification          — create a notification setting
 *   GET    /api/v1/notification          — list all notification settings (redacted)
 *   GET    /api/v1/notification/:id      — get a single setting (redacted)
 *   PUT    /api/v1/notification/:id      — update setting fields
 *   DELETE /api/v1/notification/:id      — delete a setting
 *   POST   /api/v1/notification/:id/test — send a test notification
 *
 * Every response passes through redactSetting so full webhook URLs (which
 * embed auth tokens) never leave the server.
 */
export async function notificationRoutes(fastify: FastifyInstance): Promise<void> {
  // ── POST /api/v1/notification ──

  fastify.post<{
    Body: {
      type: 'discord';
      name: string;
      config: { webhookUrl: string };
      enabled?: boolean;
      onGrab?: boolean;
      onDownload?: boolean;
      onFailure?: boolean;
    };
  }>(
    '/api/v1/notification',
    {
      schema: { body: createNotificationBodySchema },
    },
    async (request, reply) => {
      const setting = await createNotificationSetting(fastify.db, request.body);
      // Redact before echoing back — the caller already knows the URL.
      return reply.status(201).send(redactSetting(setting));
    }
  );

  // ── GET /api/v1/notification ──

  fastify.get('/api/v1/notification', async (_request, _reply) => {
    const settings = await getAllNotificationSettings(fastify.db);
    return settings.map(redactSetting);
  });

  // ── GET /api/v1/notification/:id ──

  fastify.get<{ Params: { id: string } }>(
    '/api/v1/notification/:id',
    async (request, reply) => {
      const id = parseInt(request.params.id, 10);
      if (isNaN(id)) {
        return reply.status(400).send({
          statusCode: 400,
          error: 'Bad Request',
          message: 'Notification setting ID must be a number',
        });
      }

      const setting = await getNotificationSettingById(fastify.db, id);
      if (!setting) {
        return reply.status(404).send({
          statusCode: 404,
          error: 'Not Found',
          message: `Notification setting with ID ${id} not found`,
        });
      }

      return redactSetting(setting);
    }
  );

  // ── PUT /api/v1/notification/:id ──

  fastify.put<{
    Params: { id: string };
    Body: {
      name?: string;
      type?: 'discord';
      config?: { webhookUrl: string };
      enabled?: boolean;
      onGrab?: boolean;
      onDownload?: boolean;
      onFailure?: boolean;
    };
  }>(
    '/api/v1/notification/:id',
    {
      schema: { body: updateNotificationBodySchema },
    },
    async (request, reply) => {
      const id = parseInt(request.params.id, 10);
      if (isNaN(id)) {
        return reply.status(400).send({
          statusCode: 400,
          error: 'Bad Request',
          message: 'Notification setting ID must be a number',
        });
      }

      const updated = await updateNotificationSetting(fastify.db, id, request.body);
      if (!updated) {
        return reply.status(404).send({
          statusCode: 404,
          error: 'Not Found',
          message: `Notification setting with ID ${id} not found`,
        });
      }

      return redactSetting(updated);
    }
  );

  // ── DELETE /api/v1/notification/:id ──

  fastify.delete<{ Params: { id: string } }>(
    '/api/v1/notification/:id',
    async (request, reply) => {
      const id = parseInt(request.params.id, 10);
      if (isNaN(id)) {
        return reply.status(400).send({
          statusCode: 400,
          error: 'Bad Request',
          message: 'Notification setting ID must be a number',
        });
      }

      const deleted = await deleteNotificationSetting(fastify.db, id);
      if (!deleted) {
        return reply.status(404).send({
          statusCode: 404,
          error: 'Not Found',
          message: `Notification setting with ID ${id} not found`,
        });
      }

      return reply.status(204).send();
    }
  );

  // ── POST /api/v1/notification/:id/test ──

  fastify.post<{ Params: { id: string } }>(
    '/api/v1/notification/:id/test',
    async (request, reply) => {
      const id = parseInt(request.params.id, 10);
      if (isNaN(id)) {
        return reply.status(400).send({
          statusCode: 400,
          error: 'Bad Request',
          message: 'Notification setting ID must be a number',
        });
      }

      const setting = await getNotificationSettingById(fastify.db, id);
      if (!setting) {
        return reply.status(404).send({
          statusCode: 404,
          error: 'Not Found',
          message: `Notification setting with ID ${id} not found`,
        });
      }

      try {
        // Send a test notification directly to this setting's webhook.
        // NOTE(review): this cast implies setting.config is loosely typed
        // at this point — confirm against the NotificationSetting type.
        const config = setting.config as { webhookUrl?: string };
        if (!config?.webhookUrl) {
          // Deliberately a 200 with success:false — the request itself was
          // valid; only the dispatch outcome failed.
          return reply.send({ success: false, error: 'No webhook URL configured' });
        }

        // Discord-style embed payload exercising every field the real
        // notifications use.
        const testEmbed = {
          title: '🧪 Test Notification',
          description: '**Test Content Title**',
          color: 0x2ecc71,
          fields: [
            { name: 'Channel', value: 'Test Channel', inline: true },
            { name: 'Platform', value: 'youtube', inline: true },
            { name: 'URL', value: 'https://example.com/test' },
          ],
          timestamp: new Date().toISOString(),
        };

        const response = await fetch(config.webhookUrl, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ embeds: [testEmbed] }),
        });

        if (!response.ok) {
          return reply.send({
            success: false,
            error: `Discord returned HTTP ${response.status}`,
          });
        }

        request.log.info(
          { settingId: setting.id, channel: setting.name },
          `[notification] test dispatch success channel="${setting.name}"`
        );
        return reply.send({ success: true });
      } catch (err) {
        const errorMsg = err instanceof Error ? err.message : String(err);
        request.log.error(
          { err, settingId: setting.id, channel: setting.name },
          `[notification] test dispatch error channel="${setting.name}"`
        );
        return reply.send({ success: false, error: errorMsg });
      }
    }
  );
}
+ * + * Registers: + * GET /api/v1/platform-settings — list all platform settings + * GET /api/v1/platform-settings/:platform — get settings for a specific platform + * PUT /api/v1/platform-settings/:platform — upsert settings for a platform + * DELETE /api/v1/platform-settings/:platform — delete settings for a platform + */ +export async function platformSettingsRoutes(fastify: FastifyInstance): Promise { + // ── GET /api/v1/platform-settings ── + + fastify.get('/api/v1/platform-settings', async (_request, _reply) => { + return getAllPlatformSettings(fastify.db); + }); + + // ── GET /api/v1/platform-settings/:platform ── + + fastify.get<{ Params: { platform: string } }>( + '/api/v1/platform-settings/:platform', + async (request, reply) => { + const { platform } = request.params; + + const settings = await getPlatformSettings(fastify.db, platform); + if (!settings) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Platform settings for '${platform}' not found`, + }); + } + + return settings; + } + ); + + // ── PUT /api/v1/platform-settings/:platform ── + + fastify.put<{ + Params: { platform: string }; + Body: { + defaultFormatProfileId?: number | null; + checkInterval?: number; + concurrencyLimit?: number; + subtitleLanguages?: string | null; + grabAllEnabled?: boolean; + grabAllOrder?: 'newest' | 'oldest'; + scanLimit?: number; + rateLimitDelay?: number; + }; + }>( + '/api/v1/platform-settings/:platform', + { + schema: { body: upsertPlatformSettingsBodySchema }, + }, + async (request, reply) => { + const { platform } = request.params; + + // Validate platform enum + if (!VALID_PLATFORMS.includes(platform as typeof VALID_PLATFORMS[number])) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: `Invalid platform '${platform}'. 
Must be one of: ${VALID_PLATFORMS.join(', ')}`, + }); + } + + const settings = await upsertPlatformSettings(fastify.db, { + platform: platform as typeof VALID_PLATFORMS[number], + ...request.body, + }); + + return settings; + } + ); + + // ── DELETE /api/v1/platform-settings/:platform ── + + fastify.delete<{ Params: { platform: string } }>( + '/api/v1/platform-settings/:platform', + async (request, reply) => { + const { platform } = request.params; + + const deleted = await deletePlatformSettings(fastify.db, platform); + if (!deleted) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Platform settings for '${platform}' not found`, + }); + } + + return reply.status(204).send(); + } + ); +} diff --git a/src/server/routes/playlist.ts b/src/server/routes/playlist.ts new file mode 100644 index 0000000..03bfefb --- /dev/null +++ b/src/server/routes/playlist.ts @@ -0,0 +1,122 @@ +import { type FastifyInstance } from 'fastify'; +import { PlatformRegistry } from '../../sources/platform-source'; +import { YouTubeSource } from '../../sources/youtube'; +import { SoundCloudSource } from '../../sources/soundcloud'; +import { Platform } from '../../types/index'; +import { getChannelById } from '../../db/repositories/channel-repository'; +import { + getPlaylistsByChannelId, + upsertPlaylists, + getContentPlaylistMappings, +} from '../../db/repositories/playlist-repository'; + +// ── Default Registry ── + +function buildDefaultRegistry(): PlatformRegistry { + const registry = new PlatformRegistry(); + registry.register(Platform.YouTube, new YouTubeSource()); + registry.register(Platform.SoundCloud, new SoundCloudSource()); + return registry; +} + +// ── Route Plugin ── + +/** + * Playlist API route plugin. 
+ * + * Registers: + * GET /api/v1/channel/:id/playlists — list playlists + content mappings + * POST /api/v1/channel/:id/playlists/refresh — refresh playlists from platform + */ +export async function playlistRoutes(fastify: FastifyInstance): Promise { + const registry = buildDefaultRegistry(); + + // ── GET /api/v1/channel/:id/playlists ── + + fastify.get<{ Params: { id: string } }>( + '/api/v1/channel/:id/playlists', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Channel ID must be a number', + }); + } + + const channel = await getChannelById(fastify.db, id); + if (!channel) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Channel with ID ${id} not found`, + }); + } + + const playlistList = await getPlaylistsByChannelId(fastify.db, id); + const mappings = await getContentPlaylistMappings(fastify.db, id); + + return { success: true, data: { playlists: playlistList, mappings } }; + } + ); + + // ── POST /api/v1/channel/:id/playlists/refresh ── + + fastify.post<{ Params: { id: string } }>( + '/api/v1/channel/:id/playlists/refresh', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Channel ID must be a number', + }); + } + + const channel = await getChannelById(fastify.db, id); + if (!channel) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Channel with ID ${id} not found`, + }); + } + + const source = registry.get(channel.platform as Platform); + if (!source || !source.fetchPlaylists) { + return reply.status(422).send({ + statusCode: 422, + error: 'Unprocessable Entity', + message: `Platform "${channel.platform}" does not support playlist discovery`, + }); + } + + try { + const discoveryResults = await 
source.fetchPlaylists(channel); + const upsertedPlaylists = await upsertPlaylists( + fastify.db, + channel.id, + discoveryResults + ); + const mappings = await getContentPlaylistMappings(fastify.db, channel.id); + + return { + success: true, + data: { playlists: upsertedPlaylists, mappings }, + }; + } catch (err) { + request.log.error( + { err, channelId: channel.id, platform: channel.platform }, + '[playlist] Failed to refresh playlists' + ); + return reply.status(500).send({ + statusCode: 500, + error: 'Internal Server Error', + message: `Failed to refresh playlists: ${err instanceof Error ? err.message : String(err)}`, + }); + } + } + ); +} diff --git a/src/server/routes/queue.ts b/src/server/routes/queue.ts new file mode 100644 index 0000000..4faf99f --- /dev/null +++ b/src/server/routes/queue.ts @@ -0,0 +1,236 @@ +import { type FastifyInstance } from 'fastify'; +import { + getQueueItemsByStatus, + getQueueItemById, + getAllQueueItems, +} from '../../db/repositories/queue-repository'; +import { getContentItemById } from '../../db/repositories/content-repository'; +import type { QueueStatus } from '../../types/index'; + +// ── Route Plugin ── + +/** + * Queue management route plugin. 
+ * + * Registers: + * GET /api/v1/queue — list queue items (optional ?status= filter) + * GET /api/v1/queue/:id — get a single queue item + * POST /api/v1/queue — enqueue a content item for download + * DELETE /api/v1/queue/:id — cancel a queue item + * POST /api/v1/queue/:id/retry — retry a failed queue item + */ +export async function queueRoutes(fastify: FastifyInstance): Promise { + // ── GET /api/v1/queue ── + + fastify.get<{ Querystring: { status?: string } }>( + '/api/v1/queue', + async (request, _reply) => { + const { status } = request.query; + + if (status) { + const validStatuses: QueueStatus[] = [ + 'pending', + 'downloading', + 'completed', + 'failed', + 'cancelled', + ]; + if (!validStatuses.includes(status as QueueStatus)) { + return _reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: `Invalid status filter: "${status}". Valid values: ${validStatuses.join(', ')}`, + }); + } + const items = await getQueueItemsByStatus(fastify.db, status as QueueStatus); + return { success: true, data: items }; + } + + // No filter — return all queue items with a single query + const allItems = await getAllQueueItems(fastify.db); + return { success: true, data: allItems }; + } + ); + + // ── GET /api/v1/queue/:id ── + + fastify.get<{ Params: { id: string } }>( + '/api/v1/queue/:id', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Queue item ID must be a number', + }); + } + + const item = await getQueueItemById(fastify.db, id); + if (!item) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Queue item with ID ${id} not found`, + }); + } + + return { success: true, data: item }; + } + ); + + // ── POST /api/v1/queue ── + + fastify.post<{ Body: { contentItemId: number; priority?: number } }>( + '/api/v1/queue', + async (request, reply) => { + const { contentItemId, 
priority } = request.body; + + if (contentItemId == null || typeof contentItemId !== 'number') { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'contentItemId is required and must be a number', + }); + } + + // Verify content item exists + const contentItem = await getContentItemById(fastify.db, contentItemId); + if (!contentItem) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Content item with ID ${contentItemId} not found`, + }); + } + + // Ensure queue service is available + if (!fastify.queueService) { + return reply.status(503).send({ + statusCode: 503, + error: 'Service Unavailable', + message: 'Queue service is not initialized', + }); + } + + try { + const queueItem = await fastify.queueService.enqueue( + contentItemId, + priority ?? 0 + ); + return reply.status(201).send({ success: true, data: queueItem }); + } catch (err: unknown) { + const message = err instanceof Error ? err.message : String(err); + + // Already queued → 409 Conflict + if (message.includes('already in the queue')) { + return reply.status(409).send({ + statusCode: 409, + error: 'Conflict', + message, + }); + } + + throw err; // Let error handler deal with unexpected errors + } + } + ); + + // ── DELETE /api/v1/queue/:id ── + + fastify.delete<{ Params: { id: string } }>( + '/api/v1/queue/:id', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Queue item ID must be a number', + }); + } + + if (!fastify.queueService) { + return reply.status(503).send({ + statusCode: 503, + error: 'Service Unavailable', + message: 'Queue service is not initialized', + }); + } + + try { + const cancelled = await fastify.queueService.cancelItem(id); + return reply.status(200).send({ success: true, data: cancelled }); + } catch (err: unknown) { + const message = err instanceof Error ? 
err.message : String(err); + + if (message.includes('not found')) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message, + }); + } + + if (message.includes('Cannot cancel')) { + return reply.status(409).send({ + statusCode: 409, + error: 'Conflict', + message, + }); + } + + throw err; + } + } + ); + + // ── POST /api/v1/queue/:id/retry ── + + fastify.post<{ Params: { id: string } }>( + '/api/v1/queue/:id/retry', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Queue item ID must be a number', + }); + } + + if (!fastify.queueService) { + return reply.status(503).send({ + statusCode: 503, + error: 'Service Unavailable', + message: 'Queue service is not initialized', + }); + } + + try { + const retried = await fastify.queueService.retryItem(id); + return reply.status(200).send({ success: true, data: retried }); + } catch (err: unknown) { + const message = err instanceof Error ? err.message : String(err); + + if (message.includes('not found')) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message, + }); + } + + if (message.includes('Cannot retry')) { + return reply.status(409).send({ + statusCode: 409, + error: 'Conflict', + message, + }); + } + + throw err; + } + } + ); +} diff --git a/src/server/routes/scan.ts b/src/server/routes/scan.ts new file mode 100644 index 0000000..af1ce3a --- /dev/null +++ b/src/server/routes/scan.ts @@ -0,0 +1,108 @@ +import { type FastifyInstance } from 'fastify'; +import type { CheckChannelResult } from '../../services/scheduler'; +import { + getChannelById, + getEnabledChannels, +} from '../../db/repositories/channel-repository'; + +// ── Route Plugin ── + +/** + * Scan trigger routes for manual content checks. 
+ * + * Registers: + * POST /api/v1/channel/:id/scan — scan a single channel for new content + * POST /api/v1/channel/scan-all — scan all enabled channels sequentially + */ +export async function scanRoutes(fastify: FastifyInstance): Promise { + + // ── POST /api/v1/channel/scan-all ── + // Registered before :id/scan to prevent "scan-all" matching as an :id param + fastify.post('/api/v1/channel/scan-all', async (_request, reply) => { + if (!fastify.scheduler) { + return reply.status(503).send({ + statusCode: 503, + error: 'Service Unavailable', + message: 'Scheduler is not running', + }); + } + + const channels = await getEnabledChannels(fastify.db); + const results: CheckChannelResult[] = []; + let errors = 0; + let totalNewItems = 0; + + // Sequential iteration — one channel at a time to respect rate limits + for (const channel of channels) { + try { + const result = await fastify.scheduler.checkChannel(channel); + results.push(result); + totalNewItems += result.newItems; + if (result.status === 'error') { + errors++; + } + } catch (err) { + // Defensive — checkChannel should not throw after T01, but don't let + // one failure prevent scanning the remaining channels + fastify.log.error( + { err, channelId: channel.id }, + '[scan] Unexpected error scanning channel %d', + channel.id + ); + results.push({ + channelId: channel.id, + channelName: channel.name, + newItems: 0, + totalFetched: 0, + status: 'error', + }); + errors++; + } + } + + const summary = { + total: channels.length, + scanned: results.length, + newItems: totalNewItems, + errors, + }; + + return { results, summary }; + }); + + // ── POST /api/v1/channel/:id/scan ── + + fastify.post<{ Params: { id: string } }>( + '/api/v1/channel/:id/scan', + async (request, reply) => { + const id = parseInt(request.params.id, 10); + if (isNaN(id)) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'Channel ID must be a number', + }); + } + + const channel = await 
getChannelById(fastify.db, id); + if (!channel) { + return reply.status(404).send({ + statusCode: 404, + error: 'Not Found', + message: `Channel with ID ${id} not found`, + }); + } + + if (!fastify.scheduler) { + return reply.status(503).send({ + statusCode: 503, + error: 'Service Unavailable', + message: 'Scheduler is not running', + }); + } + + const result = await fastify.scheduler.checkChannel(channel); + return result; + } + ); +} diff --git a/src/server/routes/system.ts b/src/server/routes/system.ts new file mode 100644 index 0000000..80ecbb9 --- /dev/null +++ b/src/server/routes/system.ts @@ -0,0 +1,182 @@ +import { type FastifyInstance } from 'fastify'; +import { readFileSync } from 'node:fs'; +import { resolve, dirname } from 'node:path'; +import { fileURLToPath } from 'node:url'; +import { randomUUID } from 'node:crypto'; +import { eq } from 'drizzle-orm'; +import { appConfig } from '../../config/index'; +import type { SystemStatusResponse, ApiKeyResponse, AppSettingsResponse } from '../../types/api'; +import { systemConfig } from '../../db/schema/index'; +import { API_KEY_DB_KEY } from '../middleware/auth'; +import { + getAppSettings, + setAppSetting, + APP_CHECK_INTERVAL, + APP_CONCURRENT_DOWNLOADS, +} from '../../db/repositories/system-config-repository'; +import os from 'node:os'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +// Read version from package.json at module load time +let appVersion = '0.0.0'; +try { + const pkgPath = resolve(__dirname, '../../../package.json'); + const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8')); + appVersion = pkg.version ?? '0.0.0'; +} catch { + // Fallback — package.json not found (e.g., in tests) + appVersion = '0.0.0'; +} + +const serverStartTime = Date.now(); + +/** + * System route plugin. 
+ * + * Registers: + * GET /api/v1/system/status — authenticated system status & runtime info + * GET /api/v1/system/apikey — returns current API key (for Settings UI) + * POST /api/v1/system/apikey/regenerate — rotates API key and returns new one + */ +export async function systemRoutes(fastify: FastifyInstance): Promise { + fastify.get('/api/v1/system/status', async (_request, _reply) => { + const uptimeSeconds = Math.floor((Date.now() - serverStartTime) / 1000); + const mem = process.memoryUsage(); + + const response: SystemStatusResponse = { + appName: 'Tubearr', + version: appVersion, + uptime: uptimeSeconds, + nodeVersion: process.version, + platform: os.platform(), + arch: os.arch(), + memoryUsage: { + heapUsed: mem.heapUsed, + heapTotal: mem.heapTotal, + rss: mem.rss, + }, + }; + + return response; + }); + + /** + * GET /api/v1/system/apikey — Read the current API key. + * Protected by the same auth middleware (same-origin or API key). + */ + fastify.get('/api/v1/system/apikey', async (_request, _reply) => { + const db = fastify.db; + const rows = await db + .select() + .from(systemConfig) + .where(eq(systemConfig.key, API_KEY_DB_KEY)) + .limit(1); + + const apiKey = rows[0]?.value ?? ''; + + const response: ApiKeyResponse = { apiKey }; + return response; + }); + + /** + * POST /api/v1/system/apikey/regenerate — Rotate the API key. + * Generates a new UUID key, updates the DB, and returns the new key. + * The old key immediately stops working for external requests. + * Protected by the same auth middleware (same-origin or API key). 
+ */ + fastify.post('/api/v1/system/apikey/regenerate', async (request, _reply) => { + const db = fastify.db; + const newKey = randomUUID(); + + await db + .update(systemConfig) + .set({ value: newKey, updatedAt: new Date().toISOString() }) + .where(eq(systemConfig.key, API_KEY_DB_KEY)); + + // Update the cached API key so the auth middleware uses the new one immediately + fastify.apiKeyHolder.value = newKey; + + request.log.info('[system] API key regenerated'); + + const response: ApiKeyResponse = { apiKey: newKey }; + return response; + }); + + // ── App Settings ── + + /** + * GET /api/v1/system/settings — Read current app settings from DB. + * Returns check interval (minutes) and concurrent downloads count. + */ + fastify.get('/api/v1/system/settings', async (_request, _reply) => { + const db = fastify.db; + const settings = await getAppSettings(db, [APP_CHECK_INTERVAL, APP_CONCURRENT_DOWNLOADS]); + + const response: AppSettingsResponse = { + checkInterval: parseInt(settings[APP_CHECK_INTERVAL] ?? '360', 10), + concurrentDownloads: parseInt(settings[APP_CONCURRENT_DOWNLOADS] ?? '2', 10), + }; + + return response; + }); + + /** + * PUT /api/v1/system/settings — Update app settings. + * Accepts partial updates: only provided fields are persisted. + * If concurrentDownloads changes, updates QueueService concurrency at runtime. 
+ */ + fastify.put('/api/v1/system/settings', async (request, reply) => { + const db = fastify.db; + const body = request.body as { checkInterval?: number; concurrentDownloads?: number }; + + // Validate + if (body.checkInterval !== undefined) { + if (typeof body.checkInterval !== 'number' || body.checkInterval < 1) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'checkInterval must be a number >= 1', + }); + } + } + if (body.concurrentDownloads !== undefined) { + if (typeof body.concurrentDownloads !== 'number' || body.concurrentDownloads < 1) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'concurrentDownloads must be a number >= 1', + }); + } + if (body.concurrentDownloads > 10) { + return reply.status(400).send({ + statusCode: 400, + error: 'Bad Request', + message: 'concurrentDownloads must be <= 10', + }); + } + } + + // Persist + if (body.checkInterval !== undefined) { + await setAppSetting(db, APP_CHECK_INTERVAL, body.checkInterval.toString()); + } + if (body.concurrentDownloads !== undefined) { + await setAppSetting(db, APP_CONCURRENT_DOWNLOADS, body.concurrentDownloads.toString()); + + // Update queue concurrency at runtime + if ((fastify as any).queueService?.setConcurrency) { + (fastify as any).queueService.setConcurrency(body.concurrentDownloads); + } + } + + // Return updated values + const settings = await getAppSettings(db, [APP_CHECK_INTERVAL, APP_CONCURRENT_DOWNLOADS]); + const response: AppSettingsResponse = { + checkInterval: parseInt(settings[APP_CHECK_INTERVAL] ?? '360', 10), + concurrentDownloads: parseInt(settings[APP_CONCURRENT_DOWNLOADS] ?? 
'2', 10), + }; + + return response; + }); +} diff --git a/src/services/back-catalog-import.ts b/src/services/back-catalog-import.ts new file mode 100644 index 0000000..84f372d --- /dev/null +++ b/src/services/back-catalog-import.ts @@ -0,0 +1,142 @@ +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import type { PlatformRegistry } from '../sources/platform-source'; +import type { QueueService } from './queue'; +import { getChannelById } from '../db/repositories/channel-repository'; +import { createContentItem } from '../db/repositories/content-repository'; +import type { Platform, PlatformContentMetadata } from '../types/index'; + +// ── Types ── + +type Db = LibSQLDatabase; + +export interface ImportResult { + found: number; + imported: number; + skipped: number; +} + +// ── Service ── + +/** + * Fetches all content for a channel from their platform source, + * deduplicates against existing content, inserts new items with + * 'monitored' status, and enqueues them at priority -10 (below + * normal priority 0, so regular scheduled downloads take precedence). + */ +export class BackCatalogImportService { + constructor( + private readonly db: Db, + private readonly platformRegistry: PlatformRegistry, + private readonly queueService: QueueService + ) {} + + /** + * Import all content for a channel, deduplicate, insert, and enqueue. + * + * @param channelId - The channel ID to import for + * @param order - 'newest' (natural order) or 'oldest' (reversed, oldest enqueued first) + * @returns Counts of found, imported, and skipped (duplicate) items + */ + async importChannel( + channelId: number, + order: 'newest' | 'oldest' = 'newest' + ): Promise { + // 1. 
Look up channel + const channel = await getChannelById(this.db, channelId); + if (!channel) { + console.log( + `[import] Channel ${channelId} not found — aborting import` + ); + throw new Error(`Channel ${channelId} not found`); + } + + const platform = channel.platform as Platform; + + console.log( + `[import] Starting back-catalog import for channel ${channelId} (${platform}, order=${order})` + ); + + // 2. Get platform source + const source = this.platformRegistry.get(platform); + if (!source) { + console.log( + `[import] No platform source registered for ${platform} — aborting import for channel ${channelId}` + ); + throw new Error(`No platform source for ${platform}`); + } + + // 3. Fetch all content (or fall back to fetchRecentContent with high limit) + let allContent: PlatformContentMetadata[]; + try { + if (source.fetchAllContent) { + allContent = await source.fetchAllContent(channel); + } else { + // Fallback for platforms without fetchAllContent (e.g. SoundCloud) + allContent = await source.fetchRecentContent(channel, { limit: 10_000 }); + } + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + console.log( + `[import] Failed to fetch content for channel ${channelId} (${platform}): ${msg}` + ); + throw err; + } + + const found = allContent.length; + console.log( + `[import] Fetched ${found} items for channel ${channelId} (${platform})` + ); + + // 4. If order === 'oldest', reverse so oldest items get enqueued first + if (order === 'oldest') { + allContent.reverse(); + } + + // 5. 
Deduplicate, insert, and enqueue + let imported = 0; + let skipped = 0; + + for (const item of allContent) { + // Back-catalog is *existing* content, so 'all' and 'existing' → monitored + const monitored = channel.monitoringMode === 'all' || channel.monitoringMode === 'existing'; + // createContentItem returns null if duplicate (dedup on channelId + platformContentId) + const contentItem = await createContentItem(this.db, { + channelId: channel.id, + title: item.title, + platformContentId: item.platformContentId, + url: item.url, + contentType: item.contentType, + duration: item.duration, + thumbnailUrl: item.thumbnailUrl, + publishedAt: item.publishedAt ?? null, + status: 'monitored', + monitored, + }); + + if (!contentItem) { + skipped++; + continue; + } + + imported++; + + // Enqueue at priority -10 — yields to normal priority (0) items + try { + await this.queueService.enqueue(contentItem.id, -10); + } catch (enqueueErr) { + // Individual enqueue failures don't abort the import + const msg = enqueueErr instanceof Error ? enqueueErr.message : String(enqueueErr); + console.log( + `[import] Failed to enqueue content item ${contentItem.id} for channel ${channelId}: ${msg}` + ); + } + } + + console.log( + `[import] Import complete: ${imported} imported, ${skipped} duplicates, ${found} total for channel ${channelId} (${platform})` + ); + + return { found, imported, skipped }; + } +} diff --git a/src/services/cookie-manager.ts b/src/services/cookie-manager.ts new file mode 100644 index 0000000..a054196 --- /dev/null +++ b/src/services/cookie-manager.ts @@ -0,0 +1,114 @@ +import * as fs from 'node:fs/promises'; +import { existsSync } from 'node:fs'; +import * as path from 'node:path'; +import type { Platform } from '../types/index'; + +// ── Constants ── + +/** Valid Netscape cookie file header prefixes. 
*/ +const VALID_HEADERS = [ + '# Netscape HTTP Cookie File', + '# HTTP Cookie File', +] as const; + +// ── CookieManager ── + +/** + * Validates, stores, and retrieves Netscape-format cookie files per platform + * for authenticated yt-dlp access. + * + * Cookie files are stored at `{cookiePath}/{platform}_cookies.txt`. + * Cookie file contents are never logged — only the file path and platform. + */ +export class CookieManager { + constructor(private readonly cookiePath: string) {} + + /** + * Import a cookie file for a platform. Validates that the file contains + * a valid Netscape cookie format header, then copies it to the cookie + * storage directory. + * + * @throws Error if the source file doesn't exist or has an invalid header + */ + async importCookieFile(platform: Platform, sourcePath: string): Promise { + // Verify source file exists + if (!existsSync(sourcePath)) { + throw new Error( + `[cookie-manager] Source cookie file not found: ${sourcePath}` + ); + } + + // Read and validate header — never log file contents + const content = await fs.readFile(sourcePath, 'utf-8'); + this.validateCookieHeader(content, sourcePath); + + // Ensure cookie directory exists + await fs.mkdir(this.cookiePath, { recursive: true }); + + // Write to platform-specific location + const destPath = this.buildCookiePath(platform); + await fs.writeFile(destPath, content, 'utf-8'); + + console.log( + `[cookie-manager] Imported cookie file for platform="${platform}" to "${destPath}"` + ); + } + + /** + * Get the path to the cookie file for a platform, or null if none exists. + */ + getCookieFilePath(platform: Platform): string | null { + const filePath = this.buildCookiePath(platform); + return existsSync(filePath) ? filePath : null; + } + + /** + * Check whether a cookie file exists for a given platform. + */ + hasCookies(platform: Platform): boolean { + return existsSync(this.buildCookiePath(platform)); + } + + /** + * Delete the cookie file for a platform. 
+ */ + async deleteCookieFile(platform: Platform): Promise { + const filePath = this.buildCookiePath(platform); + if (existsSync(filePath)) { + await fs.unlink(filePath); + console.log( + `[cookie-manager] Deleted cookie file for platform="${platform}" at "${filePath}"` + ); + } + } + + // ── Internal ── + + private buildCookiePath(platform: Platform): string { + return path.join(this.cookiePath, `${platform}_cookies.txt`); + } + + /** + * Validate that the first non-empty line contains a Netscape cookie header. + * Never log or expose file contents. + */ + private validateCookieHeader(content: string, sourcePath: string): void { + const lines = content.split(/\r?\n/); + const firstNonEmpty = lines.find((line) => line.trim().length > 0); + + if (!firstNonEmpty) { + throw new Error( + `[cookie-manager] Cookie file is empty: ${sourcePath}` + ); + } + + const trimmed = firstNonEmpty.trim(); + const isValid = VALID_HEADERS.some((header) => trimmed.startsWith(header)); + + if (!isValid) { + throw new Error( + `[cookie-manager] Invalid cookie file format — expected Netscape cookie header in: ${sourcePath}` + ); + } + } +} diff --git a/src/services/download.ts b/src/services/download.ts new file mode 100644 index 0000000..4a737c6 --- /dev/null +++ b/src/services/download.ts @@ -0,0 +1,306 @@ +import { stat } from 'node:fs/promises'; +import { extname } from 'node:path'; +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { execYtDlp, YtDlpError } from '../sources/yt-dlp'; +import { updateContentItem } from '../db/repositories/content-repository'; +import type { RateLimiter } from './rate-limiter'; +import type { FileOrganizer } from './file-organizer'; +import type { QualityAnalyzer } from './quality-analyzer'; +import type { CookieManager } from './cookie-manager'; +import type { + ContentItem, + Channel, + FormatProfile, + Platform, + ContentType, +} from '../types/index'; + +// ── Types ── + +type Db = 
LibSQLDatabase; + +// ── DownloadService ── + +/** + * Orchestrates the full download lifecycle: + * acquire rate limiter → build yt-dlp args → download → organize file → + * run quality analysis → update content item. + */ +export class DownloadService { + constructor( + private readonly db: Db, + private readonly rateLimiter: RateLimiter, + private readonly fileOrganizer: FileOrganizer, + private readonly qualityAnalyzer: QualityAnalyzer, + private readonly cookieManager: CookieManager + ) {} + + /** + * Download a content item and update its record in the database. + * + * Status transitions: monitored → downloading → downloaded | failed + * + * @throws YtDlpError on download failure (after updating status to 'failed') + */ + async downloadItem( + contentItem: ContentItem, + channel: Channel, + formatProfile?: FormatProfile + ): Promise { + const logPrefix = `[download] item=${contentItem.id} channel="${channel.name}"`; + + // Mark as downloading + console.log(`${logPrefix} status=downloading`); + await updateContentItem(this.db, contentItem.id, { status: 'downloading' }); + + try { + // Acquire rate limiter for platform + await this.rateLimiter.acquire(channel.platform as Platform); + + // Build yt-dlp args + const outputTemplate = this.fileOrganizer.buildOutputPath( + channel.platform, + channel.name, + contentItem.title, + this.guessExtension(contentItem.contentType, formatProfile) + ); + const args = this.buildYtDlpArgs( + contentItem, + channel, + formatProfile, + outputTemplate + ); + + console.log( + `${logPrefix} starting yt-dlp download subs=${formatProfile?.subtitleLanguages ? 'write-subs:' + formatProfile.subtitleLanguages : 'none'} embed=${formatProfile?.embedSubtitles ?? 
false}` + ); + const startTime = Date.now(); + + // Execute download — 30 minute timeout + const result = await execYtDlp(args, { timeout: 1_800_000 }); + + const duration = Date.now() - startTime; + console.log(`${logPrefix} yt-dlp completed in ${duration}ms`); + + // Parse final file path from --print after_move:filepath output + const finalPath = this.parseFinalPath(result.stdout, outputTemplate); + + // Ensure directories exist and resolve duplicate filenames + await this.fileOrganizer.ensureDirectory(finalPath); + + // Get file size + const fileStat = await stat(finalPath); + const fileSize = fileStat.size; + + // Run quality analysis + const qualityInfo = await this.qualityAnalyzer.analyze(finalPath); + + // Determine format from file extension + const format = extname(finalPath).replace(/^\./, '') || null; + + // Update content item as downloaded + const updated = await updateContentItem(this.db, contentItem.id, { + filePath: finalPath, + fileSize, + format, + qualityMetadata: qualityInfo, + status: 'downloaded', + downloadedAt: new Date().toISOString(), + }); + + this.rateLimiter.reportSuccess(channel.platform as Platform); + + console.log( + `${logPrefix} status=downloaded path="${finalPath}" size=${fileSize} format=${format}` + ); + + return updated!; + } catch (err: unknown) { + // Report error to rate limiter + this.rateLimiter.reportError(channel.platform as Platform); + + // Update status to failed + await updateContentItem(this.db, contentItem.id, { status: 'failed' }); + + const errorMsg = err instanceof Error ? err.message : String(err); + console.log(`${logPrefix} status=failed error="${errorMsg.slice(0, 200)}"`); + + throw err; + } + } + + // ── Internal ── + + /** + * Build the yt-dlp command-line args based on content type and format profile. 
+ */ + private buildYtDlpArgs( + contentItem: ContentItem, + channel: Channel, + formatProfile: FormatProfile | undefined, + outputTemplate: string + ): string[] { + const args: string[] = []; + + // Format selection + if (contentItem.contentType === 'audio') { + args.push(...this.buildAudioArgs(formatProfile)); + } else { + args.push(...this.buildVideoArgs(formatProfile)); + } + + // Subtitle support + args.push(...this.buildSubtitleArgs(formatProfile)); + + // Always include these flags + args.push('--no-playlist'); + args.push('--print', 'after_move:filepath'); + + // Cookie support + const cookiePath = this.cookieManager.getCookieFilePath( + channel.platform as Platform + ); + if (cookiePath) { + args.push('--cookies', cookiePath); + } + + // Output template + args.push('-o', outputTemplate); + + // URL is always last + args.push(contentItem.url); + + return args; + } + + /** + * Build format args for video content. + */ + private buildVideoArgs(formatProfile?: FormatProfile): string[] { + const args: string[] = []; + + if (formatProfile?.videoResolution === 'Best') { + // "Best" selects separate best-quality video + audio streams, merged together. + // This is higher quality than `-f best` which picks a single combined format. + args.push('-f', 'bestvideo+bestaudio/best'); + const container = formatProfile.containerFormat ?? 'mp4'; + args.push('--merge-output-format', container); + } else if (formatProfile?.videoResolution) { + const height = parseResolutionHeight(formatProfile.videoResolution); + if (height) { + args.push( + '-f', + `bestvideo[height<=${height}]+bestaudio/best[height<=${height}]` + ); + } else { + args.push('-f', 'best'); + } + + // Container format for merge + const container = formatProfile.containerFormat ?? 'mp4'; + args.push('--merge-output-format', container); + } else { + args.push('-f', 'best'); + } + + return args; + } + + /** + * Build format args for audio content. 
+ */ + private buildAudioArgs(formatProfile?: FormatProfile): string[] { + const args: string[] = ['-f', 'bestaudio']; + + if (formatProfile) { + args.push('--extract-audio'); + + if (formatProfile.audioCodec) { + args.push('--audio-format', formatProfile.audioCodec); + } + if (formatProfile.audioBitrate && formatProfile.audioBitrate !== 'Best') { + args.push('--audio-quality', formatProfile.audioBitrate); + } + } + + return args; + } + + /** + * Build subtitle flags based on format profile preferences. + * + * When subtitleLanguages is set: --write-subs --sub-langs + * When embedSubtitles is also true: --embed-subs + * + * --embed-subs without --write-subs is a no-op (no subs to embed), + * so we only emit it when subtitleLanguages is also set. + * + * yt-dlp gracefully continues when requested subs are unavailable. + */ + private buildSubtitleArgs(formatProfile?: FormatProfile): string[] { + const args: string[] = []; + + if (formatProfile?.subtitleLanguages) { + args.push('--write-subs'); + args.push('--sub-langs', formatProfile.subtitleLanguages); + + if (formatProfile.embedSubtitles) { + args.push('--embed-subs'); + } + } + + return args; + } + + /** + * Parse the final file path from yt-dlp stdout. + * The `--print after_move:filepath` flag makes yt-dlp output the final path + * as the last line of stdout. + */ + private parseFinalPath(stdout: string, fallbackPath: string): string { + const lines = stdout.trim().split('\n'); + // The filepath from --print is typically the last non-empty line + for (let i = lines.length - 1; i >= 0; i--) { + const line = lines[i].trim(); + if (line && !line.startsWith('[') && !line.startsWith('Deleting')) { + return line; + } + } + return fallbackPath; + } + + /** + * Guess a reasonable file extension based on content type and format profile. + * This is used for the output template — yt-dlp may change the actual extension. 
+ */ + private guessExtension( + contentType: ContentType, + formatProfile?: FormatProfile + ): string { + if (formatProfile?.containerFormat) { + return formatProfile.containerFormat; + } + return contentType === 'audio' ? 'mp3' : 'mp4'; + } +} + +// ── Helpers ── + +/** + * Parse a resolution string like "1080p", "720p", "4k" to a numeric height. + * Returns null if the format is not recognized. + */ +function parseResolutionHeight(resolution: string): number | null { + const lower = resolution.toLowerCase().trim(); + + // Handle "4k", "8k" etc. + if (lower === '4k') return 2160; + if (lower === '8k') return 4320; + + // Handle "1080p", "720p", "480p", "1080" etc. + const match = lower.match(/^(\d+)p?$/); + if (match) return Number(match[1]); + + return null; +} diff --git a/src/services/file-organizer.ts b/src/services/file-organizer.ts new file mode 100644 index 0000000..8760308 --- /dev/null +++ b/src/services/file-organizer.ts @@ -0,0 +1,115 @@ +import * as path from 'node:path'; +import * as fs from 'node:fs/promises'; +import { existsSync } from 'node:fs'; + +// ── Constants ── + +/** Characters forbidden in filenames across Windows, macOS, and Linux. */ +const FORBIDDEN_CHARS = /[/\\:*?"<>|]/g; + +/** Control characters (0x00–0x1F). */ +const CONTROL_CHARS = /[\x00-\x1f]/g; + +/** Multiple consecutive spaces or underscores collapsed to one. */ +const COLLAPSE_WHITESPACE = /[ ]{2,}/g; +const COLLAPSE_UNDERSCORES = /_{2,}/g; + +/** Maximum filename length (before extension). */ +const MAX_FILENAME_LENGTH = 200; + +/** Maximum attempts to find a unique filename. */ +const MAX_UNIQUE_ATTEMPTS = 100; + +// ── FileOrganizer ── + +/** + * Builds structured output paths from content metadata and sanitizes + * filenames for cross-platform safety. 
+ * + * Path template: `{mediaPath}/{platform}/{channelName}/{title}.{ext}` + */ +export class FileOrganizer { + constructor(private readonly mediaPath: string) {} + + /** + * Build the full output path for a downloaded file. + * Sanitizes channelName and title for filesystem safety. + */ + buildOutputPath( + platform: string, + channelName: string, + title: string, + ext: string + ): string { + const safeName = this.sanitizeFilename(channelName); + const safeTitle = this.sanitizeFilename(title); + const safeExt = ext.startsWith('.') ? ext.slice(1) : ext; + + return path.join(this.mediaPath, platform, safeName, `${safeTitle}.${safeExt}`); + } + + /** + * Create parent directories for a file path recursively. + */ + async ensureDirectory(filePath: string): Promise { + const dir = path.dirname(filePath); + await fs.mkdir(dir, { recursive: true }); + } + + /** + * If filePath already exists, append ` (2)`, ` (3)` etc. until a free + * name is found. Limits to 100 attempts to prevent infinite loops. + */ + async resolveUniquePath(filePath: string): Promise { + if (!existsSync(filePath)) return filePath; + + const dir = path.dirname(filePath); + const ext = path.extname(filePath); + const base = path.basename(filePath, ext); + + for (let i = 2; i <= MAX_UNIQUE_ATTEMPTS + 1; i++) { + const candidate = path.join(dir, `${base} (${i})${ext}`); + if (!existsSync(candidate)) return candidate; + } + + throw new Error( + `[file-organizer] Could not find a unique filename after ${MAX_UNIQUE_ATTEMPTS} attempts: ${filePath}` + ); + } + + /** + * Strip forbidden characters, control characters, collapse whitespace, + * trim dots/spaces, and enforce max length. Keeps Unicode intact. 
+ */ + sanitizeFilename(name: string): string { + let sanitized = name; + + // Strip forbidden filesystem characters + sanitized = sanitized.replace(FORBIDDEN_CHARS, ''); + + // Replace control characters + sanitized = sanitized.replace(CONTROL_CHARS, ''); + + // Collapse multiple spaces and underscores + sanitized = sanitized.replace(COLLAPSE_WHITESPACE, ' '); + sanitized = sanitized.replace(COLLAPSE_UNDERSCORES, '_'); + + // Trim leading/trailing dots and spaces + sanitized = sanitized.replace(/^[.\s]+/, '').replace(/[.\s]+$/, ''); + + // Handle empty result + if (sanitized.length === 0) return '_unnamed'; + + // Enforce max length, truncating without breaking multi-byte codepoints + if (sanitized.length > MAX_FILENAME_LENGTH) { + // Spread into codepoints to avoid splitting surrogate pairs + const codepoints = [...sanitized]; + sanitized = codepoints.slice(0, MAX_FILENAME_LENGTH).join(''); + // Re-trim trailing spaces/dots from truncation + sanitized = sanitized.replace(/[.\s]+$/, ''); + if (sanitized.length === 0) return '_unnamed'; + } + + return sanitized; + } +} diff --git a/src/services/health.ts b/src/services/health.ts new file mode 100644 index 0000000..fd63b47 --- /dev/null +++ b/src/services/health.ts @@ -0,0 +1,212 @@ +import { statfs } from 'node:fs/promises'; +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import type { SchedulerState } from './scheduler'; +import type { ComponentHealth } from '../types/api'; +import { getYtDlpVersion } from '../sources/yt-dlp'; +import { getRecentErrorCount } from '../db/repositories/history-repository'; + +// ── Cache entry ── + +interface CacheEntry { + value: T; + timestamp: number; +} + +const CACHE_TTL_MS = 60_000; // 60 seconds + +// ── HealthService ── + +/** + * Aggregates system health from multiple sources: + * - Scheduler running state and channel count + * - yt-dlp availability and version (cached 60s) + * - Disk space on the media path 
(cached 60s) + * - Recent download error count from history + */ +export class HealthService { + private readonly db: LibSQLDatabase; + private readonly getSchedulerState: () => SchedulerState | null; + private readonly mediaPath: string; + + // Caches + private ytDlpCache: CacheEntry | null = null; + private diskCache: CacheEntry | null = null; + + constructor( + db: LibSQLDatabase, + getSchedulerState: () => SchedulerState | null, + mediaPath: string + ) { + this.db = db; + this.getSchedulerState = getSchedulerState; + this.mediaPath = mediaPath; + } + + /** + * Return health components for scheduler, yt-dlp, disk space, and recent errors. + */ + async getComponentHealth(): Promise { + const [schedulerHealth, ytDlpHealth, diskHealth, errorHealth] = + await Promise.all([ + this.checkScheduler(), + this.checkYtDlp(), + this.checkDiskSpace(), + this.checkRecentErrors(), + ]); + + return [schedulerHealth, ytDlpHealth, diskHealth, errorHealth]; + } + + // ── Scheduler ── + + private checkScheduler(): ComponentHealth { + const state = this.getSchedulerState(); + + if (state === null) { + return { + name: 'scheduler', + status: 'degraded', + message: 'Scheduler disabled', + }; + } + + if (state.running) { + return { + name: 'scheduler', + status: 'healthy', + message: `Running — ${state.channelCount} channel(s) monitored`, + details: { channelCount: state.channelCount }, + }; + } + + return { + name: 'scheduler', + status: 'unhealthy', + message: 'Scheduler stopped', + }; + } + + // ── yt-dlp ── + + private async checkYtDlp(): Promise { + const version = await this.getCachedYtDlpVersion(); + + if (version) { + return { + name: 'ytDlp', + status: 'healthy', + message: `yt-dlp ${version}`, + details: { version }, + }; + } + + return { + name: 'ytDlp', + status: 'unhealthy', + message: 'yt-dlp not found', + }; + } + + private async getCachedYtDlpVersion(): Promise { + if ( + this.ytDlpCache && + Date.now() - this.ytDlpCache.timestamp < CACHE_TTL_MS + ) { + return 
this.ytDlpCache.value; + } + + const version = await getYtDlpVersion(); + this.ytDlpCache = { value: version, timestamp: Date.now() }; + return version; + } + + // ── Disk Space ── + + private async checkDiskSpace(): Promise { + try { + const info = await this.getCachedDiskInfo(); + + const freePercent = info.totalBytes > 0 + ? (info.availableBytes / info.totalBytes) * 100 + : 0; + + const freeGb = info.availableBytes / (1024 ** 3); + const totalGb = info.totalBytes / (1024 ** 3); + const message = `${freeGb.toFixed(1)} GB free of ${totalGb.toFixed(1)} GB (${freePercent.toFixed(0)}%)`; + + let status: ComponentHealth['status']; + if (freePercent > 10) { + status = 'healthy'; + } else if (freePercent >= 5) { + status = 'degraded'; + } else { + status = 'unhealthy'; + } + + return { + name: 'diskSpace', + status, + message, + details: { + availableBytes: info.availableBytes, + totalBytes: info.totalBytes, + freePercent: Math.round(freePercent * 100) / 100, + }, + }; + } catch (err) { + return { + name: 'diskSpace', + status: 'degraded', + message: `Disk check failed: ${err instanceof Error ? 
err.message : String(err)}`, + }; + } + } + + private async getCachedDiskInfo(): Promise { + if ( + this.diskCache && + Date.now() - this.diskCache.timestamp < CACHE_TTL_MS + ) { + return this.diskCache.value; + } + + const stats = await statfs(this.mediaPath); + const info: DiskInfo = { + availableBytes: Number(stats.bsize) * Number(stats.bavail), + totalBytes: Number(stats.bsize) * Number(stats.blocks), + }; + this.diskCache = { value: info, timestamp: Date.now() }; + return info; + } + + // ── Recent Errors ── + + private async checkRecentErrors(): Promise { + const since = new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(); + const count = await getRecentErrorCount(this.db, since); + + let status: ComponentHealth['status']; + if (count === 0) { + status = 'healthy'; + } else if (count <= 5) { + status = 'degraded'; + } else { + status = 'unhealthy'; + } + + return { + name: 'recentErrors', + status, + message: `${count} error(s) in the last 24 hours`, + details: { errorCount: count }, + }; + } +} + +// ── Internal Types ── + +interface DiskInfo { + availableBytes: number; + totalBytes: number; +} diff --git a/src/services/notification.ts b/src/services/notification.ts new file mode 100644 index 0000000..b530ef7 --- /dev/null +++ b/src/services/notification.ts @@ -0,0 +1,192 @@ +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import { getEnabledNotificationSettings } from '../db/repositories/notification-repository'; + +// ── Types ── + +type Db = LibSQLDatabase; + +/** Payload shape for notification callbacks from QueueService. */ +export interface NotificationEvent { + contentTitle: string; + channelName: string; + platform: string; + url: string; + filePath?: string; + error?: string; + attempt?: number; + maxAttempts?: number; +} + +/** Discord embed field shape. 
*/ +interface DiscordEmbedField { + name: string; + value: string; + inline?: boolean; +} + +/** Discord embed shape sent in webhook payload. */ +interface DiscordEmbed { + title: string; + description: string; + color: number; + fields: DiscordEmbedField[]; + timestamp: string; +} + +// ── Colors ── + +const COLOR_GRAB = 0x3498db; // blue +const COLOR_DOWNLOAD = 0x2ecc71; // green +const COLOR_FAILURE = 0xe74c3c; // red + +// ── NotificationService ── + +/** + * Dispatches notification embeds to configured Discord webhooks. + * All dispatch is fire-and-forget — errors are logged but never thrown. + */ +export class NotificationService { + constructor(private readonly db: Db) {} + + /** + * Notify about a content grab (added to queue). + */ + async notifyGrab( + contentTitle: string, + channelName: string, + platform: string, + url: string + ): Promise { + const embed: DiscordEmbed = { + title: '📥 Content Grabbed', + description: `**${contentTitle}**`, + color: COLOR_GRAB, + fields: [ + { name: 'Channel', value: channelName, inline: true }, + { name: 'Platform', value: platform, inline: true }, + { name: 'URL', value: url }, + ], + timestamp: new Date().toISOString(), + }; + + await this.dispatch('onGrab', embed); + } + + /** + * Notify about a successful download. + */ + async notifyDownload( + contentTitle: string, + channelName: string, + platform: string, + url: string, + filePath?: string + ): Promise { + const fields: DiscordEmbedField[] = [ + { name: 'Channel', value: channelName, inline: true }, + { name: 'Platform', value: platform, inline: true }, + { name: 'URL', value: url }, + ]; + + if (filePath) { + fields.push({ name: 'File', value: filePath }); + } + + const embed: DiscordEmbed = { + title: '✅ Download Complete', + description: `**${contentTitle}**`, + color: COLOR_DOWNLOAD, + fields, + timestamp: new Date().toISOString(), + }; + + await this.dispatch('onDownload', embed); + } + + /** + * Notify about a download failure. 
+ */ + async notifyFailure( + contentTitle: string, + channelName: string, + platform: string, + error: string, + attempt: number, + maxAttempts: number + ): Promise { + const embed: DiscordEmbed = { + title: '❌ Download Failed', + description: `**${contentTitle}**`, + color: COLOR_FAILURE, + fields: [ + { name: 'Channel', value: channelName, inline: true }, + { name: 'Platform', value: platform, inline: true }, + { name: 'Error', value: error.slice(0, 1024) }, + { name: 'Attempt', value: `${attempt} / ${maxAttempts}`, inline: true }, + ], + timestamp: new Date().toISOString(), + }; + + await this.dispatch('onFailure', embed); + } + + // ── Private ── + + /** + * Load enabled notification settings with the matching event toggle, + * and POST the embed to each Discord webhook. Errors are logged with + * channel name but never thrown — dispatch is fire-and-forget. + */ + private async dispatch( + eventType: 'onGrab' | 'onDownload' | 'onFailure', + embed: DiscordEmbed + ): Promise { + let settings; + try { + settings = await getEnabledNotificationSettings(this.db); + } catch (err) { + console.log( + `[notification] failed to load settings: ${err instanceof Error ? 
err.message : String(err)}` + ); + return; + } + + // Filter to settings that have the matching event toggle enabled + const matching = settings.filter((s) => s[eventType] === true); + + for (const setting of matching) { + const config = setting.config as { webhookUrl?: string }; + const webhookUrl = config?.webhookUrl; + + if (!webhookUrl) { + console.log( + `[notification] skip channel="${setting.name}" — no webhook URL configured` + ); + continue; + } + + try { + const response = await fetch(webhookUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ embeds: [embed] }), + }); + + if (!response.ok) { + console.log( + `[notification] dispatch failed channel="${setting.name}" event=${eventType} httpStatus=${response.status}` + ); + } else { + console.log( + `[notification] dispatch success channel="${setting.name}" event=${eventType}` + ); + } + } catch (err) { + console.log( + `[notification] dispatch error channel="${setting.name}" event=${eventType} error="${err instanceof Error ? err.message : String(err)}"` + ); + } + } + } +} diff --git a/src/services/quality-analyzer.ts b/src/services/quality-analyzer.ts new file mode 100644 index 0000000..8eda2dc --- /dev/null +++ b/src/services/quality-analyzer.ts @@ -0,0 +1,208 @@ +import { execFile as execFileCb } from 'node:child_process'; +import { promisify } from 'node:util'; +import type { QualityInfo } from '../types/index'; + +const execFileAsync = promisify(execFileCb); + +// ── ffprobe Output Types ── + +interface FfprobeStream { + codec_type?: string; + codec_name?: string; + width?: number; + height?: number; + bit_rate?: string; +} + +interface FfprobeFormat { + format_name?: string; + bit_rate?: string; +} + +interface FfprobeOutput { + streams?: FfprobeStream[]; + format?: FfprobeFormat; +} + +// ── Constants ── + +/** Lossy codecs that should not appear inside lossless containers. 
*/ +const LOSSY_CODECS = new Set([ + 'aac', 'mp3', 'opus', 'vorbis', 'ac3', 'eac3', 'wmav2', +]); + +/** Lossless container format names (as reported by ffprobe). */ +const LOSSLESS_CONTAINERS = new Set([ + 'wav', 'flac', +]); + +/** Audio bitrate threshold (bps) below which a warning is generated in lossless containers. */ +const LOW_BITRATE_THRESHOLD = 192_000; + +// ── QualityAnalyzer ── + +/** + * Inspects downloaded media files using ffprobe and builds QualityInfo + * with honest quality metadata and warnings. + * + * Degrades gracefully when ffprobe is unavailable — returns warnings + * instead of throwing, so downloads succeed even without ffprobe. + */ +export class QualityAnalyzer { + /** + * Analyze a downloaded file and return quality metadata. + * Never throws — returns a QualityInfo with warnings on failure. + */ + async analyze(filePath: string): Promise { + try { + const { stdout } = await execFileAsync('ffprobe', [ + '-v', 'quiet', + '-print_format', 'json', + '-show_format', + '-show_streams', + filePath, + ], { + timeout: 30_000, + windowsHide: true, + }); + + const parsed = JSON.parse(stdout) as FfprobeOutput; + return this.buildQualityInfo(parsed); + } catch (err: unknown) { + const reason = describeError(err); + console.log(`[quality] ffprobe analysis failed for "${filePath}": ${reason}`); + return { + actualResolution: null, + actualCodec: null, + actualBitrate: null, + containerFormat: null, + qualityWarnings: [`ffprobe analysis failed: ${reason}`], + }; + } + } + + /** + * Check if ffprobe is available on the system PATH. + * Returns true if `ffprobe -version` exits 0. + */ + async checkFfprobeAvailable(): Promise { + try { + await execFileAsync('ffprobe', ['-version'], { + timeout: 5_000, + windowsHide: true, + }); + return true; + } catch { + return false; + } + } + + // ── Internal ── + + private buildQualityInfo(output: FfprobeOutput): QualityInfo { + const streams = output.streams ?? 
[]; + const format = output.format; + + const videoStream = streams.find((s) => s.codec_type === 'video'); + const audioStream = streams.find((s) => s.codec_type === 'audio'); + const primaryStream = videoStream ?? audioStream; + + // Resolution — null for audio-only + const actualResolution = videoStream && videoStream.width && videoStream.height + ? `${videoStream.width}x${videoStream.height}` + : null; + + // Codec — primary stream codec + const actualCodec = primaryStream?.codec_name ?? null; + + // Bitrate — try stream first, then format-level + const rawBitrate = primaryStream?.bit_rate ?? format?.bit_rate ?? null; + const actualBitrate = rawBitrate ? formatBitrate(Number(rawBitrate)) : null; + + // Container format + const containerFormat = format?.format_name ?? null; + + // Warnings + const qualityWarnings = this.detectWarnings( + audioStream, + containerFormat, + rawBitrate ? Number(rawBitrate) : null + ); + + const info: QualityInfo = { + actualResolution, + actualCodec, + actualBitrate, + containerFormat, + qualityWarnings, + }; + + if (qualityWarnings.length > 0) { + console.log(`[quality] Warnings detected: ${qualityWarnings.join('; ')}`); + } + + return info; + } + + private detectWarnings( + audioStream: FfprobeStream | undefined, + containerFormat: string | null, + bitrateBps: number | null + ): string[] { + const warnings: string[] = []; + + if (!containerFormat) return warnings; + + // Check each container name component (ffprobe may report "wav" or "pcm_s16le,wav") + const containerParts = containerFormat.split(',').map((s) => s.trim().toLowerCase()); + const isLosslessContainer = containerParts.some((p) => LOSSLESS_CONTAINERS.has(p)); + + if (isLosslessContainer && audioStream?.codec_name) { + const codec = audioStream.codec_name.toLowerCase(); + + // Lossy-in-lossless detection + if (LOSSY_CODECS.has(codec)) { + warnings.push( + `Lossy codec "${audioStream.codec_name}" inside lossless container "${containerFormat}"` + ); + } + + // Low bitrate 
in lossless container + const audioBitrate = audioStream.bit_rate + ? Number(audioStream.bit_rate) + : bitrateBps; + if (audioBitrate !== null && audioBitrate > 0 && audioBitrate < LOW_BITRATE_THRESHOLD) { + warnings.push( + `Low audio bitrate (${formatBitrate(audioBitrate)}) in lossless container "${containerFormat}"` + ); + } + } + + return warnings; + } +} + +// ── Helpers ── + +/** Format a bitrate in bps to a human-readable string (e.g. "320 kbps"). */ +function formatBitrate(bps: number): string { + if (bps >= 1_000_000) { + return `${(bps / 1_000_000).toFixed(1)} Mbps`; + } + if (bps >= 1_000) { + return `${Math.round(bps / 1_000)} kbps`; + } + return `${bps} bps`; +} + +/** Extract a readable error description without exposing stack traces. */ +function describeError(err: unknown): string { + if (err instanceof Error) { + const errno = (err as NodeJS.ErrnoException).code; + if (errno === 'ENOENT') return 'ffprobe not found on PATH'; + if (errno === 'ABORT_ERR' || err.name === 'AbortError') return 'ffprobe timed out'; + if (err.message.includes('SIGTERM')) return 'ffprobe timed out'; + return err.message.slice(0, 200); + } + return String(err).slice(0, 200); +} diff --git a/src/services/queue.ts b/src/services/queue.ts new file mode 100644 index 0000000..907a702 --- /dev/null +++ b/src/services/queue.ts @@ -0,0 +1,430 @@ +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import type { DownloadService } from './download'; +import { + createQueueItem, + getQueueItemById, + getQueueItemsByStatus, + getPendingQueueItems, + updateQueueItemStatus, + countQueueItemsByStatus, + getQueueItemByContentItemId, +} from '../db/repositories/queue-repository'; +import { createHistoryEvent } from '../db/repositories/history-repository'; +import { getContentItemById, updateContentItem } from '../db/repositories/content-repository'; +import { getChannelById } from '../db/repositories/channel-repository'; +import { 
// ── Types ──

type Db = LibSQLDatabase<typeof schema>;

export interface QueueServiceOptions {
  concurrency?: number;
  onDownloadComplete?: (event: NotificationEvent) => void;
  onDownloadFailed?: (event: NotificationEvent) => void;
}

export interface QueueState {
  pending: number;
  downloading: number;
  completed: number;
  failed: number;
  cancelled: number;
}

// ── QueueService ──

/**
 * Orchestrates the download queue lifecycle: enqueue, process with concurrency
 * control, retry on failure, cancel, and recover interrupted items on startup.
 *
 * Status transitions:
 *   pending → downloading → completed | failed
 *   failed → pending (retry) or failed (max attempts exhausted)
 *   pending | failed → cancelled
 *
 * Concurrency is managed via an in-memory counter — Node's single-threaded
 * event loop ensures processNext() is not re-entrant within a single tick.
 */
export class QueueService {
  private activeCount = 0;
  private stopped = false;
  private concurrency: number;
  private readonly onDownloadComplete?: (event: NotificationEvent) => void;
  private readonly onDownloadFailed?: (event: NotificationEvent) => void;

  constructor(
    private readonly db: Db,
    private readonly downloadService: DownloadService,
    options?: QueueServiceOptions | number
  ) {
    // Backward compatibility: callers may still pass a bare concurrency number
    // instead of an options object.
    if (typeof options === 'number') {
      this.concurrency = options;
    } else {
      this.concurrency = options?.concurrency ?? appConfig.concurrentDownloads;
      this.onDownloadComplete = options?.onDownloadComplete;
      this.onDownloadFailed = options?.onDownloadFailed;
    }
  }

  // ── Public API ──

  /**
   * Update the concurrency limit at runtime.
   * Immediately tries to pick up pending items if concurrency increased.
   */
  setConcurrency(n: number): void {
    this.concurrency = n;
    console.log(`[queue] concurrency updated to ${n}`);
    this.processNext();
  }

  /**
   * Enqueue a content item for download. Creates a queue item, updates the
   * content status to 'queued', records a 'grabbed' history event, and kicks
   * off processing.
   *
   * @throws Error if the content item is already queued or downloading.
   */
  async enqueue(contentItemId: number, priority = 0): Promise<QueueItem> {
    // Dedup guard: a content item may only be re-queued once its previous
    // queue entry has left the pending/downloading states.
    const existing = await getQueueItemByContentItemId(this.db, contentItemId);
    if (
      existing &&
      (existing.status === 'pending' || existing.status === 'downloading')
    ) {
      throw new Error(
        `Content item ${contentItemId} is already in the queue with status '${existing.status}'`
      );
    }

    // The content item supplies channelId/title for the history record.
    const contentItem = await getContentItemById(this.db, contentItemId);
    if (!contentItem) {
      throw new Error(`Content item ${contentItemId} not found`);
    }

    const queueItem = await createQueueItem(this.db, {
      contentItemId,
      priority,
    });

    await updateContentItem(this.db, contentItemId, { status: 'queued' });

    await createHistoryEvent(this.db, {
      contentItemId,
      channelId: contentItem.channelId,
      eventType: 'grabbed',
      status: 'pending',
      details: { queueItemId: queueItem.id, title: contentItem.title },
    });

    console.log(
      `[queue] enqueue queueId=${queueItem.id} contentId=${contentItemId} status=pending priority=${priority}`
    );

    // Kick off processing without awaiting it.
    this.processNext();

    return queueItem;
  }

  /**
   * Synchronous entry point that picks up pending items up to the concurrency
   * limit and fires off async processing for each. Errors are logged, never
   * thrown — this is deliberately fire-and-forget.
   */
  processNext(): void {
    if (this.stopped) return;

    const available = this.concurrency - this.activeCount;
    if (available <= 0) return;

    void (async () => {
      try {
        const pending = await getPendingQueueItems(this.db, available);
        for (const queued of pending) {
          // Re-check both guards: state may change between iterations.
          if (this.stopped || this.activeCount >= this.concurrency) break;

          this.activeCount++;
          this.processItem(queued).catch((err) => {
            console.log(
              `[queue] unhandled error processing queueId=${queued.id}: ${err instanceof Error ? err.message : String(err)}`
            );
          });
        }
      } catch (err) {
        console.log(
          `[queue] error fetching pending items: ${err instanceof Error ? err.message : String(err)}`
        );
      }
    })();
  }

  /**
   * Retry a failed queue item. Resets it to pending if under maxAttempts.
   *
   * @throws Error if item not found, not in failed status, or attempts exhausted.
   */
  async retryItem(queueItemId: number): Promise<QueueItem> {
    const item = await getQueueItemById(this.db, queueItemId);
    if (!item) {
      throw new Error(`Queue item ${queueItemId} not found`);
    }
    if (item.status !== 'failed') {
      throw new Error(
        `Cannot retry queue item ${queueItemId} — status is '${item.status}', expected 'failed'`
      );
    }
    if (item.attempts >= item.maxAttempts) {
      throw new Error(
        `Cannot retry queue item ${queueItemId} — attempts (${item.attempts}) >= maxAttempts (${item.maxAttempts})`
      );
    }

    // Clear the previous error alongside the status reset.
    const updated = await updateQueueItemStatus(this.db, queueItemId, 'pending', {
      error: null,
    });

    await updateContentItem(this.db, item.contentItemId, { status: 'queued' });

    console.log(
      `[queue] retry queueId=${queueItemId} contentId=${item.contentItemId} status=pending attempts=${item.attempts}/${item.maxAttempts}`
    );

    this.processNext();

    return updated!;
  }

  /**
   * Cancel a queue item. Only pending or failed items can be cancelled —
   * an in-flight download cannot be interrupted here.
   *
   * @throws Error if item not found or not in a cancellable status.
   */
  async cancelItem(queueItemId: number): Promise<QueueItem> {
    const item = await getQueueItemById(this.db, queueItemId);
    if (!item) {
      throw new Error(`Queue item ${queueItemId} not found`);
    }

    if (item.status !== 'pending' && item.status !== 'failed') {
      throw new Error(
        `Cannot cancel queue item ${queueItemId} — status is '${item.status}', must be 'pending' or 'failed'`
      );
    }

    const updated = await updateQueueItemStatus(this.db, queueItemId, 'cancelled');

    console.log(
      `[queue] cancel queueId=${queueItemId} contentId=${item.contentItemId} status=cancelled`
    );

    return updated!;
  }

  /**
   * Recover items that were stuck in 'downloading' status after a crash/restart.
   * Resets them to 'pending' so they'll be picked up again.
   *
   * @returns Number of items recovered.
+ */ + async recoverOnStartup(): Promise { + const stuckItems = await getQueueItemsByStatus(this.db, 'downloading'); + + for (const item of stuckItems) { + await updateQueueItemStatus(this.db, item.id, 'pending', { + startedAt: null, + error: null, + }); + } + + if (stuckItems.length > 0) { + console.log( + `[queue] recovery: reset ${stuckItems.length} stuck item(s) from downloading → pending` + ); + } + + return stuckItems.length; + } + + /** + * Get current queue state — count of items by status. + */ + async getState(): Promise { + return countQueueItemsByStatus(this.db); + } + + /** + * Stop processing — no new items will be picked up. + * Items already downloading will finish. + */ + stop(): void { + this.stopped = true; + console.log('[queue] stopped — no new items will be processed'); + } + + /** + * Resume processing after stop(). + */ + start(): void { + this.stopped = false; + console.log('[queue] started — processing resumed'); + this.processNext(); + } + + // ── Internal ── + + /** + * Process a single queue item: download the content, update status, + * and record history events. 
   */
  private async processItem(queueItem: QueueItem): Promise<void> {
    const logPrefix = `[queue] process queueId=${queueItem.id} contentId=${queueItem.contentItemId}`;

    try {
      // Transition to downloading
      await updateQueueItemStatus(this.db, queueItem.id, 'downloading', {
        startedAt: new Date().toISOString(),
      });

      console.log(`${logPrefix} status=downloading`);

      // Look up content item and channel
      const contentItem = await getContentItemById(this.db, queueItem.contentItemId);
      if (!contentItem) {
        throw new Error(`Content item ${queueItem.contentItemId} not found`);
      }

      const channel = await getChannelById(this.db, contentItem.channelId);
      if (!channel) {
        throw new Error(`Channel ${contentItem.channelId} not found for content item ${contentItem.id}`);
      }

      // Resolve format profile: channel-specific > default > undefined
      // (?? undefined normalizes a null repository result to undefined so the
      // downloadService parameter stays optional-shaped)
      let formatProfile = undefined;
      if (channel.formatProfileId) {
        formatProfile = await getFormatProfileById(this.db, channel.formatProfileId) ?? undefined;
      }
      if (!formatProfile) {
        formatProfile = await getDefaultFormatProfile(this.db) ?? undefined;
      }

      // Execute download — throws on failure; the catch below handles retry bookkeeping
      await this.downloadService.downloadItem(contentItem, channel, formatProfile);

      // Success — mark completed
      await updateQueueItemStatus(this.db, queueItem.id, 'completed', {
        completedAt: new Date().toISOString(),
      });

      // Record downloaded history event.
      // attempts here is the snapshot taken when this item was fetched, +1 for
      // this (successful) attempt — the DB row's attempts field is only
      // incremented on failure.
      await createHistoryEvent(this.db, {
        contentItemId: queueItem.contentItemId,
        channelId: channel.id,
        eventType: 'downloaded',
        status: 'completed',
        details: {
          queueItemId: queueItem.id,
          title: contentItem.title,
          attempts: queueItem.attempts + 1,
        },
      });

      console.log(`${logPrefix} status=completed`);

      // Fire notification callback (fire-and-forget) — callback errors are
      // logged and swallowed so they can't poison the queue item's state.
      if (this.onDownloadComplete) {
        try {
          this.onDownloadComplete({
            contentTitle: contentItem.title,
            channelName: channel.name,
            platform: channel.platform,
            url: contentItem.url,
            // filePath is read after downloadItem() ran; presumably that call
            // persists the path onto the content item — TODO confirm
            filePath: contentItem.filePath ?? undefined,
          });
        } catch (notifyErr) {
          console.log(
            `[queue] notification callback error: ${notifyErr instanceof Error ? notifyErr.message : String(notifyErr)}`
          );
        }
      }
    } catch (err: unknown) {
      const errorMsg = err instanceof Error ? err.message : String(err);
      const newAttempts = queueItem.attempts + 1;
      const exhausted = newAttempts >= queueItem.maxAttempts;
      // Not exhausted → back to 'pending' so processNext() retries it later
      const newStatus: QueueStatus = exhausted ? 'failed' : 'pending';

      await updateQueueItemStatus(this.db, queueItem.id, newStatus, {
        attempts: newAttempts,
        error: errorMsg,
        // Clear startedAt on retry-bound items; keep it on terminal failures
        ...(exhausted ? {} : { startedAt: null }),
      });

      // Update content status to failed when attempts exhausted
      if (exhausted) {
        await updateContentItem(this.db, queueItem.contentItemId, { status: 'failed' });
      }

      // Record failed history event (content item may have been deleted
      // mid-download, hence the optional chaining)
      const contentItem = await getContentItemById(this.db, queueItem.contentItemId);
      await createHistoryEvent(this.db, {
        contentItemId: queueItem.contentItemId,
        channelId: contentItem?.channelId ?? null,
        eventType: 'failed',
        status: newStatus,
        details: {
          queueItemId: queueItem.id,
          error: errorMsg,
          attempt: newAttempts,
          maxAttempts: queueItem.maxAttempts,
          exhausted,
        },
      });

      console.log(
        `${logPrefix} status=${newStatus} attempt=${newAttempts}/${queueItem.maxAttempts} error="${errorMsg.slice(0, 200)}"`
      );

      // Fire failure notification callback when attempts exhausted (fire-and-forget)
      if (exhausted && this.onDownloadFailed) {
        try {
          // Look up channel for notification context
          let failedChannelName = 'Unknown';
          let failedPlatform = 'unknown';
          if (contentItem?.channelId) {
            const failedChannel = await getChannelById(this.db, contentItem.channelId);
            if (failedChannel) {
              failedChannelName = failedChannel.name;
              failedPlatform = failedChannel.platform;
            }
          }
          this.onDownloadFailed({
            contentTitle: contentItem?.title ?? `Content #${queueItem.contentItemId}`,
            channelName: failedChannelName,
            platform: failedPlatform,
            url: contentItem?.url ?? '',
            error: errorMsg,
            attempt: newAttempts,
            maxAttempts: queueItem.maxAttempts,
          });
        } catch (notifyErr) {
          console.log(
            `[queue] notification callback error: ${notifyErr instanceof Error ? notifyErr.message : String(notifyErr)}`
          );
        }
      }
    } finally {
      // Always free the slot and look for more work, even on unexpected throws
      this.activeCount--;
      this.processNext();
    }
  }
}
diff --git a/src/services/rate-limiter.ts b/src/services/rate-limiter.ts
new file mode 100644
index 0000000..8d29927
--- /dev/null
+++ b/src/services/rate-limiter.ts
@@ -0,0 +1,133 @@
import type { Platform } from '../types/index';

// ── Types ──

/** Per-platform configuration: the floor for the enforced call interval. */
export interface RateLimiterConfig {
  minIntervalMs: number;
}

/** Mutable runtime state tracked per platform. */
export interface PlatformRateLimitState {
  // Epoch millis of the most recent acquire(); null before the first call
  lastCallTime: number | null;
  // Consecutive errors since the last reportSuccess()
  errorCount: number;
  // Current enforced interval (grows with backoff, reset on success)
  effectiveIntervalMs: number;
}

// ── Constants ──

/** Maximum backoff interval — 60 seconds. */
const MAX_BACKOFF_MS = 60_000;

// ── Rate Limiter ──

/**
 * Per-platform rate limiter with exponential backoff.
+ * + * `acquire(platform)` waits until the minimum interval has elapsed since the + * last call for that platform. `reportError` doubles the effective interval + * (up to MAX_BACKOFF_MS). `reportSuccess` resets to the configured minimum. + */ +export class RateLimiter { + private readonly config: Record; + private readonly state = new Map(); + + constructor(config: Partial>) { + this.config = config as Record; + + // Initialize state for each configured platform + for (const [platform, cfg] of Object.entries(this.config)) { + this.state.set(platform, { + lastCallTime: null, + errorCount: 0, + effectiveIntervalMs: cfg.minIntervalMs, + }); + } + } + + /** + * Wait until enough time has elapsed since the last call for this platform. + * Updates lastCallTime after the wait completes. + */ + async acquire(platform: Platform): Promise { + const state = this.getOrCreateState(platform); + const now = Date.now(); + + if (state.lastCallTime !== null) { + const elapsed = now - state.lastCallTime; + const remaining = state.effectiveIntervalMs - elapsed; + + if (remaining > 0) { + console.log( + `[rate-limiter] ${platform}: waiting ${remaining}ms (effective interval: ${state.effectiveIntervalMs}ms)` + ); + await delay(remaining); + } + } + + state.lastCallTime = Date.now(); + } + + /** + * Report a rate-limit or transient error for a platform. + * Doubles the effective interval (exponential backoff) up to MAX_BACKOFF_MS. + */ + reportError(platform: Platform): void { + const state = this.getOrCreateState(platform); + state.errorCount++; + const cfg = this.config[platform]; + const baseInterval = cfg?.minIntervalMs ?? 1000; + state.effectiveIntervalMs = Math.min( + baseInterval * Math.pow(2, state.errorCount), + MAX_BACKOFF_MS + ); + + console.log( + `[rate-limiter] ${platform}: backoff — errorCount=${state.errorCount}, effectiveInterval=${state.effectiveIntervalMs}ms` + ); + } + + /** + * Report a successful call for a platform. 
+ * Resets error count and effective interval to the configured minimum. + */ + reportSuccess(platform: Platform): void { + const state = this.getOrCreateState(platform); + const cfg = this.config[platform]; + const baseInterval = cfg?.minIntervalMs ?? 1000; + state.errorCount = 0; + state.effectiveIntervalMs = baseInterval; + } + + /** + * Get the current state of all platforms for diagnostic inspection. + */ + getState(): Record { + const result: Record = {}; + for (const [platform, state] of this.state) { + result[platform] = { ...state }; + } + return result; + } + + // ── Internal ── + + private getOrCreateState(platform: Platform): PlatformRateLimitState { + let state = this.state.get(platform); + if (!state) { + const cfg = this.config[platform]; + const minInterval = cfg?.minIntervalMs ?? 1000; + state = { + lastCallTime: null, + errorCount: 0, + effectiveIntervalMs: minInterval, + }; + this.state.set(platform, state); + } + return state; + } +} + +// ── Helpers ── + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} diff --git a/src/services/scheduler.ts b/src/services/scheduler.ts new file mode 100644 index 0000000..d084ee4 --- /dev/null +++ b/src/services/scheduler.ts @@ -0,0 +1,368 @@ +import { Cron } from 'croner'; +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type * as schema from '../db/schema/index'; +import type { Channel, Platform, PlatformContentMetadata } from '../types/index'; +import type { PlatformRegistry, FetchRecentContentOptions } from '../sources/platform-source'; +import type { RateLimiter } from './rate-limiter'; +import { YtDlpError } from '../sources/yt-dlp'; +import { + getEnabledChannels, + updateChannel, +} from '../db/repositories/channel-repository'; +import { + createContentItem, + getRecentContentIds, +} from '../db/repositories/content-repository'; +import { getPlatformSettings } from '../db/repositories/platform-settings-repository'; + +// ── Types ── + 
+export interface ChannelJobState { + channelId: number; + channelName: string; + platform: Platform; + isRunning: boolean; + nextRun: Date | null; + lastCheckedAt: string | null; + lastCheckStatus: string | null; +} + +export interface SchedulerState { + running: boolean; + channelCount: number; + channels: ChannelJobState[]; +} + +export interface CheckChannelResult { + channelId: number; + channelName: string; + newItems: number; + totalFetched: number; + status: 'success' | 'error' | 'rate_limited' | 'already_running'; +} + +/** Optional configuration for the scheduler service. */ +export interface SchedulerOptions { + /** Called when a new content item is inserted — used to auto-enqueue for download. */ + onNewContent?: (contentItemId: number) => void; +} + +// ── Scheduler Service ── + +/** + * Manages per-channel cron jobs for content monitoring. + * + * Loads enabled channels from the database, creates Cron jobs that periodically + * check for new content via platform sources, deduplicates against existing + * records, and inserts new items with `monitored` status. + */ +export class SchedulerService { + private readonly db: LibSQLDatabase; + private readonly platformRegistry: PlatformRegistry; + private readonly rateLimiter: RateLimiter; + private readonly onNewContent?: (contentItemId: number) => void; + private readonly jobs = new Map(); + private readonly channelCache = new Map(); + private readonly activeChecks = new Set(); + private running = false; + + constructor( + db: LibSQLDatabase, + platformRegistry: PlatformRegistry, + rateLimiter: RateLimiter, + options?: SchedulerOptions + ) { + this.db = db; + this.platformRegistry = platformRegistry; + this.rateLimiter = rateLimiter; + this.onNewContent = options?.onNewContent; + } + + /** + * Load all enabled channels and create cron jobs for each. + * Returns the number of channels loaded. 
+ */ + async start(): Promise { + const channels = await getEnabledChannels(this.db); + + for (const channel of channels) { + this.createJob(channel); + } + + this.running = true; + console.log(`[scheduler] Started with ${channels.length} channels`); + return channels.length; + } + + /** Stop all active cron jobs. */ + stop(): void { + for (const [channelId, job] of this.jobs) { + job.stop(); + console.log(`[scheduler] Stopped job for channel ${channelId}`); + } + this.jobs.clear(); + this.channelCache.clear(); + this.running = false; + console.log('[scheduler] Stopped'); + } + + /** Create a cron job for a newly added channel. */ + addChannel(channel: Channel): void { + if (!channel.monitoringEnabled) return; + this.createJob(channel); + console.log( + `[scheduler] Added job for channel ${channel.id} ("${channel.name}") — interval: ${channel.checkInterval}m` + ); + } + + /** Stop and remove the cron job for a deleted channel. */ + removeChannel(channelId: number): void { + const job = this.jobs.get(channelId); + if (job) { + job.stop(); + this.jobs.delete(channelId); + this.channelCache.delete(channelId); + console.log(`[scheduler] Removed job for channel ${channelId}`); + } + } + + /** Update a channel's cron job (remove old, create new with updated interval). */ + updateChannel(channel: Channel): void { + this.removeChannel(channel.id); + if (channel.monitoringEnabled) { + this.createJob(channel); + console.log( + `[scheduler] Updated job for channel ${channel.id} ("${channel.name}") — interval: ${channel.checkInterval}m` + ); + } + } + + /** + * Check a channel for new content. + * + * 1. Check per-channel lock (reject overlap) + * 2. Acquire rate limiter slot for the platform + * 3. Fetch recent content via platform source + * 4. Deduplicate against existing content + * 5. Insert new items with `monitored` status + * 6. Update channel's lastCheckedAt and lastCheckStatus + * + * Returns a structured result with item counts and status. 
   */
  async checkChannel(channel: Channel): Promise<CheckChannelResult> {
    // Per-channel lock — reject overlap before any async work
    if (this.activeChecks.has(channel.id)) {
      console.log(
        `[scheduler] Skipping channel ${channel.id} ("${channel.name}") — already running`
      );
      return {
        channelId: channel.id,
        channelName: channel.name,
        newItems: 0,
        totalFetched: 0,
        status: 'already_running',
      };
    }

    this.activeChecks.add(channel.id);

    console.log(
      `[scheduler] Checking channel ${channel.id} ("${channel.name}") on ${channel.platform}`
    );

    try {
      // 1. Rate limit
      await this.rateLimiter.acquire(channel.platform);

      // 2. Get platform source
      const source = this.platformRegistry.get(channel.platform);
      if (!source) {
        throw new Error(
          `No platform source registered for "${channel.platform}"`
        );
      }

      // 3. Load platform settings for scan limit and rate limit delay
      //    (defaults: 100 items / 1000 ms when no settings row exists)
      const platformSettingsRow = await getPlatformSettings(this.db, channel.platform);
      const scanLimit = platformSettingsRow?.scanLimit ?? 100;
      const rateLimitDelay = platformSettingsRow?.rateLimitDelay ?? 1000;

      // 4. Load existing content IDs for dedup gating
      const existingIds = new Set(
        await getRecentContentIds(this.db, channel.id)
      );

      // 5. Fetch recent content with hybrid options — existingIds lets the
      //    source skip per-item enrichment for rows we already know about
      const fetchOptions: FetchRecentContentOptions = {
        limit: scanLimit,
        existingIds,
        rateLimitDelay,
      };
      const items: PlatformContentMetadata[] =
        await source.fetchRecentContent(channel, fetchOptions);

      // 6. Deduplicate — filter out items already known
      const newItems = items.filter(
        (item) => !existingIds.has(item.platformContentId)
      );

      // 7. Insert new items
      let insertedCount = 0;
      for (const item of newItems) {
        // Scheduler discovers *new* content (future), so 'all' and 'future' → monitored
        const monitored = channel.monitoringMode === 'all' || channel.monitoringMode === 'future';
        // NOTE(review): status is 'monitored' even when the monitored flag is
        // false — the flag, not the status, appears to gate auto-enqueue below;
        // confirm against the content-repository semantics.
        const created = await createContentItem(this.db, {
          channelId: channel.id,
          title: item.title,
          platformContentId: item.platformContentId,
          url: item.url,
          contentType: item.contentType,
          duration: item.duration,
          thumbnailUrl: item.thumbnailUrl,
          publishedAt: item.publishedAt ?? null,
          status: 'monitored',
          monitored,
        });
        if (created) {
          insertedCount++;
          // Only auto-enqueue monitored items
          if (this.onNewContent && created.monitored) {
            this.onNewContent(created.id);
          }
        }
      }

      // 8. Update channel status
      await updateChannel(this.db, channel.id, {
        lastCheckedAt: new Date().toISOString(),
        lastCheckStatus: 'success',
      });

      this.rateLimiter.reportSuccess(channel.platform);

      console.log(
        `[scheduler] Check complete for channel ${channel.id}: ${insertedCount} new items (${items.length} fetched, ${existingIds.size} existing)`
      );

      return {
        channelId: channel.id,
        channelName: channel.name,
        newItems: insertedCount,
        totalFetched: items.length,
        status: 'success',
      };
    } catch (err) {
      // Determine status based on error type
      const isRateLimit =
        err instanceof YtDlpError && err.isRateLimit;
      const status = isRateLimit ? 'rate_limited' : 'error';

      // Update channel status — best-effort; a failure here must not mask
      // the original check error
      try {
        await updateChannel(this.db, channel.id, {
          lastCheckedAt: new Date().toISOString(),
          lastCheckStatus: status,
        });
      } catch (updateErr) {
        console.error(
          `[scheduler] Failed to update status for channel ${channel.id}:`,
          updateErr
        );
      }

      this.rateLimiter.reportError(channel.platform);

      console.error(
        `[scheduler] Check failed for channel ${channel.id} ("${channel.name}"): ${status}`,
        err instanceof Error ? err.message : err
      );

      return {
        channelId: channel.id,
        channelName: channel.name,
        newItems: 0,
        totalFetched: 0,
        status,
      };
    } finally {
      // Always release the per-channel lock
      this.activeChecks.delete(channel.id);
    }
  }

  /**
   * Get the current state of the scheduler for diagnostic inspection.
   */
  getState(): SchedulerState {
    const channels: ChannelJobState[] = [];

    for (const [channelId, job] of this.jobs) {
      const channel = this.channelCache.get(channelId);
      channels.push({
        channelId,
        channelName: channel?.name ?? 'unknown',
        platform: (channel?.platform ?? 'unknown') as Platform,
        isRunning: job.isBusy(),
        nextRun: job.nextRun() ?? null,
        lastCheckedAt: channel?.lastCheckedAt ?? null,
        lastCheckStatus: channel?.lastCheckStatus ?? null,
      });
    }

    return {
      running: this.running,
      channelCount: this.jobs.size,
      channels,
    };
  }

  // ── Internal ──

  /**
   * Create a Cron job for a channel.
   * Uses the interval option for arbitrary check intervals.
   */
  private createJob(channel: Channel): void {
    const intervalSeconds = channel.checkInterval * 60;
    const cronPattern = minutesToCronPattern(channel.checkInterval);

    const job = new Cron(
      cronPattern,
      {
        protect: true, // Prevent overlapping runs
        interval: intervalSeconds, // Minimum seconds between runs
      },
      async () => {
        // Refresh channel from cache (it may have been updated)
        const current = this.channelCache.get(channel.id) ?? channel;
        await this.checkChannel(current);
      }
    );

    this.jobs.set(channel.id, job);
    this.channelCache.set(channel.id, channel);
  }
}

// ── Helpers ──

/**
 * Convert a check interval in minutes to a cron pattern.
 * For intervals that divide evenly into 60, use `* /{n} * * * *`.
 * For other intervals, use the closest reasonable pattern and rely on
 * croner's `interval` option for exact timing.
+ */ +function minutesToCronPattern(minutes: number): string { + if (minutes <= 0) return '*/5 * * * *'; // Fallback: every 5 minutes + if (minutes < 60 && 60 % minutes === 0) { + return `*/${minutes} * * * *`; + } + if (minutes === 60) { + return '0 * * * *'; // Every hour + } + if (minutes < 60) { + // Arbitrary sub-hour interval — run every minute, use `interval` option + return '* * * * *'; + } + // For intervals >= 60 minutes, run every hour and use `interval` option + return '0 * * * *'; +} diff --git a/src/sources/platform-source.ts b/src/sources/platform-source.ts new file mode 100644 index 0000000..7ec9f18 --- /dev/null +++ b/src/sources/platform-source.ts @@ -0,0 +1,115 @@ +import type { + Platform, + PlatformSourceMetadata, + PlatformContentMetadata, + PlaylistDiscoveryResult, + Channel, +} from '../types/index'; + +// ── Options ── + +/** Options for fetchRecentContent, loaded from platform settings. */ +export interface FetchRecentContentOptions { + /** Maximum items to enumerate in the discovery phase. Default: 50 */ + limit?: number; + /** Set of platformContentIds already known — skips enrichment for these. */ + existingIds?: Set; + /** Milliseconds to wait between per-item enrichment calls. Default: 1000 */ + rateLimitDelay?: number; +} + +// ── Interface ── + +/** + * Extensible plugin contract for platform integrations. + * Each platform (YouTube, SoundCloud, etc.) implements this interface + * to provide channel resolution and content fetching via yt-dlp. + */ +export interface PlatformSource { + /** Resolve a platform URL to channel metadata (name, platformId, image). */ + resolveChannel(url: string): Promise; + + /** Fetch recent content items for a channel. */ + fetchRecentContent( + channel: Channel, + options?: FetchRecentContentOptions + ): Promise; + + /** + * Fetch ALL content for a channel (no playlist-items limit). + * Used by back-catalog import. 
Optional — platforms that don't support + * full catalog fetch fall back to fetchRecentContent with a high limit. + */ + fetchAllContent?(channel: Channel): Promise; + + /** + * Fetch playlists for a channel, with video-to-playlist mappings. + * Optional — not all platforms expose playlist information. + */ + fetchPlaylists?(channel: Channel): Promise; +} + +// ── Registry ── + +/** + * Maps Platform enum values to PlatformSource implementations. + * Resolves the correct source from a URL via pattern matching. + */ +export class PlatformRegistry { + private readonly sources = new Map(); + + /** Register a platform source implementation. */ + register(platform: Platform, source: PlatformSource): void { + this.sources.set(platform, source); + } + + /** Get the source for a known platform. */ + get(platform: Platform): PlatformSource | undefined { + return this.sources.get(platform); + } + + /** + * Detect the platform from a URL and return the corresponding source. + * Returns null if the URL doesn't match any registered platform. 
+ */ + getForUrl(url: string): { platform: Platform; source: PlatformSource } | null { + const platform = detectPlatformFromUrl(url); + if (!platform) return null; + + const source = this.sources.get(platform); + if (!source) return null; + + return { platform, source }; + } +} + +// ── URL Detection ── + +const YOUTUBE_PATTERNS = [ + /^https?:\/\/(www\.)?youtube\.com\/@/, + /^https?:\/\/(www\.)?youtube\.com\/channel\//, + /^https?:\/\/(www\.)?youtube\.com\/c\//, + /^https?:\/\/(www\.)?youtube\.com\/user\//, + /^https?:\/\/youtu\.be\//, +]; + +const SOUNDCLOUD_PATTERNS = [ + /^https?:\/\/(www\.)?soundcloud\.com\/[^/]+\/?$/, + /^https?:\/\/(www\.)?soundcloud\.com\/[^/]+$/, +]; + +function detectPlatformFromUrl(url: string): Platform | null { + for (const pattern of YOUTUBE_PATTERNS) { + if (pattern.test(url)) return 'youtube' as Platform; + } + + // SoundCloud: match artist pages, reject track/set URLs + if (/^https?:\/\/(www\.)?soundcloud\.com\//.test(url)) { + // Reject track and set URLs + if (/\/(tracks|sets)\//.test(url)) return null; + // Accept artist-level URLs + return 'soundcloud' as Platform; + } + + return null; +} diff --git a/src/sources/soundcloud.ts b/src/sources/soundcloud.ts new file mode 100644 index 0000000..7338781 --- /dev/null +++ b/src/sources/soundcloud.ts @@ -0,0 +1,103 @@ +import type { PlatformSource, FetchRecentContentOptions } from './platform-source'; +import type { + Channel, + PlatformSourceMetadata, + PlatformContentMetadata, +} from '../types/index'; +import { Platform, ContentType } from '../types/index'; +import { execYtDlp, parseSingleJson, parseJsonLines } from './yt-dlp'; + +// ── URL Validation ── + +/** + * Accept SoundCloud artist-level URLs. + * Reject track and set URLs (which contain /tracks/ or /sets/). 
+ */ +export function isSoundCloudChannelUrl(url: string): boolean { + if (!/^https?:\/\/(www\.)?soundcloud\.com\//.test(url)) return false; + if (/\/(tracks|sets)\//.test(url)) return false; + return true; +} + +// ── Implementation ── + +export class SoundCloudSource implements PlatformSource { + async resolveChannel(url: string): Promise { + const result = await execYtDlp( + ['--dump-single-json', '--playlist-items', '0', '--flat-playlist', url], + { timeout: 30_000 } + ); + + const data = parseSingleJson(result.stdout) as Record; + + // SoundCloud uses uploader/uploader_id/uploader_url instead of channel fields + const channelName = + (data.uploader as string) ?? + (data.channel as string) ?? + 'Unknown Artist'; + const uploaderId = (data.uploader_id as string) ?? ''; + const uploaderUrl = + (data.uploader_url as string) ?? + (data.channel_url as string) ?? + url; + + // Pick best available thumbnail + const thumbnails = data.thumbnails as Array<{ url?: string }> | undefined; + const imageUrl = thumbnails?.length + ? (thumbnails[thumbnails.length - 1]?.url ?? null) + : null; + + return { + name: channelName, + platformId: uploaderId, + imageUrl, + url: uploaderUrl, + platform: Platform.SoundCloud, + }; + } + + async fetchRecentContent( + channel: Channel, + options?: FetchRecentContentOptions + ): Promise { + const limit = options?.limit ?? 50; + const result = await execYtDlp( + [ + '--flat-playlist', + '--dump-json', + '--playlist-items', + `1:${limit}`, + '--sleep-requests', + '2', // SoundCloud rate limit mitigation + channel.url, + ], + { timeout: 120_000 } // Longer timeout due to sleep-requests + ); + + const entries = parseJsonLines(result.stdout); + + return entries.map((entry) => { + const e = entry as Record; + return { + platformContentId: (e.id as string) ?? '', + title: (e.title as string) ?? 'Untitled', + url: (e.url as string) ?? (e.webpage_url as string) ?? 
'', + contentType: ContentType.Audio, // SoundCloud defaults to audio + duration: typeof e.duration === 'number' ? e.duration : null, + thumbnailUrl: extractThumbnailUrl(e), + publishedAt: null, // populated in T02 from upload_date + }; + }); + } +} + +// ── Helpers ── + +function extractThumbnailUrl(entry: Record): string | null { + if (typeof entry.thumbnail === 'string') return entry.thumbnail; + const thumbnails = entry.thumbnails as Array<{ url?: string }> | undefined; + if (thumbnails?.length) { + return thumbnails[thumbnails.length - 1]?.url ?? null; + } + return null; +} diff --git a/src/sources/youtube.ts b/src/sources/youtube.ts new file mode 100644 index 0000000..e3ed739 --- /dev/null +++ b/src/sources/youtube.ts @@ -0,0 +1,261 @@ +import type { PlatformSource, FetchRecentContentOptions } from './platform-source'; +import type { + Channel, + PlatformSourceMetadata, + PlatformContentMetadata, + PlaylistDiscoveryResult, +} from '../types/index'; +import { Platform, ContentType } from '../types/index'; +import { execYtDlp, parseSingleJson, parseJsonLines } from './yt-dlp'; + +// ── URL Validation ── + +const YOUTUBE_URL_PATTERNS = [ + /^https?:\/\/(www\.)?youtube\.com\/@[\w.-]+/, + /^https?:\/\/(www\.)?youtube\.com\/channel\/[\w-]+/, + /^https?:\/\/(www\.)?youtube\.com\/c\/[\w.-]+/, + /^https?:\/\/(www\.)?youtube\.com\/user\/[\w.-]+/, + /^https?:\/\/youtu\.be\/[\w-]+/, +]; + +export function isYouTubeUrl(url: string): boolean { + return YOUTUBE_URL_PATTERNS.some((p) => p.test(url)); +} + +// ── Implementation ── + +export class YouTubeSource implements PlatformSource { + async resolveChannel(url: string): Promise { + const result = await execYtDlp( + ['--dump-single-json', '--playlist-items', '0', '--flat-playlist', url], + { timeout: 30_000 } + ); + + const data = parseSingleJson(result.stdout) as Record; + + const channelName = + (data.channel as string) ?? + (data.uploader as string) ?? 
+ 'Unknown Channel'; + const channelId = (data.channel_id as string) ?? ''; + const channelUrl = + (data.channel_url as string) ?? + (data.uploader_url as string) ?? + url; + + // Pick the best thumbnail — yt-dlp returns an array sorted by quality + const thumbnails = data.thumbnails as Array<{ url?: string }> | undefined; + const imageUrl = thumbnails?.length + ? (thumbnails[thumbnails.length - 1]?.url ?? null) + : null; + + return { + name: channelName, + platformId: channelId, + imageUrl, + url: channelUrl, + platform: Platform.YouTube, + }; + } + + /** + * Hybrid two-phase fetch for YouTube content. + * + * Phase 1 (Discovery): `--flat-playlist` for fast enumeration — returns IDs + * and titles without per-video metadata fetches. Extremely fast even for + * large channels. + * + * Phase 2 (Enrichment): For items NOT in `existingIds`, fetch full metadata + * per-video to get `upload_date`. Respects `rateLimitDelay` between calls. + * Individual enrichment failures are tolerated — the item is returned with + * `publishedAt: null` rather than aborting the scan. + */ + async fetchRecentContent( + channel: Channel, + options?: FetchRecentContentOptions + ): Promise { + const limit = options?.limit ?? 50; + const existingIds = options?.existingIds ?? new Set(); + const rateLimitDelay = options?.rateLimitDelay ?? 
1000;

    // ── Phase 1: Fast discovery via --flat-playlist ──
    const flatResult = await execYtDlp(
      [
        '--flat-playlist',
        '--dump-json',
        '--playlist-items',
        `1:${limit}`,
        channel.url,
      ],
      { timeout: 60_000 }
    );

    const flatEntries = parseJsonLines(flatResult.stdout);
    const discoveredItems = flatEntries.map((entry) => mapEntry(entry));

    // ── Phase 2: Enrich new items with upload_date ──
    const newItems = discoveredItems.filter(
      (item) => !existingIds.has(item.platformContentId)
    );

    if (newItems.length === 0) {
      // All items already known — return flat results as-is
      return discoveredItems;
    }

    console.log(
      `[youtube] Phase 2: enriching ${newItems.length} new items (${discoveredItems.length - newItems.length} already known)`
    );

    // Enriched metadata keyed by platformContentId (explicit type params —
    // a bare `new Map()` would infer `Map<any, any>` and lose type safety).
    const enrichedMap = new Map<string, PlatformContentMetadata>();

    for (const [i, item] of newItems.entries()) {
      // Rate limit delay between enrichment calls (skip before first)
      if (i > 0 && rateLimitDelay > 0) {
        await sleep(rateLimitDelay);
      }

      try {
        const videoUrl = `https://www.youtube.com/watch?v=${item.platformContentId}`;
        const enrichResult = await execYtDlp(
          ['--dump-json', '--no-playlist', videoUrl],
          { timeout: 15_000 }
        );

        const enrichedItem = mapEntry(parseSingleJson(enrichResult.stdout));
        enrichedMap.set(item.platformContentId, enrichedItem);

        console.log(
          `[youtube] Enriched ${i + 1}/${newItems.length}: ${item.platformContentId}`
        );
      } catch (err) {
        // Tolerate individual failures — item keeps flat-playlist data (publishedAt: null)
        console.warn(
          `[youtube] Enrichment failed for ${item.platformContentId}: ${err instanceof Error ? err.message : err}`
        );

        // FIX: if the platform is rate-limiting us (YtDlpError.isRateLimit),
        // stop enriching instead of hammering it further. Remaining new
        // items keep their flat-playlist data, same as an individual failure.
        if ((err as { isRateLimit?: boolean } | null)?.isRateLimit === true) {
          console.warn(
            '[youtube] Rate limited — aborting enrichment; remaining items keep flat-playlist data'
          );
          break;
        }
      }
    }

    // Merge enriched data back into the full list
    return discoveredItems.map((item) => {
      const enriched = enrichedMap.get(item.platformContentId);
      return enriched ??
item; + }); + } + + /** + * Fetch ALL content for a channel — no playlist-items limit. + * Used by back-catalog import. Extended timeout (300s) to handle + * large channels with thousands of videos. + */ + async fetchAllContent( + channel: Channel + ): Promise { + const result = await execYtDlp( + [ + '--flat-playlist', + '--dump-json', + channel.url, + ], + { timeout: 300_000 } + ); + + const entries = parseJsonLines(result.stdout); + + return entries.map((entry) => mapEntry(entry)); + } + + /** + * Fetch playlists for a YouTube channel, with video-to-playlist mappings. + * Step 1: Enumerate playlists from the channel's /playlists tab. + * Step 2: For each playlist, fetch the video IDs it contains. + */ + async fetchPlaylists( + channel: Channel + ): Promise { + // Get playlist list from /playlists tab + const listResult = await execYtDlp( + ['--flat-playlist', '--dump-json', `${channel.url}/playlists`], + { timeout: 60_000 } + ); + const playlistEntries = parseJsonLines(listResult.stdout); + + // For each playlist, fetch its video IDs + const results: PlaylistDiscoveryResult[] = []; + for (const entry of playlistEntries) { + const e = entry as Record; + const playlistId = e.id as string; + const title = (e.title as string) ?? 'Untitled Playlist'; + + if (!playlistId) continue; + + const playlistUrl = `https://www.youtube.com/playlist?list=${playlistId}`; + const videoResult = await execYtDlp( + ['--flat-playlist', '--dump-json', playlistUrl], + { timeout: 60_000 } + ); + const videoEntries = parseJsonLines(videoResult.stdout); + const videoIds = videoEntries + .map((v) => (v as Record).id as string) + .filter(Boolean); + + results.push({ platformPlaylistId: playlistId, title, videoIds }); + } + + return results; + } +} + +// ── Helpers ── + +/** + * Parse yt-dlp's upload_date (YYYYMMDD) into an ISO 8601 datetime string. + * Returns null for missing or malformed values. 
+ */ +function parseUploadDate(raw: string | undefined): string | null { + if (!raw || raw.length !== 8) return null; + const y = raw.slice(0, 4); + const m = raw.slice(4, 6); + const d = raw.slice(6, 8); + return `${y}-${m}-${d}T00:00:00Z`; +} + +function mapEntry(entry: unknown): PlatformContentMetadata { + const e = entry as Record; + return { + platformContentId: (e.id as string) ?? '', + title: (e.title as string) ?? 'Untitled', + url: (e.url as string) ?? (e.webpage_url as string) ?? '', + contentType: detectContentType(e), + duration: typeof e.duration === 'number' ? e.duration : null, + thumbnailUrl: extractThumbnailUrl(e), + publishedAt: parseUploadDate(e.upload_date as string | undefined), + }; +} + +function detectContentType( + entry: Record +): PlatformContentMetadata['contentType'] { + const liveStatus = entry.live_status as string | undefined; + if (liveStatus === 'is_live' || liveStatus === 'is_upcoming') { + return ContentType.Livestream; + } + return ContentType.Video; +} + +function extractThumbnailUrl(entry: Record): string | null { + if (typeof entry.thumbnail === 'string') return entry.thumbnail; + const thumbnails = entry.thumbnails as Array<{ url?: string }> | undefined; + if (thumbnails?.length) { + return thumbnails[thumbnails.length - 1]?.url ?? null; + } + return null; +} + +function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} diff --git a/src/sources/yt-dlp.ts b/src/sources/yt-dlp.ts new file mode 100644 index 0000000..a94e548 --- /dev/null +++ b/src/sources/yt-dlp.ts @@ -0,0 +1,195 @@ +import { execFile as execFileCb } from 'node:child_process'; +import { promisify } from 'node:util'; + +const execFileAsync = promisify(execFileCb); + +// ── Types ── + +export interface YtDlpResult { + stdout: string; + stderr: string; + exitCode: number; +} + +export interface ExecYtDlpOptions { + /** Timeout in milliseconds. Default: 30_000 (30s). 
*/ + timeout?: number; +} + +// ── Error ── + +/** + * Structured error from a yt-dlp subprocess invocation. + * Carries stderr, exit code, and a rate-limit detection flag. + */ +export class YtDlpError extends Error { + readonly stderr: string; + readonly exitCode: number; + readonly isRateLimit: boolean; + + constructor(message: string, stderr: string, exitCode: number) { + super(message); + this.name = 'YtDlpError'; + this.stderr = stderr; + this.exitCode = exitCode; + this.isRateLimit = detectRateLimit(stderr); + } +} + +function detectRateLimit(stderr: string): boolean { + const lower = stderr.toLowerCase(); + return ( + lower.includes('429') || + lower.includes('too many requests') || + lower.includes('http error 429') + ); +} + +// ── Core Functions ── + +/** + * Execute yt-dlp with the given arguments. + * Uses `execFile` (no shell) to avoid injection. + * Enforces timeout via AbortController. + */ +export async function execYtDlp( + args: string[], + options?: ExecYtDlpOptions +): Promise { + const timeout = options?.timeout ?? 30_000; + const controller = new AbortController(); + + try { + const { stdout, stderr } = await execFileAsync('yt-dlp', args, { + signal: controller.signal, + timeout, + maxBuffer: 10 * 1024 * 1024, // 10 MB — playlist dumps can be large + windowsHide: true, + }); + + return { stdout: stdout ?? '', stderr: stderr ?? '', exitCode: 0 }; + } catch (err: unknown) { + // AbortController or timeout cancellation + if (isAbortError(err)) { + throw new YtDlpError( + `yt-dlp timed out after ${timeout}ms`, + (err as NodeJS.ErrnoException & { stderr?: string }).stderr ?? '', + -1 + ); + } + + // execFile error with exit code + const execErr = err as NodeJS.ErrnoException & { + stdout?: string; + stderr?: string; + code?: number | string; + }; + const exitCode = typeof execErr.code === 'number' ? execErr.code : 1; + const stderr = execErr.stderr ?? ''; + const stdout = execErr.stdout ?? 
''; + + // If we got stdout despite non-zero exit, still return it — some yt-dlp + // operations emit partial output on error + if (stdout && !stderr) { + return { stdout, stderr, exitCode }; + } + + throw new YtDlpError( + `yt-dlp exited with code ${exitCode}: ${stderr.slice(0, 200)}`, + stderr, + exitCode + ); + } +} + +function isAbortError(err: unknown): boolean { + if (err instanceof Error) { + return ( + err.name === 'AbortError' || + (err as NodeJS.ErrnoException).code === 'ABORT_ERR' || + (err as NodeJS.ErrnoException).code === 'ERR_CHILD_PROCESS_STDIO_MAXBUFFER' || + err.message.includes('was killed with signal SIGTERM') + ); + } + return false; +} + +// ── JSON Parsing ── + +/** + * Parse newline-delimited JSON output (from `--flat-playlist --dump-json`). + * Skips empty lines and catches per-line parse errors. + */ +export function parseJsonLines(stdout: string): unknown[] { + const results: unknown[] = []; + const lines = stdout.split('\n'); + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed) continue; + + try { + results.push(JSON.parse(trimmed)); + } catch { + // Skip unparseable lines — yt-dlp occasionally emits progress text + // interspersed with JSON + } + } + + return results; +} + +/** + * Parse a single JSON object from stdout (from `--dump-single-json`). + * Throws if the output is not valid JSON. + */ +export function parseSingleJson(stdout: string): unknown { + const trimmed = stdout.trim(); + if (!trimmed) { + throw new YtDlpError('yt-dlp returned empty output', '', 0); + } + try { + return JSON.parse(trimmed); + } catch { + throw new YtDlpError( + `Failed to parse yt-dlp JSON output: ${trimmed.slice(0, 100)}`, + '', + 0 + ); + } +} + +// ── Health Check ── + +/** + * Check if yt-dlp is available on the system PATH. + * Returns true if `yt-dlp --version` exits 0. 
+ */ +export async function checkYtDlpAvailable(): Promise { + try { + await execFileAsync('yt-dlp', ['--version'], { + timeout: 5_000, + windowsHide: true, + }); + return true; + } catch { + return false; + } +} + +/** + * Get the yt-dlp version string from the system PATH. + * Returns the trimmed version (e.g. "2024.12.23") on success, or null if + * yt-dlp is not available or the call fails. + */ +export async function getYtDlpVersion(): Promise { + try { + const { stdout } = await execFileAsync('yt-dlp', ['--version'], { + timeout: 5_000, + windowsHide: true, + }); + return stdout.trim() || null; + } catch { + return null; + } +} diff --git a/src/types/api.ts b/src/types/api.ts new file mode 100644 index 0000000..247f233 --- /dev/null +++ b/src/types/api.ts @@ -0,0 +1,82 @@ +// ── API Response Envelopes ── + +/** Standard API response wrapper. */ +export interface ApiResponse { + success: boolean; + data: T; +} + +/** Standard API error shape. */ +export interface ApiError { + error: string; + statusCode: number; + message: string; + stack?: string; // included only in development +} + +/** Paginated API response wrapper. */ +export interface PaginatedResponse { + success: boolean; + data: T[]; + pagination: { + page: number; + pageSize: number; + totalItems: number; + totalPages: number; + }; +} + +// ── Endpoint-Specific Responses ── + +export interface ComponentHealth { + name: string; + status: 'healthy' | 'degraded' | 'unhealthy'; + message?: string; + responseTime?: number; // ms + details?: Record; +} + +export interface HealthResponse { + status: 'healthy' | 'degraded' | 'unhealthy'; + components: ComponentHealth[]; + uptime: number; // seconds +} + +export interface SystemStatusResponse { + appName: string; + version: string; + uptime: number; // seconds + nodeVersion: string; + platform: string; + arch: string; + memoryUsage: { + heapUsed: number; + heapTotal: number; + rss: number; + }; +} + +/** Response shape for API key management endpoints. 
 */
export interface ApiKeyResponse {
  apiKey: string;
}

// ── Channel Content Counts ──

/** Aggregated content counts for a single channel. */
export interface ContentCounts {
  total: number;
  monitored: number;
  downloaded: number;
}

/** App-wide settings (check interval, concurrent downloads). */
export interface AppSettingsResponse {
  checkInterval: number;
  concurrentDownloads: number;
}

/** Channel with aggregated content counts — returned by GET /api/v1/channel. */
export type ChannelWithCounts = import('./index').Channel & {
  contentCounts: ContentCounts;
};

// ════ file: src/types/index.ts ════

// ── Platform & Content Enums ──
// Const-object + derived-union pattern (instead of `enum`) so the values
// are plain strings at runtime and the type is a literal union.

export const Platform = {
  YouTube: 'youtube',
  SoundCloud: 'soundcloud',
} as const;
export type Platform = (typeof Platform)[keyof typeof Platform];

export const ContentType = {
  Video: 'video',
  Audio: 'audio',
  Livestream: 'livestream',
} as const;
export type ContentType = (typeof ContentType)[keyof typeof ContentType];

// Lifecycle of a tracked content item.
export const ContentStatus = {
  Monitored: 'monitored',
  Queued: 'queued',
  Downloading: 'downloading',
  Downloaded: 'downloaded',
  Failed: 'failed',
  Ignored: 'ignored',
} as const;
export type ContentStatus = (typeof ContentStatus)[keyof typeof ContentStatus];

// Lifecycle of a download-queue entry.
export const QueueStatus = {
  Pending: 'pending',
  Downloading: 'downloading',
  Completed: 'completed',
  Failed: 'failed',
  Cancelled: 'cancelled',
} as const;
export type QueueStatus = (typeof QueueStatus)[keyof typeof QueueStatus];

export type MonitoringMode = 'all' | 'future' | 'existing' | 'none';

// ── Platform Source Types ──

/** Metadata resolved from a platform URL identifying a channel.
 */
export interface PlatformSourceMetadata {
  name: string;
  platformId: string;
  imageUrl: string | null;
  url: string;
  platform: Platform;
}

/** Metadata for a single piece of content from a platform. */
export interface PlatformContentMetadata {
  platformContentId: string;
  title: string;
  url: string;
  contentType: ContentType;
  duration: number | null;
  thumbnailUrl: string | null;
  publishedAt: string | null;
}

// ── Domain Interfaces ──

/** A monitored channel (YouTube channel, SoundCloud artist, …). */
export interface Channel {
  id: number;
  name: string;
  platform: Platform;
  platformId: string;
  url: string;
  monitoringEnabled: boolean;
  checkInterval: number; // minutes
  imageUrl: string | null;
  metadata: Record<string, unknown> | null;
  formatProfileId: number | null;
  monitoringMode: MonitoringMode;
  createdAt: string;
  updatedAt: string;
  lastCheckedAt: string | null;
  lastCheckStatus: 'success' | 'error' | 'rate_limited' | null;
}

/** One video/audio item belonging to a channel. */
export interface ContentItem {
  id: number;
  channelId: number;
  title: string;
  platformContentId: string;
  url: string;
  contentType: ContentType;
  duration: number | null; // seconds
  filePath: string | null;
  fileSize: number | null; // bytes
  format: string | null;
  qualityMetadata: QualityInfo | null;
  status: ContentStatus;
  thumbnailUrl: string | null;
  publishedAt: string | null;
  downloadedAt: string | null;
  monitored: boolean;
  createdAt: string;
  updatedAt: string;
}

/** Post-download quality measurements and warnings. */
export interface QualityInfo {
  actualResolution: string | null;
  actualCodec: string | null;
  actualBitrate: string | null;
  containerFormat: string | null;
  qualityWarnings: string[];
}

/** A download-queue entry referencing a ContentItem. */
export interface QueueItem {
  id: number;
  contentItemId: number;
  status: QueueStatus;
  priority: number;
  attempts: number;
  maxAttempts: number;
  error: string | null;
  startedAt: string | null;
  completedAt: string | null;
  createdAt: string;
  updatedAt: string;
  /** Channel name resolved via content_items → channels LEFT JOIN.
      Null when the content/channel was deleted. */
  channelName?: string | null;
  /** Content title resolved via content_items LEFT JOIN. Null when the content item was deleted. */
  contentTitle?: string | null;
}

/** Target quality/container settings applied to downloads. */
export interface FormatProfile {
  id: number;
  name: string;
  videoResolution: string | null;
  audioCodec: string | null;
  audioBitrate: string | null;
  containerFormat: string | null;
  isDefault: boolean;
  subtitleLanguages: string | null; // comma-separated lang codes e.g. "en,es,fr"
  embedSubtitles: boolean;
  createdAt: string;
  updatedAt: string;
}

/** Audit-log row for grab/download/failure events. */
export interface DownloadHistoryRecord {
  id: number;
  contentItemId: number | null;
  channelId: number | null;
  eventType: string;
  status: string;
  details: Record<string, unknown> | null;
  createdAt: string;
}

/** A configured notification target and which events it fires on. */
export interface NotificationSetting {
  id: number;
  type: 'discord' | 'email' | 'pushover' | 'telegram';
  name: string;
  enabled: boolean;
  config: Record<string, unknown>;
  onGrab: boolean;
  onDownload: boolean;
  onFailure: boolean;
  createdAt: string;
  updatedAt: string;
}

/** Per-platform defaults (profiles, scan cadence, rate limiting). */
export interface PlatformSettings {
  platform: Platform;
  defaultFormatProfileId: number | null;
  checkInterval: number;
  concurrencyLimit: number;
  subtitleLanguages: string | null;
  grabAllEnabled: boolean;
  grabAllOrder: 'newest' | 'oldest';
  scanLimit: number;
  rateLimitDelay: number;
  createdAt: string;
  updatedAt: string;
}

/** Key/value system configuration row. */
export interface SystemConfig {
  key: string;
  value: string;
  createdAt: string;
  updatedAt: string;
}

/** A platform playlist associated with a channel. */
export interface Playlist {
  id: number;
  channelId: number;
  platformPlaylistId: string;
  title: string;
  position: number;
  createdAt: string;
  updatedAt: string;
}

/** Result from yt-dlp playlist discovery — maps playlist metadata to video IDs.
 */
export interface PlaylistDiscoveryResult {
  platformPlaylistId: string;
  title: string;
  /** platformContentIds of videos in this playlist */
  videoIds: string[];
}

// ════ file: tsconfig.json ════
// (tsconfig.json is JSONC — comments are permitted by the TypeScript compiler)
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "ES2022",
    "moduleResolution": "bundler",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "resolveJsonModule": true,
    "declaration": true,
    "declarationMap": true,
    "sourceMap": true,
    "outDir": "dist",
    "rootDir": "src",
    "baseUrl": ".",
    // "@/..." path alias mirrored in vitest.config.ts below
    "paths": {
      "@/*": ["src/*"]
    }
  },
  "include": ["src/**/*.ts"],
  "exclude": ["node_modules", "dist", "drizzle", "src/frontend"]
}

// ════ file: vitest.config.ts ════
import { defineConfig } from 'vitest/config';
import { resolve } from 'node:path';

export default defineConfig({
  resolve: {
    // Keep in sync with the "@/*" paths entry in tsconfig.json
    alias: {
      '@': resolve(__dirname, 'src'),
    },
  },
  test: {
    include: ['src/__tests__/**/*.test.ts'],
    environment: 'node',
    testTimeout: 15000,
    hookTimeout: 15000,
  },
});