diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4d3ac42e..f6b930b4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -152,6 +152,8 @@ jobs: - metadata-openlibrary - recommendations-anilist - sync-anilist + - release-mangaupdates + - release-nyaa steps: - uses: actions/checkout@v4 - name: Setup Node.js @@ -562,6 +564,8 @@ jobs: - metadata-openlibrary - recommendations-anilist - sync-anilist + - release-mangaupdates + - release-nyaa steps: - uses: actions/checkout@v4 with: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 39c82570..df4bfc1c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -134,6 +134,8 @@ jobs: - metadata-openlibrary - recommendations-anilist - sync-anilist + - release-mangaupdates + - release-nyaa steps: - uses: actions/checkout@v4 - name: Setup Node.js diff --git a/Makefile b/Makefile index a8500b1a..71ab45da 100644 --- a/Makefile +++ b/Makefile @@ -271,7 +271,12 @@ plugins-outdated: ## Check for outdated plugin dependencies plugins-build: ## Build all plugins @echo "$(BLUE)Building plugins...$(NC)" - @for dir in $(PLUGIN_DIRS); do \ + @# sdk-typescript must build first: every other plugin imports from + @# its compiled dist, and esbuild fails fast on missing exports if + @# the SDK dist is stale. 
+ @echo "$(YELLOW)Building sdk-typescript...$(NC)" + @(cd plugins/sdk-typescript && npm run build) + @for dir in $(PLUGIN_ONLY_DIRS); do \ echo "$(YELLOW)Building $$dir...$(NC)"; \ (cd plugins/$$dir && npm run build); \ done diff --git a/config/seed-config.sample.yaml b/config/seed-config.sample.yaml index e63987ae..680b2a9b 100644 --- a/config/seed-config.sample.yaml +++ b/config/seed-config.sample.yaml @@ -88,28 +88,72 @@ plugins: credential_delivery: env # AniList Recommendations - Personalized manga recommendations + # Recommendation plugins are gated by manifest capability + per-user enable; + # they don't write metadata, so no `permissions` or `scopes` are needed. - name: recommendations-anilist display_name: AniList Recommendations description: Personalized manga recommendations from AniList based on reading history plugin_type: user command: node args: ["/opt/codex/plugins/recommendations-anilist/dist/index.js"] - permissions: - - "metadata:read" + permissions: [] scopes: [] credential_delivery: env # AniList Sync - Reading progress sync + # Sync plugins are gated by manifest capability + per-user enable; they don't + # write series/book metadata, so no `permissions` or `scopes` are needed. - name: sync-anilist display_name: AniList Sync description: Sync manga reading progress between Codex and AniList plugin_type: user command: node args: ["/opt/codex/plugins/sync-anilist/dist/index.js"] - permissions: - - "metadata:read" + permissions: [] + scopes: [] + credential_delivery: env + + # MangaUpdates Releases - Translation/scanlation release feed (no credentials needed) + # Release-source plugins are gated by manifest capability at reverse-RPC + # dispatch; they don't write metadata, so no `permissions` or `scopes` are needed. + # + # On first start the plugin auto-registers a single source row visible at + # Settings → Release tracking. Optional `config.blockedGroups` (CSV) filters + # noisy scanlators server-wide. 
+ - name: release-mangaupdates + display_name: MangaUpdates Releases + description: Announces new chapter releases for tracked series via MangaUpdates per-series RSS feeds + command: node + args: ["/opt/codex/plugins/release-mangaupdates/dist/index.js"] + permissions: [] + scopes: [] + credential_delivery: env + # config: + # blockedGroups: "LowQualityScans,MTL Group" + + # Nyaa Releases - Acquisition-pointer source for trusted uploaders (no credentials needed) + # + # The `uploaders` config below drives one source row per entry at + # Settings → Release tracking. Each entry is one of: + # `username` -> Nyaa user feed (https://nyaa.si/?page=rss&u=) + # `q:` -> plain site-wide search + # `q:?` -> URL-style search with allowlisted keys (q, c, f, u), + # e.g. `q:?c=3_1&q=Berserk` for English-translated + # Literature category. See plugin docs for details. + # + # Removing an entry and re-saving prunes its source row (and its + # release_ledger history). Per-source poll interval / enable / "Poll now" + # live in the Release tracking UI. 
+ - name: release-nyaa + display_name: Nyaa Releases + description: Announces new chapter / volume torrents for tracked series via Nyaa.si uploader RSS feeds + command: node + args: ["/opt/codex/plugins/release-nyaa/dist/index.js"] + permissions: [] scopes: [] credential_delivery: env + # config: + # uploaders: "tsuna69,TankobonBlur,q:?c=3_1&q=Berserk" # ============================================================================= # Libraries diff --git a/docker-compose.yml b/docker-compose.yml index 1d4908e6..e3fceed9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -94,6 +94,8 @@ services: - ./plugins/metadata-openlibrary/dist:/opt/codex/plugins/metadata-openlibrary/dist:ro - ./plugins/recommendations-anilist/dist:/opt/codex/plugins/recommendations-anilist/dist:ro - ./plugins/sync-anilist/dist:/opt/codex/plugins/sync-anilist/dist:ro + - ./plugins/release-mangaupdates/dist:/opt/codex/plugins/release-mangaupdates/dist:ro + - ./plugins/release-nyaa/dist:/opt/codex/plugins/release-nyaa/dist:ro environment: RUST_BACKTRACE: 1 # Email configuration for Mailhog @@ -156,6 +158,8 @@ services: - ./plugins/metadata-openlibrary/dist:/opt/codex/plugins/metadata-openlibrary/dist:ro - ./plugins/recommendations-anilist/dist:/opt/codex/plugins/recommendations-anilist/dist:ro - ./plugins/sync-anilist/dist:/opt/codex/plugins/sync-anilist/dist:ro + - ./plugins/release-mangaupdates/dist:/opt/codex/plugins/release-mangaupdates/dist:ro + - ./plugins/release-nyaa/dist:/opt/codex/plugins/release-nyaa/dist:ro command: [ "cargo", @@ -217,6 +221,8 @@ services: - /plugins/metadata-openlibrary/node_modules - /plugins/recommendations-anilist/node_modules - /plugins/sync-anilist/node_modules + - /plugins/release-mangaupdates/node_modules + - /plugins/release-nyaa/node_modules command: - sh - -c @@ -228,15 +234,19 @@ services: cd /plugins/metadata-openlibrary && npm install && npm run build && cd /plugins/recommendations-anilist && npm install && npm run build && cd 
/plugins/sync-anilist && npm install && npm run build && + cd /plugins/release-mangaupdates && npm install && npm run build && + cd /plugins/release-nyaa && npm install && npm run build && echo 'Initial build complete. Watching for changes...' && npm install -g concurrently && - concurrently --names 'sdk,echo,mangabaka,openlibrary,rec-anilist,sync-anilist' --prefix-colors 'blue,green,yellow,magenta,cyan,red' \ + concurrently --names 'sdk,echo,mangabaka,openlibrary,rec-anilist,sync-anilist,rel-mu,rel-nyaa' --prefix-colors 'blue,green,yellow,magenta,cyan,red,gray,white' \ "cd /plugins/sdk-typescript && npm run dev" \ "cd /plugins/metadata-echo && npm run dev" \ "cd /plugins/metadata-mangabaka && npm run dev" \ "cd /plugins/metadata-openlibrary && npm run dev" \ "cd /plugins/recommendations-anilist && npm run dev" \ - "cd /plugins/sync-anilist && npm run dev" + "cd /plugins/sync-anilist && npm run dev" \ + "cd /plugins/release-mangaupdates && npm run dev" \ + "cd /plugins/release-nyaa && npm run dev" networks: - codex-network profiles: @@ -375,6 +385,8 @@ services: start_period: 30s networks: - codex-network + ports: + - "8081:8080" profiles: - screenshots diff --git a/docs/api/openapi.json b/docs/api/openapi.json index b2e200e7..1504fb23 100644 --- a/docs/api/openapi.json +++ b/docs/api/openapi.json @@ -7107,35 +7107,31 @@ ] } }, - "/api/v1/scans/active": { + "/api/v1/release-sources": { "get": { "tags": [ - "Scans" + "Releases" ], - "summary": "List all active scans", - "description": "# Permission Required\n- `libraries:read`", - "operationId": "list_active_scans", + "summary": "List all configured release sources (admin-only).", + "operationId": "list_release_sources", "responses": { "200": { - "description": "List of active scans", + "description": "Source list", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/ScanStatusDto" - } + "$ref": "#/components/schemas/ReleaseSourceListResponse" } } } }, 
"403": { - "description": "Permission denied" + "description": "PluginsManage permission required" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7143,25 +7139,47 @@ ] } }, - "/api/v1/scans/stream": { + "/api/v1/release-sources/applicability": { "get": { "tags": [ - "Scans" + "Releases" + ], + "summary": "Whether release tracking is available for a given library.", + "description": "Read-only, requires only `SeriesRead`: the response carries no\nadmin-sensitive data (no plugin IDs, no configs, no library\nallowlists), just the boolean and friendly display names. Used by the\nfrontend to:\n\n- hide the per-series Tracking panel + Releases tab on libraries with\n no applicable plugin (cleaner UX);\n- decide whether to show the \"Track for releases\" / \"Don't track for\n releases\" entries in the bulk-selection menu.", + "operationId": "get_release_tracking_applicability", + "parameters": [ + { + "name": "libraryId", + "in": "query", + "description": "Optional library scope. When provided, only plugins that apply to\nthis library are considered (a plugin's `library_ids` field is\neither empty = all, or contains this UUID).", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } ], - "summary": "Stream scan progress updates via Server-Sent Events", - "description": "# Permission Required\n- `libraries:read`\n\n**DEPRECATED**: This endpoint is replaced by `/api/v1/tasks/stream` which provides\nreal-time updates for all task types including scans. 
This endpoint now filters\nthe task stream to only show scan_library tasks for backwards compatibility.", - "operationId": "scan_progress_stream", "responses": { "200": { - "description": "SSE stream of scan progress updates" + "description": "Applicability info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApplicabilityResponse" + } + } + } }, "403": { - "description": "Permission denied" + "description": "SeriesRead permission required" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7169,64 +7187,213 @@ ] } }, - "/api/v1/series": { - "get": { + "/api/v1/release-sources/{source_id}": { + "patch": { "tags": [ - "Series" + "Releases" ], - "summary": "List series with optional library filter and pagination", - "operationId": "list_series", + "summary": "PATCH a release source (admin-only).", + "description": "Toggle `enabled`, override `cronSchedule`, or rename `displayName`.\nSending `cronSchedule: null` clears the override and reverts the row to\ninheriting the server-wide `release_tracking.default_cron_schedule`.", + "operationId": "update_release_source", "parameters": [ { - "name": "page", - "in": "query", - "description": "Page number (1-indexed, default 1)", - "required": false, + "name": "source_id", + "in": "path", + "description": "Source ID", + "required": true, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateReleaseSourceRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Source updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReleaseSourceDto" + } + } } }, + "400": { + "description": "Invalid update payload" + }, + "403": { + "description": "PluginsManage permission required" + }, + "404": { + "description": 
"Source not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, { - "name": "pageSize", + "api_key": [] + } + ] + } + }, + "/api/v1/release-sources/{source_id}/poll-now": { + "post": { + "tags": [ + "Releases" + ], + "summary": "Trigger a manual poll for a source.", + "description": "Enqueues a `PollReleaseSource` task immediately. The task runs\nasynchronously via the worker pool; the response confirms the enqueue,\nnot the poll outcome.", + "operationId": "poll_release_source_now", + "parameters": [ + { + "name": "source_id", + "in": "path", + "description": "Source ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "202": { + "description": "Poll task enqueued", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PollNowResponse" + } + } + } + }, + "403": { + "description": "PluginsManage permission required" + }, + "404": { + "description": "Source not found" + }, + "409": { + "description": "Source disabled" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/api/v1/release-sources/{source_id}/reset": { + "post": { + "tags": [ + "Releases" + ], + "summary": "Reset a release source to a clean slate.", + "description": "Deletes every `release_ledger` row owned by the source and clears the\nsource's transient poll state (`etag`, `last_polled_at`, `last_error`,\n`last_error_at`, `last_summary`). User-managed fields (`enabled`,\n`cron_schedule`, `display_name`, `config`) are preserved.\n\nIntended for testing/troubleshooting: after a reset, the next poll\nfetches the upstream feed without an `If-None-Match` header (so no 304\nshort-circuit) and re-records every release as `announced`. 
Does NOT\nauto-enqueue a poll — call `POST /release-sources/{id}/poll-now` after\nresetting if you want immediate re-fetch.", + "operationId": "reset_release_source", + "parameters": [ + { + "name": "source_id", + "in": "path", + "description": "Source ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Source reset", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ResetReleaseSourceResponse" + } + } + } + }, + "403": { + "description": "PluginsManage permission required" + }, + "404": { + "description": "Source not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/api/v1/releases": { + "get": { + "tags": [ + "Releases" + ], + "summary": "Cross-series inbox: announced (or filtered) ledger entries, paginated.", + "operationId": "list_release_inbox", + "parameters": [ + { + "name": "state", "in": "query", - "description": "Number of items per page (max 100, default 50)", + "description": "Filter by state. Defaults to `announced`. Pass `all` to disable\nstate filtering entirely (returns rows in every state).", "required": false, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": [ + "string", + "null" + ] } }, { - "name": "sort", + "name": "seriesId", "in": "query", - "description": "Sort parameter (format: \"field,direction\" e.g. 
\"name,asc\")", "required": false, "schema": { "type": [ "string", "null" - ] + ], + "format": "uuid" } }, { - "name": "genres", + "name": "sourceId", "in": "query", - "description": "Filter by genres (comma-separated, AND logic - series must have ALL specified genres)", "required": false, "schema": { "type": [ "string", "null" - ] + ], + "format": "uuid" } }, { - "name": "tags", + "name": "language", "in": "query", - "description": "Filter by tags (comma-separated, AND logic - series must have ALL specified tags)", "required": false, "schema": { "type": [ @@ -7238,7 +7405,7 @@ { "name": "libraryId", "in": "query", - "description": "Filter by library ID", + "description": "Restrict to series belonging to this library.", "required": false, "schema": { "type": [ @@ -7249,22 +7416,33 @@ } }, { - "name": "full", + "name": "page", "in": "query", - "description": "Return full series data including metadata, locks, genres, tags, alternate titles,\nexternal ratings, and external links. Default is false for backward compatibility.", "required": false, "schema": { - "type": "boolean" + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 } } ], "responses": { "200": { - "description": "Paginated list of series (returns FullSeriesListResponse when full=true)", + "description": "Paginated inbox entries", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PaginatedResponse" + "$ref": "#/components/schemas/PaginatedResponse_ReleaseLedgerEntryDto" } } } @@ -7283,19 +7461,19 @@ ] } }, - "/api/v1/series/bulk/analyze": { + "/api/v1/releases/bulk": { "post": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk analyze multiple series", - "description": "Enqueues analysis tasks for all books in the specified series.\nSeries that don't exist are silently skipped.", - "operationId": 
"bulk_analyze_series", + "summary": "Apply an action to a batch of ledger rows.", + "description": "`dismiss`, `mark-acquired`, `ignore`, and `reset` all set state\nin-place. `delete` removes the rows and clears the affected sources'\netags so the next poll re-fetches without `If-None-Match`. All run\nas bulk SQL (no per-row round trips), so this scales to thousands of\nrows in one call.", + "operationId": "bulk_release_action", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkAnalyzeSeriesRequest" + "$ref": "#/components/schemas/BulkReleaseActionRequest" } } }, @@ -7303,17 +7481,17 @@ }, "responses": { "200": { - "description": "Analysis tasks enqueued", + "description": "Bulk action applied", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkAnalyzeResponse" + "$ref": "#/components/schemas/BulkReleaseActionResponse" } } } }, - "401": { - "description": "Unauthorized" + "400": { + "description": "Empty ID list or invalid action" }, "403": { "description": "Forbidden" @@ -7321,7 +7499,7 @@ }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7329,47 +7507,95 @@ ] } }, - "/api/v1/series/bulk/genres": { - "post": { + "/api/v1/releases/facets": { + "get": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk add/remove genres for multiple series", - "operationId": "bulk_modify_series_genres", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkModifySeriesGenresRequest" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Genres modified", - "content": { - "application/json": { + "summary": "Distinct values present in the inbox under the given filters.", + "description": "Returns the languages, libraries, and series that have at least one\nmatching ledger row. 
The frontend uses this to populate cascading\nSelect dropdowns so users never have to type a UUID and never see\ndropdown options that would yield zero results.", + "operationId": "list_release_facets", + "parameters": [ + { + "name": "state", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "seriesId", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "sourceId", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "language", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "libraryId", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Facets for the inbox view", + "content": { + "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataUpdateResponse" + "$ref": "#/components/schemas/ReleaseFacetsResponse" } } } }, "400": { - "description": "Bad request" - }, - "401": { - "description": "Unauthorized" + "description": "Invalid state filter" }, "403": { - "description": "Forbidden" + "description": "SeriesRead permission required" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7377,67 +7603,76 @@ ] } }, - "/api/v1/series/bulk/metadata": { - "patch": { + "/api/v1/releases/{release_id}": { + "delete": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk patch series metadata", - "description": "Applies the same partial metadata update to multiple series at once.\nOnly provided fields will be updated. 
Changed fields are auto-locked.\nNon-existent series are silently skipped.", - "operationId": "bulk_patch_series_metadata", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkPatchSeriesMetadataRequest" - } + "summary": "Hard-delete a single ledger row.", + "description": "Also clears the source's `etag` so the next poll bypasses\n`If-None-Match` and re-records the deleted row in `announced` state\n(assuming the upstream still lists it). This is the lever users want\nwhen they marked something incorrectly and need to \"get it back\".", + "operationId": "delete_release", + "parameters": [ + { + "name": "release_id", + "in": "path", + "description": "Ledger entry ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } - }, - "required": true - }, + } + ], "responses": { "200": { - "description": "Metadata updated", + "description": "Release deleted", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataUpdateResponse" + "$ref": "#/components/schemas/DeleteReleaseResponse" } } } }, - "400": { - "description": "Bad request" - }, - "401": { - "description": "Unauthorized" - }, "403": { "description": "Forbidden" + }, + "404": { + "description": "Ledger entry not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] - } - }, - "/api/v1/series/bulk/metadata/locks": { - "put": { + }, + "patch": { "tags": [ - "Bulk Operations" + "Releases" + ], + "summary": "PATCH a ledger entry's state (general-purpose state transition).", + "operationId": "update_release_entry", + "parameters": [ + { + "name": "release_id", + "in": "path", + "description": "Ledger entry ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Bulk update metadata locks for multiple series", - "operationId": "bulk_update_series_locks", "requestBody": { "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/BulkUpdateSeriesLocksRequest" + "$ref": "#/components/schemas/UpdateReleaseLedgerEntryRequest" } } }, @@ -7445,28 +7680,28 @@ }, "responses": { "200": { - "description": "Locks updated", + "description": "Updated ledger entry", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataUpdateResponse" + "$ref": "#/components/schemas/ReleaseLedgerEntryDto" } } } }, "400": { - "description": "Bad request" - }, - "401": { - "description": "Unauthorized" + "description": "Invalid state" }, "403": { "description": "Forbidden" + }, + "404": { + "description": "Ledger entry not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7474,45 +7709,46 @@ ] } }, - "/api/v1/series/bulk/metadata/reset": { + "/api/v1/releases/{release_id}/dismiss": { "post": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk reset metadata for multiple series", - "description": "Resets all metadata for the specified series back to filesystem-derived defaults.\nEach series has its metadata row deleted and recreated, and all associated data\n(genres, tags, alternate titles, external IDs/ratings/links, covers, metadata sources,\nsharing tags) is cleared. 
User ratings, read progress, and book data are preserved.\n\nSeries that don't exist are silently skipped.", - "operationId": "bulk_reset_series_metadata", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkSeriesRequest" - } + "summary": "Convenience POST: dismiss a release.", + "operationId": "dismiss_release", + "parameters": [ + { + "name": "release_id", + "in": "path", + "description": "Ledger entry ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } - }, - "required": true - }, + } + ], "responses": { "200": { - "description": "Metadata reset for specified series", + "description": "Release dismissed", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataResetResponse" + "$ref": "#/components/schemas/ReleaseLedgerEntryDto" } } } }, - "401": { - "description": "Unauthorized" - }, "403": { "description": "Forbidden" + }, + "404": { + "description": "Ledger entry not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7520,45 +7756,46 @@ ] } }, - "/api/v1/series/bulk/read": { + "/api/v1/releases/{release_id}/mark-acquired": { "post": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk mark multiple series as read", - "description": "Marks all books in the specified series as read for the authenticated user.\nSeries that don't exist are silently skipped.", - "operationId": "bulk_mark_series_as_read", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkSeriesRequest" - } + "summary": "Convenience POST: mark a release acquired.", + "operationId": "mark_release_acquired", + "parameters": [ + { + "name": "release_id", + "in": "path", + "description": "Ledger entry ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } - }, - "required": true - }, + } + ], "responses": { "200": { - "description": "Series marked as 
read", + "description": "Release marked acquired", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MarkReadResponse" + "$ref": "#/components/schemas/ReleaseLedgerEntryDto" } } } }, - "401": { - "description": "Unauthorized" - }, "403": { "description": "Forbidden" + }, + "404": { + "description": "Ledger entry not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7566,40 +7803,30 @@ ] } }, - "/api/v1/series/bulk/renumber": { - "post": { + "/api/v1/scans/active": { + "get": { "tags": [ - "Bulk Operations" + "Scans" ], - "summary": "Bulk renumber books in multiple series", - "description": "Enqueues a fan-out task that will renumber books in the specified series\nusing each library's number strategy. Returns a task ID for tracking progress via SSE.", - "operationId": "bulk_renumber_series", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkRenumberSeriesRequest" - } - } - }, - "required": true - }, + "summary": "List all active scans", + "description": "# Permission Required\n- `libraries:read`", + "operationId": "list_active_scans", "responses": { "200": { - "description": "Renumber task queued", + "description": "List of active scans", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkTaskResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/ScanStatusDto" + } } } } }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "Forbidden" + "description": "Permission denied" } }, "security": [ @@ -7612,47 +7839,139 @@ ] } }, - "/api/v1/series/bulk/tags": { - "post": { + "/api/v1/scans/stream": { + "get": { "tags": [ - "Bulk Operations" + "Scans" ], - "summary": "Bulk add/remove tags for multiple series", - "operationId": "bulk_modify_series_tags", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/BulkModifySeriesTagsRequest" - } - } - }, - "required": true - }, + "summary": "Stream scan progress updates via Server-Sent Events", + "description": "# Permission Required\n- `libraries:read`\n\n**DEPRECATED**: This endpoint is replaced by `/api/v1/tasks/stream` which provides\nreal-time updates for all task types including scans. This endpoint now filters\nthe task stream to only show scan_library tasks for backwards compatibility.", + "operationId": "scan_progress_stream", "responses": { "200": { - "description": "Tags modified", + "description": "SSE stream of scan progress updates" + }, + "403": { + "description": "Permission denied" + } + }, + "security": [ + { + "bearer_auth": [] + }, + { + "api_key": [] + } + ] + } + }, + "/api/v1/series": { + "get": { + "tags": [ + "Series" + ], + "summary": "List series with optional library filter and pagination", + "operationId": "list_series", + "parameters": [ + { + "name": "page", + "in": "query", + "description": "Page number (1-indexed, default 1)", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "description": "Number of items per page (max 100, default 50)", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (format: \"field,direction\" e.g. 
\"name,asc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "genres", + "in": "query", + "description": "Filter by genres (comma-separated, AND logic - series must have ALL specified genres)", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "tags", + "in": "query", + "description": "Filter by tags (comma-separated, AND logic - series must have ALL specified tags)", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "libraryId", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "full", + "in": "query", + "description": "Return full series data including metadata, locks, genres, tags, alternate titles,\nexternal ratings, and external links. Default is false for backward compatibility.", + "required": false, + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "description": "Paginated list of series (returns FullSeriesListResponse when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataUpdateResponse" + "$ref": "#/components/schemas/PaginatedResponse" } } } }, - "400": { - "description": "Bad request" - }, - "401": { - "description": "Unauthorized" - }, "403": { "description": "Forbidden" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7660,19 +7979,19 @@ ] } }, - "/api/v1/series/bulk/thumbnails/books/generate": { + "/api/v1/series/bulk/analyze": { "post": { "tags": [ "Bulk Operations" ], - "summary": "Bulk generate thumbnails for books in multiple series", - "description": "Enqueues a fan-out task that will generate thumbnails for all books in the specified series.\nThis is useful for regenerating thumbnails after changing thumbnail settings or fixing\ncorrupt thumbnails.", - "operationId": 
"bulk_generate_series_book_thumbnails", + "summary": "Bulk analyze multiple series", + "description": "Enqueues analysis tasks for all books in the specified series.\nSeries that don't exist are silently skipped.", + "operationId": "bulk_analyze_series", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkGenerateSeriesBookThumbnailsRequest" + "$ref": "#/components/schemas/BulkAnalyzeSeriesRequest" } } }, @@ -7680,11 +7999,11 @@ }, "responses": { "200": { - "description": "Thumbnail generation task queued", + "description": "Analysis tasks enqueued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkTaskResponse" + "$ref": "#/components/schemas/BulkAnalyzeResponse" } } } @@ -7706,19 +8025,18 @@ ] } }, - "/api/v1/series/bulk/thumbnails/generate": { + "/api/v1/series/bulk/genres": { "post": { "tags": [ "Bulk Operations" ], - "summary": "Bulk generate series thumbnails", - "description": "Enqueues a fan-out task that will generate thumbnails for the specified series.\nSeries thumbnails are derived from the first book's cover in each series.", - "operationId": "bulk_generate_series_thumbnails", + "summary": "Bulk add/remove genres for multiple series", + "operationId": "bulk_modify_series_genres", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkGenerateSeriesThumbnailsRequest" + "$ref": "#/components/schemas/BulkModifySeriesGenresRequest" } } }, @@ -7726,15 +8044,18 @@ }, "responses": { "200": { - "description": "Series thumbnail generation task queued", + "description": "Genres modified", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkTaskResponse" + "$ref": "#/components/schemas/BulkMetadataUpdateResponse" } } } }, + "400": { + "description": "Bad request" + }, "401": { "description": "Unauthorized" }, @@ -7752,19 +8073,19 @@ ] } }, - "/api/v1/series/bulk/titles/reprocess": { - "post": { + 
"/api/v1/series/bulk/metadata": { + "patch": { "tags": [ "Bulk Operations" ], - "summary": "Bulk reprocess series titles", - "description": "Enqueues a fan-out task that will reprocess titles for the specified series\nusing their library's preprocessing rules. This is useful when preprocessing\nrules are added or changed after series have already been created.", - "operationId": "bulk_reprocess_series_titles", + "summary": "Bulk patch series metadata", + "description": "Applies the same partial metadata update to multiple series at once.\nOnly provided fields will be updated. Changed fields are auto-locked.\nNon-existent series are silently skipped.", + "operationId": "bulk_patch_series_metadata", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkReprocessSeriesTitlesRequest" + "$ref": "#/components/schemas/BulkPatchSeriesMetadataRequest" } } }, @@ -7772,15 +8093,18 @@ }, "responses": { "200": { - "description": "Title reprocessing task queued", + "description": "Metadata updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkTaskResponse" + "$ref": "#/components/schemas/BulkMetadataUpdateResponse" } } } }, + "400": { + "description": "Bad request" + }, "401": { "description": "Unauthorized" }, @@ -7798,19 +8122,18 @@ ] } }, - "/api/v1/series/bulk/unread": { - "post": { + "/api/v1/series/bulk/metadata/locks": { + "put": { "tags": [ "Bulk Operations" ], - "summary": "Bulk mark multiple series as unread", - "description": "Marks all books in the specified series as unread for the authenticated user.\nSeries that don't exist are silently skipped.", - "operationId": "bulk_mark_series_as_unread", + "summary": "Bulk update metadata locks for multiple series", + "operationId": "bulk_update_series_locks", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkSeriesRequest" + "$ref": "#/components/schemas/BulkUpdateSeriesLocksRequest" } } }, 
@@ -7818,15 +8141,18 @@ }, "responses": { "200": { - "description": "Series marked as unread", + "description": "Locks updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MarkReadResponse" + "$ref": "#/components/schemas/BulkMetadataUpdateResponse" } } } }, + "400": { + "description": "Bad request" + }, "401": { "description": "Unauthorized" }, @@ -7844,58 +8170,45 @@ ] } }, - "/api/v1/series/in-progress": { - "get": { + "/api/v1/series/bulk/metadata/reset": { + "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "List series with in-progress books (series that have at least one book with reading progress that is not completed)", - "operationId": "list_in_progress_series", - "parameters": [ - { - "name": "libraryId", - "in": "query", - "description": "Filter by library ID (optional)", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" + "summary": "Bulk reset metadata for multiple series", + "description": "Resets all metadata for the specified series back to filesystem-derived defaults.\nEach series has its metadata row deleted and recreated, and all associated data\n(genres, tags, alternate titles, external IDs/ratings/links, covers, metadata sources,\nsharing tags) is cleared. 
User ratings, read progress, and book data are preserved.\n\nSeries that don't exist are silently skipped.", + "operationId": "bulk_reset_series_metadata", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BulkSeriesRequest" + } } }, - { - "name": "full", - "in": "query", - "description": "Return full series data including metadata, locks, genres, tags, etc.", - "required": false, - "schema": { - "type": "boolean" - } - } - ], + "required": true + }, "responses": { "200": { - "description": "List of in-progress series (returns Vec when full=true)", + "description": "Metadata reset for specified series", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SeriesDto" - } + "$ref": "#/components/schemas/BulkMetadataResetResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -7903,67 +8216,19 @@ ] } }, - "/api/v1/series/list": { + "/api/v1/series/bulk/read": { "post": { "tags": [ - "Series" - ], - "summary": "List series with advanced filtering", - "description": "Supports complex filter conditions including nested AllOf/AnyOf logic,\ngenre/tag filtering with include/exclude, and more.\n\nPagination parameters (page, pageSize, sort) are passed as query parameters.\nFilter conditions are passed in the request body.", - "operationId": "list_series_filtered", - "parameters": [ - { - "name": "page", - "in": "query", - "description": "Page number (1-indexed, minimum 1)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "default": 1, - "minimum": 1 - } - }, - { - "name": "pageSize", - "in": "query", - "description": "Number of items per page (max 500, default 50)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "default": 50, - "maximum": 500, - "minimum": 1 - } - }, - { - 
"name": "sort", - "in": "query", - "description": "Sort field and direction (e.g., \"name,asc\" or \"createdAt,desc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "full", - "in": "query", - "description": "Return full data including metadata, locks, and related entities.\nDefault is false for backward compatibility.", - "required": false, - "schema": { - "type": "boolean" - } - } + "Bulk Operations" ], + "summary": "Bulk mark multiple series as read", + "description": "Marks all books in the specified series as read for the authenticated user.\nSeries that don't exist are silently skipped.", + "operationId": "bulk_mark_series_as_read", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesListRequest" + "$ref": "#/components/schemas/BulkSeriesRequest" } } }, @@ -7971,22 +8236,25 @@ }, "responses": { "200": { - "description": "Paginated list of filtered series (returns FullSeriesListResponse when full=true)", + "description": "Series marked as read", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PaginatedResponse" + "$ref": "#/components/schemas/MarkReadResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -7994,19 +8262,19 @@ ] } }, - "/api/v1/series/list/alphabetical-groups": { + "/api/v1/series/bulk/renumber": { "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "Get alphabetical groups for series", - "description": "Returns a list of alphabetical groups with counts, showing how many series\nstart with each letter/character. 
This is useful for building A-Z navigation.\nThe same filters as list_series_filtered can be applied.", - "operationId": "list_series_alphabetical_groups", + "summary": "Bulk renumber books in multiple series", + "description": "Enqueues a fan-out task that will renumber books in the specified series\nusing each library's number strategy. Returns a task ID for tracking progress via SSE.", + "operationId": "bulk_renumber_series", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesListRequest" + "$ref": "#/components/schemas/BulkRenumberSeriesRequest" } } }, @@ -8014,25 +8282,25 @@ }, "responses": { "200": { - "description": "List of alphabetical groups with counts", + "description": "Renumber task queued", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/AlphabeticalGroupDto" - } + "$ref": "#/components/schemas/BulkTaskResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8040,19 +8308,18 @@ ] } }, - "/api/v1/series/metadata/auto-match/task/bulk": { + "/api/v1/series/bulk/tags": { "post": { "tags": [ - "Plugin Actions" + "Bulk Operations" ], - "summary": "Enqueue plugin auto-match tasks for multiple series (bulk operation)", - "description": "Creates background tasks to auto-match metadata for multiple series using the specified plugin.\nEach series gets its own task that runs asynchronously in a worker process.", - "operationId": "enqueue_bulk_auto_match_tasks", + "summary": "Bulk add/remove tags for multiple series", + "operationId": "bulk_modify_series_tags", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueBulkAutoMatchRequest" + "$ref": "#/components/schemas/BulkModifySeriesTagsRequest" } } }, @@ -8060,26 +8327,23 @@ }, "responses": { "200": { - "description": "Tasks 
enqueued", + "description": "Tags modified", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueAutoMatchResponse" + "$ref": "#/components/schemas/BulkMetadataUpdateResponse" } } } }, "400": { - "description": "Invalid request" + "description": "Bad request" }, "401": { "description": "Unauthorized" }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Plugin not found" + "description": "Forbidden" } }, "security": [ @@ -8092,69 +8356,45 @@ ] } }, - "/api/v1/series/recently-added": { - "get": { + "/api/v1/series/bulk/thumbnails/books/generate": { + "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "List recently added series", - "operationId": "list_recently_added_series", - "parameters": [ - { - "name": "limit", - "in": "query", - "description": "Maximum number of series to return (default: 50)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - }, - { - "name": "libraryId", - "in": "query", - "description": "Filter by library ID (optional)", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" + "summary": "Bulk generate thumbnails for books in multiple series", + "description": "Enqueues a fan-out task that will generate thumbnails for all books in the specified series.\nThis is useful for regenerating thumbnails after changing thumbnail settings or fixing\ncorrupt thumbnails.", + "operationId": "bulk_generate_series_book_thumbnails", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BulkGenerateSeriesBookThumbnailsRequest" + } } }, - { - "name": "full", - "in": "query", - "description": "Return full series data including metadata, locks, genres, tags, etc.", - "required": false, - "schema": { - "type": "boolean" - } - } - ], + "required": true + }, "responses": { "200": { - "description": "List of recently added series (returns Vec 
when full=true)", + "description": "Thumbnail generation task queued", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SeriesDto" - } + "$ref": "#/components/schemas/BulkTaskResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8162,69 +8402,45 @@ ] } }, - "/api/v1/series/recently-updated": { - "get": { + "/api/v1/series/bulk/thumbnails/generate": { + "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "List recently updated series", - "operationId": "list_recently_updated_series", - "parameters": [ - { - "name": "limit", - "in": "query", - "description": "Maximum number of series to return (default: 50)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - }, - { - "name": "libraryId", - "in": "query", - "description": "Filter by library ID (optional)", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" + "summary": "Bulk generate series thumbnails", + "description": "Enqueues a fan-out task that will generate thumbnails for the specified series.\nSeries thumbnails are derived from the first book's cover in each series.", + "operationId": "bulk_generate_series_thumbnails", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BulkGenerateSeriesThumbnailsRequest" + } } }, - { - "name": "full", - "in": "query", - "description": "Return full series data including metadata, locks, genres, tags, etc.", - "required": false, - "schema": { - "type": "boolean" - } - } - ], + "required": true + }, "responses": { "200": { - "description": "List of recently updated series (returns Vec when full=true)", + "description": "Series thumbnail generation task queued", "content": { "application/json": { "schema": { - "type": "array", - 
"items": { - "$ref": "#/components/schemas/SeriesDto" - } + "$ref": "#/components/schemas/BulkTaskResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8232,18 +8448,19 @@ ] } }, - "/api/v1/series/search": { + "/api/v1/series/bulk/titles/reprocess": { "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "Search series by name", - "operationId": "search_series", + "summary": "Bulk reprocess series titles", + "description": "Enqueues a fan-out task that will reprocess titles for the specified series\nusing their library's preprocessing rules. This is useful when preprocessing\nrules are added or changed after series have already been created.", + "operationId": "bulk_reprocess_series_titles", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SearchSeriesRequest" + "$ref": "#/components/schemas/BulkReprocessSeriesTitlesRequest" } } }, @@ -8251,25 +8468,25 @@ }, "responses": { "200": { - "description": "Search results (returns Vec when full=true)", + "description": "Title reprocessing task queued", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SeriesDto" - } + "$ref": "#/components/schemas/BulkTaskResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8277,19 +8494,19 @@ ] } }, - "/api/v1/series/thumbnails/generate": { + "/api/v1/series/bulk/track-for-releases": { "post": { "tags": [ - "Thumbnails" + "Bulk Operations" ], - "summary": "Generate thumbnails for series in a scope", - "description": "This queues a fan-out task that enqueues individual series thumbnail generation tasks.\nSeries thumbnails are the cover images displayed for each series (derived from the first book's 
cover).\n\n**Scope priority:**\n1. If `series_ids` is provided, only those specific series\n2. If `library_id` is provided, only series in that library\n3. If neither provided, all series in all libraries\n\n**Force behavior:**\n- `force: false` (default): Only generates thumbnails for series that don't have one\n- `force: true`: Regenerates all thumbnails, replacing existing ones\n\n# Permission Required\n- `tasks:write`", - "operationId": "generate_series_thumbnails", + "summary": "Bulk-enable release tracking for multiple series.", + "description": "For each `series_id` in the request, flips `series_tracking.tracked` to\n`true` and runs the seed pass (auto-derives aliases, `latest_known_*`,\n`track_chapters` / `track_volumes` from existing data). Series that don't\nexist are reported as `outcome: skipped`. Series already tracked are\nreported as `outcome: skipped, detail: \"already tracked\"` and the seed is\nnot re-run (idempotent — a re-run would simply re-derive identical\nvalues, but we skip the work).\n\nMirrors the per-series PATCH `false -> true` transition: same seed\nfunction, same idempotency guarantees.", + "operationId": "bulk_track_series_for_releases", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenerateSeriesThumbnailsRequest" + "$ref": "#/components/schemas/BulkSeriesRequest" } } }, @@ -8297,17 +8514,20 @@ }, "responses": { "200": { - "description": "Series thumbnail generation task queued", + "description": "Bulk-tracked series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/BulkTrackForReleasesResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { - "description": "Permission denied" + "description": "Forbidden" } }, "security": [ @@ -8320,19 +8540,19 @@ ] } }, - "/api/v1/series/titles/reprocess": { + "/api/v1/series/bulk/unread": { "post": { "tags": [ - "Tasks" + "Bulk Operations" ], - 
"summary": "Reprocess series titles in a scope", - "description": "This queues a fan-out task that enqueues individual series title reprocessing tasks.\nApplies the library's preprocessing rules to regenerate display titles.\n\n**Scope priority:**\n1. If `series_ids` is provided, only those specific series\n2. If `library_id` is provided, only series in that library\n3. If neither provided, all series in all libraries\n\n**Lock behavior:**\n- Series with `title_lock: true` are skipped\n- If title changes and `title_sort_lock` is false, `title_sort` is cleared\n\n# Permission Required\n- `series:write`", - "operationId": "reprocess_series_titles", + "summary": "Bulk mark multiple series as unread", + "description": "Marks all books in the specified series as unread for the authenticated user.\nSeries that don't exist are silently skipped.", + "operationId": "bulk_mark_series_as_unread", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ReprocessSeriesTitlesRequest" + "$ref": "#/components/schemas/BulkSeriesRequest" } } }, @@ -8340,17 +8560,17 @@ }, "responses": { "200": { - "description": "Task enqueued", + "description": "Series marked as unread", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueReprocessTitleResponse" + "$ref": "#/components/schemas/MarkReadResponse" } } } }, - "400": { - "description": "Invalid request" + "401": { + "description": "Unauthorized" }, "403": { "description": "Forbidden" @@ -8366,31 +8586,19 @@ ] } }, - "/api/v1/series/{id}/metadata/apply": { + "/api/v1/series/bulk/untrack-for-releases": { "post": { "tags": [ - "Plugin Actions" - ], - "summary": "Apply metadata from a plugin to a series", - "description": "Fetches metadata from a plugin and applies it to the series, respecting\nRBAC permissions and field locks.", - "operationId": "apply_series_metadata", - "parameters": [ - { - "name": "id", - "in": "path", - "description": "Series ID", - "required": 
true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Bulk Operations" ], + "summary": "Bulk-disable release tracking for multiple series.", + "description": "Flips `series_tracking.tracked` to `false`. Does not delete aliases,\n`latest_known_*`, or other tracking config — the user can re-track\nwithout losing customizations, and the seed will re-derive any\nauto-derived fields on the next track-on transition.", + "operationId": "bulk_untrack_series_for_releases", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataApplyRequest" + "$ref": "#/components/schemas/BulkSeriesRequest" } } }, @@ -8398,26 +8606,20 @@ }, "responses": { "200": { - "description": "Metadata applied", + "description": "Bulk-untracked series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataApplyResponse" + "$ref": "#/components/schemas/BulkTrackForReleasesResponse" } } } }, - "400": { - "description": "Invalid request" - }, "401": { "description": "Unauthorized" }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Series or plugin not found" + "description": "Forbidden" } }, "security": [ @@ -8430,63 +8632,58 @@ ] } }, - "/api/v1/series/{id}/metadata/auto-match": { - "post": { + "/api/v1/series/in-progress": { + "get": { "tags": [ - "Plugin Actions" + "Series" ], - "summary": "Auto-match and apply metadata from a plugin to a series", - "description": "Searches for the series using the plugin's metadata search, picks the best match,\nand applies the metadata in one step. 
This is a convenience endpoint for quick\nmetadata updates without user intervention.", - "operationId": "auto_match_series_metadata", + "summary": "List series with in-progress books (series that have at least one book with reading progress that is not completed)", + "operationId": "list_in_progress_series", "parameters": [ { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, + "name": "libraryId", + "in": "query", + "description": "Filter by library ID (optional)", + "required": false, "schema": { - "type": "string", + "type": [ + "string", + "null" + ], "format": "uuid" } + }, + { + "name": "full", + "in": "query", + "description": "Return full series data including metadata, locks, genres, tags, etc.", + "required": false, + "schema": { + "type": "boolean" + } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MetadataAutoMatchRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Auto-match completed", + "description": "List of in-progress series (returns Vec when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataAutoMatchResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesDto" + } } } } }, - "400": { - "description": "Invalid request" - }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Series or plugin not found or no match found" + "description": "Forbidden" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -8494,23 +8691,59 @@ ] } }, - "/api/v1/series/{id}/metadata/auto-match/task": { + "/api/v1/series/list": { "post": { "tags": [ - "Plugin Actions" + "Series" ], - "summary": "Enqueue a plugin auto-match task for a single series", - "description": "Creates a background task to auto-match metadata for a series using the specified 
plugin.\nThe task runs asynchronously in a worker process and emits a SeriesMetadataUpdated event\nwhen complete.", - "operationId": "enqueue_auto_match_task", + "summary": "List series with advanced filtering", + "description": "Supports complex filter conditions including nested AllOf/AnyOf logic,\ngenre/tag filtering with include/exclude, and more.\n\nPagination parameters (page, pageSize, sort) are passed as query parameters.\nFilter conditions are passed in the request body.", + "operationId": "list_series_filtered", "parameters": [ { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, + "name": "page", + "in": "query", + "description": "Page number (1-indexed, minimum 1)", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int64", + "default": 1, + "minimum": 1 + } + }, + { + "name": "pageSize", + "in": "query", + "description": "Number of items per page (max 500, default 50)", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "default": 50, + "maximum": 500, + "minimum": 1 + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort field and direction (e.g., \"name,asc\" or \"createdAt,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "full", + "in": "query", + "description": "Return full data including metadata, locks, and related entities.\nDefault is false for backward compatibility.", + "required": false, + "schema": { + "type": "boolean" } } ], @@ -8518,7 +8751,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueAutoMatchRequest" + "$ref": "#/components/schemas/SeriesListRequest" } } }, @@ -8526,31 +8759,22 @@ }, "responses": { "200": { - "description": "Task enqueued", + "description": "Paginated list of filtered series (returns FullSeriesListResponse when full=true)", "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/EnqueueAutoMatchResponse" + "$ref": "#/components/schemas/PaginatedResponse" } } } }, - "400": { - "description": "Invalid request" - }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Series or plugin not found" + "description": "Forbidden" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -8558,31 +8782,19 @@ ] } }, - "/api/v1/series/{id}/metadata/preview": { + "/api/v1/series/list/alphabetical-groups": { "post": { "tags": [ - "Plugin Actions" - ], - "summary": "Preview metadata from a plugin for a series", - "description": "Fetches metadata from a plugin and computes a field-by-field diff with the current\nseries metadata, showing which fields will be applied, locked, or denied by RBAC.", - "operationId": "preview_series_metadata", - "parameters": [ - { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Series" ], + "summary": "Get alphabetical groups for series", + "description": "Returns a list of alphabetical groups with counts, showing how many series\nstart with each letter/character. 
This is useful for building A-Z navigation.\nThe same filters as list_series_filtered can be applied.", + "operationId": "list_series_alphabetical_groups", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataPreviewRequest" + "$ref": "#/components/schemas/SeriesListRequest" } } }, @@ -8590,31 +8802,25 @@ }, "responses": { "200": { - "description": "Preview computed", + "description": "List of alphabetical groups with counts", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataPreviewResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/AlphabeticalGroupDto" + } } } } }, - "400": { - "description": "Invalid request" - }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Series or plugin not found" + "description": "Forbidden" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -8622,52 +8828,46 @@ ] } }, - "/api/v1/series/{id}/metadata/search-title": { - "get": { + "/api/v1/series/metadata/auto-match/task/bulk": { + "post": { "tags": [ "Plugin Actions" ], - "summary": "Get the preprocessed search title for a series", - "description": "Returns the series title after applying plugin and library preprocessing rules.\nUse this to get the correct search query before opening the metadata search modal.", - "operationId": "get_series_search_title", - "parameters": [ - { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" + "summary": "Enqueue plugin auto-match tasks for multiple series (bulk operation)", + "description": "Creates background tasks to auto-match metadata for multiple series using the specified plugin.\nEach series gets its own task that runs asynchronously in a worker process.", + "operationId": "enqueue_bulk_auto_match_tasks", + "requestBody": { + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EnqueueBulkAutoMatchRequest" + } } }, - { - "name": "pluginId", - "in": "query", - "description": "Plugin ID to get preprocessing rules from", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } - ], + "required": true + }, "responses": { "200": { - "description": "Preprocessed search title", + "description": "Tasks enqueued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SearchTitleResponse" + "$ref": "#/components/schemas/EnqueueAutoMatchResponse" } } } }, + "400": { + "description": "Invalid request" + }, "401": { "description": "Unauthorized" }, + "403": { + "description": "No permission to edit series" + }, "404": { - "description": "Series or plugin not found" + "description": "Plugin not found" } }, "security": [ @@ -8680,21 +8880,35 @@ ] } }, - "/api/v1/series/{series_id}": { + "/api/v1/series/recently-added": { "get": { "tags": [ "Series" ], - "summary": "Get series by ID", - "operationId": "get_series", + "summary": "List recently added series", + "operationId": "list_recently_added_series", "parameters": [ { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, + "name": "limit", + "in": "query", + "description": "Maximum number of series to return (default: 50)", + "required": false, "schema": { - "type": "string", + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "libraryId", + "in": "query", + "description": "Filter by library ID (optional)", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], "format": "uuid" } }, @@ -8710,17 +8924,20 @@ ], "responses": { "200": { - "description": "Series details (returns FullSeriesResponse when full=true)", + "description": "List of recently added series (returns Vec when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesDto" + "type": 
"array", + "items": { + "$ref": "#/components/schemas/SeriesDto" + } } } } }, - "404": { - "description": "Series not found" + "403": { + "description": "Forbidden" } }, "security": [ @@ -8731,31 +8948,90 @@ "api_key": [] } ] - }, - "patch": { + } + }, + "/api/v1/series/recently-updated": { + "get": { "tags": [ "Series" ], - "summary": "Update series core fields (name/title)", - "description": "Partially updates series_metadata fields. Only provided fields will be updated.\nAbsent fields are unchanged. When name is set to a non-null value, it is automatically locked.", - "operationId": "patch_series", + "summary": "List recently updated series", + "operationId": "list_recently_updated_series", "parameters": [ { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, + "name": "limit", + "in": "query", + "description": "Maximum number of series to return (default: 50)", + "required": false, "schema": { - "type": "string", + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "libraryId", + "in": "query", + "description": "Filter by library ID (optional)", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], "format": "uuid" } + }, + { + "name": "full", + "in": "query", + "description": "Return full series data including metadata, locks, genres, tags, etc.", + "required": false, + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "description": "List of recently updated series (returns Vec when full=true)", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesDto" + } + } + } + } + }, + "403": { + "description": "Forbidden" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] } + ] + } + }, + "/api/v1/series/search": { + "post": { + "tags": [ + "Series" ], + "summary": "Search series by name", + "operationId": "search_series", "requestBody": { "content": { "application/json": 
{ "schema": { - "$ref": "#/components/schemas/PatchSeriesRequest" + "$ref": "#/components/schemas/SearchSeriesRequest" } } }, @@ -8763,20 +9039,20 @@ }, "responses": { "200": { - "description": "Series updated successfully", + "description": "Search results (returns Vec when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesUpdateResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesDto" + } } } } }, "403": { "description": "Forbidden" - }, - "404": { - "description": "Series not found" } }, "security": [ @@ -8789,101 +9065,88 @@ ] } }, - "/api/v1/series/{series_id}/alternate-titles": { - "get": { + "/api/v1/series/thumbnails/generate": { + "post": { "tags": [ - "Series" + "Thumbnails" ], - "summary": "Get alternate titles for a series", - "operationId": "get_series_alternate_titles", - "parameters": [ - { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" + "summary": "Generate thumbnails for series in a scope", + "description": "This queues a fan-out task that enqueues individual series thumbnail generation tasks.\nSeries thumbnails are the cover images displayed for each series (derived from the first book's cover).\n\n**Scope priority:**\n1. If `series_ids` is provided, only those specific series\n2. If `library_id` is provided, only series in that library\n3. 
If neither provided, all series in all libraries\n\n**Force behavior:**\n- `force: false` (default): Only generates thumbnails for series that don't have one\n- `force: true`: Regenerates all thumbnails, replacing existing ones\n\n# Permission Required\n- `tasks:write`", + "operationId": "generate_series_thumbnails", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GenerateSeriesThumbnailsRequest" + } } - } - ], + }, + "required": true + }, "responses": { "200": { - "description": "List of alternate titles for the series", + "description": "Series thumbnail generation task queued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AlternateTitleListResponse" + "$ref": "#/components/schemas/CreateTaskResponse" } } } }, "403": { - "description": "Forbidden" - }, - "404": { - "description": "Series not found" + "description": "Permission denied" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - }, + } + }, + "/api/v1/series/titles/reprocess": { "post": { "tags": [ - "Series" - ], - "summary": "Add an alternate title to a series", - "operationId": "create_alternate_title", - "parameters": [ - { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Tasks" ], + "summary": "Reprocess series titles in a scope", + "description": "This queues a fan-out task that enqueues individual series title reprocessing tasks.\nApplies the library's preprocessing rules to regenerate display titles.\n\n**Scope priority:**\n1. If `series_ids` is provided, only those specific series\n2. If `library_id` is provided, only series in that library\n3. 
If neither provided, all series in all libraries\n\n**Lock behavior:**\n- Series with `title_lock: true` are skipped\n- If title changes and `title_sort_lock` is false, `title_sort` is cleared\n\n# Permission Required\n- `series:write`", + "operationId": "reprocess_series_titles", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateAlternateTitleRequest" + "$ref": "#/components/schemas/ReprocessSeriesTitlesRequest" } } }, "required": true }, "responses": { - "201": { - "description": "Alternate title created", + "200": { + "description": "Task enqueued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AlternateTitleDto" + "$ref": "#/components/schemas/EnqueueReprocessTitleResponse" } } } }, + "400": { + "description": "Invalid request" + }, "403": { "description": "Forbidden" - }, - "404": { - "description": "Series not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8891,16 +9154,17 @@ ] } }, - "/api/v1/series/{series_id}/alternate-titles/{title_id}": { - "delete": { + "/api/v1/series/{id}/metadata/apply": { + "post": { "tags": [ - "Series" + "Plugin Actions" ], - "summary": "Delete an alternate title", - "operationId": "delete_alternate_title", + "summary": "Apply metadata from a plugin to a series", + "description": "Fetches metadata from a plugin and applies it to the series, respecting\nRBAC permissions and field locks.", + "operationId": "apply_series_metadata", "parameters": [ { - "name": "series_id", + "name": "id", "in": "path", "description": "Series ID", "required": true, @@ -8908,47 +9172,63 @@ "type": "string", "format": "uuid" } - }, - { - "name": "title_id", - "in": "path", - "description": "Alternate title ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataApplyRequest" + } + } + }, 
+ "required": true + }, "responses": { - "204": { - "description": "Alternate title deleted" + "200": { + "description": "Metadata applied", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataApplyResponse" + } + } + } + }, + "400": { + "description": "Invalid request" + }, + "401": { + "description": "Unauthorized" }, "403": { - "description": "Forbidden" + "description": "No permission to edit series" }, "404": { - "description": "Series or title not found" + "description": "Series or plugin not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - }, - "patch": { + } + }, + "/api/v1/series/{id}/metadata/auto-match": { + "post": { "tags": [ - "Series" + "Plugin Actions" ], - "summary": "Update an alternate title", - "operationId": "update_alternate_title", + "summary": "Auto-match and apply metadata from a plugin to a series", + "description": "Searches for the series using the plugin's metadata search, picks the best match,\nand applies the metadata in one step. 
This is a convenience endpoint for quick\nmetadata updates without user intervention.", + "operationId": "auto_match_series_metadata", "parameters": [ { - "name": "series_id", + "name": "id", "in": "path", "description": "Series ID", "required": true, @@ -8956,11 +9236,65 @@ "type": "string", "format": "uuid" } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataAutoMatchRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Auto-match completed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataAutoMatchResponse" + } + } + } + }, + "400": { + "description": "Invalid request" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "No permission to edit series" + }, + "404": { + "description": "Series or plugin not found or no match found" + } + }, + "security": [ + { + "bearer_auth": [] }, { - "name": "title_id", + "api_key": [] + } + ] + } + }, + "/api/v1/series/{id}/metadata/auto-match/task": { + "post": { + "tags": [ + "Plugin Actions" + ], + "summary": "Enqueue a plugin auto-match task for a single series", + "description": "Creates a background task to auto-match metadata for a series using the specified plugin.\nThe task runs asynchronously in a worker process and emits a SeriesMetadataUpdated event\nwhen complete.", + "operationId": "enqueue_auto_match_task", + "parameters": [ + { + "name": "id", "in": "path", - "description": "Alternate title ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -8972,7 +9306,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdateAlternateTitleRequest" + "$ref": "#/components/schemas/EnqueueAutoMatchRequest" } } }, @@ -8980,25 +9314,31 @@ }, "responses": { "200": { - "description": "Alternate title updated", + "description": "Task enqueued", "content": { "application/json": { "schema": 
{ - "$ref": "#/components/schemas/AlternateTitleDto" + "$ref": "#/components/schemas/EnqueueAutoMatchResponse" } } } }, + "400": { + "description": "Invalid request" + }, + "401": { + "description": "Unauthorized" + }, "403": { - "description": "Forbidden" + "description": "No permission to edit series" }, "404": { - "description": "Series or title not found" + "description": "Series or plugin not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -9006,17 +9346,17 @@ ] } }, - "/api/v1/series/{series_id}/analyze": { + "/api/v1/series/{id}/metadata/preview": { "post": { "tags": [ - "Scans" + "Plugin Actions" ], - "summary": "Trigger analysis of all books in a series", - "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues an AnalyzeSeries task which will create individual AnalyzeBook tasks\nfor each book in the series. All books are analyzed with force=true.\nReturns immediately with a task_id to track progress.", - "operationId": "trigger_series_analysis", + "summary": "Preview metadata from a plugin for a series", + "description": "Fetches metadata from a plugin and computes a field-by-field diff with the current\nseries metadata, showing which fields will be applied, locked, or denied by RBAC.", + "operationId": "preview_series_metadata", "parameters": [ { - "name": "series_id", + "name": "id", "in": "path", "description": "Series ID", "required": true, @@ -9026,22 +9366,38 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataPreviewRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Analysis task enqueued successfully", + "description": "Preview computed", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/MetadataPreviewResponse" } } } }, + "400": { + "description": "Invalid request" + }, + "401": { + "description": 
"Unauthorized" + }, "403": { - "description": "Permission denied" + "description": "No permission to edit series" }, "404": { - "description": "Series not found" + "description": "Series or plugin not found" } }, "security": [ @@ -9054,17 +9410,17 @@ ] } }, - "/api/v1/series/{series_id}/analyze-unanalyzed": { - "post": { + "/api/v1/series/{id}/metadata/search-title": { + "get": { "tags": [ - "Scans" + "Plugin Actions" ], - "summary": "Trigger analysis of unanalyzed books in a series", - "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues AnalyzeBook tasks (with force=false) for books in the series that have not been analyzed yet.\nThis is useful for recovering from failures or analyzing newly discovered books.\nReturns immediately with a task_id to track progress.", - "operationId": "trigger_series_unanalyzed_analysis", + "summary": "Get the preprocessed search title for a series", + "description": "Returns the series title after applying plugin and library preprocessing rules.\nUse this to get the correct search query before opening the metadata search modal.", + "operationId": "get_series_search_title", "parameters": [ { - "name": "series_id", + "name": "id", "in": "path", "description": "Series ID", "required": true, @@ -9072,24 +9428,34 @@ "type": "string", "format": "uuid" } + }, + { + "name": "pluginId", + "in": "query", + "description": "Plugin ID to get preprocessing rules from", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { "200": { - "description": "Analysis tasks enqueued successfully", + "description": "Preprocessed search title", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/SearchTitleResponse" } } } }, - "403": { - "description": "Permission denied" + "401": { + "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "Series or plugin not found" } }, 
"security": [ @@ -9102,13 +9468,13 @@ ] } }, - "/api/v1/series/{series_id}/books": { + "/api/v1/series/{series_id}": { "get": { "tags": [ "Series" ], - "summary": "Get books in a series", - "operationId": "get_series_books", + "summary": "Get series by ID", + "operationId": "get_series", "parameters": [ { "name": "series_id", @@ -9120,19 +9486,10 @@ "format": "uuid" } }, - { - "name": "includeDeleted", - "in": "query", - "description": "Include deleted books in the result", - "required": false, - "schema": { - "type": "boolean" - } - }, { "name": "full", "in": "query", - "description": "Return full data including metadata and locks.\nDefault is false for backward compatibility.", + "description": "Return full series data including metadata, locks, genres, tags, etc.", "required": false, "schema": { "type": "boolean" @@ -9141,21 +9498,15 @@ ], "responses": { "200": { - "description": "List of books in the series (returns Vec when full=true)", + "description": "Series details (returns FullSeriesResponse when full=true)", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/BookDto" - } + "$ref": "#/components/schemas/SeriesDto" } } } }, - "403": { - "description": "Forbidden" - }, "404": { "description": "Series not found" } @@ -9168,15 +9519,14 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/cover": { - "post": { + }, + "patch": { "tags": [ "Series" ], - "summary": "Upload a custom cover/poster for a series", - "operationId": "upload_series_cover", + "summary": "Update series core fields (name/title)", + "description": "Partially updates series_metadata fields. Only provided fields will be updated.\nAbsent fields are unchanged. 
When name is set to a non-null value, it is automatically locked.", + "operationId": "patch_series", "parameters": [ { "name": "series_id", @@ -9190,11 +9540,10 @@ } ], "requestBody": { - "description": "Multipart form with image file", "content": { - "multipart/form-data": { + "application/json": { "schema": { - "type": "object" + "$ref": "#/components/schemas/PatchSeriesRequest" } } }, @@ -9202,10 +9551,14 @@ }, "responses": { "200": { - "description": "Cover uploaded successfully" - }, - "400": { - "description": "Invalid image or request" + "description": "Series updated successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SeriesUpdateResponse" + } + } + } }, "403": { "description": "Forbidden" @@ -9224,13 +9577,13 @@ ] } }, - "/api/v1/series/{series_id}/cover/source": { - "patch": { + "/api/v1/series/{series_id}/aliases": { + "get": { "tags": [ - "Series" + "Tracking" ], - "summary": "Set which cover source to use for a series (partial update)", - "operationId": "set_series_cover_source", + "summary": "List release-matching aliases for a series.", + "operationId": "list_series_aliases", "parameters": [ { "name": "series_id", @@ -9243,19 +9596,16 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SelectCoverSourceRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Cover source updated successfully" + "description": "List of aliases", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SeriesAliasListResponse" + } + } + } }, "403": { "description": "Forbidden" @@ -9272,15 +9622,14 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/covers": { - "get": { + }, + "post": { "tags": [ - "Series" + "Tracking" ], - "summary": "List all covers for a series", - "operationId": "list_series_covers", + "summary": "Create a release-matching alias for a series.", + "description": "Idempotent: 
if `(series_id, alias)` already exists, returns the existing\nrow with HTTP 200 instead of inserting a duplicate.", + "operationId": "create_series_alias", "parameters": [ { "name": "series_id", @@ -9293,17 +9642,40 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSeriesAliasRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "List of series covers", + "description": "Alias already existed (idempotent)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesCoverListResponse" + "$ref": "#/components/schemas/SeriesAliasDto" + } + } + } + }, + "201": { + "description": "Alias created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SeriesAliasDto" } } } }, + "400": { + "description": "Invalid alias (empty after normalization)" + }, "403": { "description": "Forbidden" }, @@ -9321,13 +9693,13 @@ ] } }, - "/api/v1/series/{series_id}/covers/selected": { + "/api/v1/series/{series_id}/aliases/{alias_id}": { "delete": { "tags": [ - "Series" + "Tracking" ], - "summary": "Reset series cover to default (deselect all custom covers)", - "operationId": "reset_series_cover", + "summary": "Delete a release-matching alias.", + "operationId": "delete_series_alias", "parameters": [ { "name": "series_id", @@ -9338,17 +9710,27 @@ "type": "string", "format": "uuid" } + }, + { + "name": "alias_id", + "in": "path", + "description": "Alias ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { "204": { - "description": "Reset to default cover successfully" + "description": "Alias deleted" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series or alias not found" } }, "security": [ @@ -9361,13 +9743,13 @@ ] } }, - "/api/v1/series/{series_id}/covers/{cover_id}": { - "delete": { + 
"/api/v1/series/{series_id}/alternate-titles": { + "get": { "tags": [ "Series" ], - "summary": "Delete a cover from a series", - "operationId": "delete_series_cover", + "summary": "Get alternate titles for a series", + "operationId": "get_series_alternate_titles", "parameters": [ { "name": "series_id", @@ -9378,11 +9760,46 @@ "type": "string", "format": "uuid" } + } + ], + "responses": { + "200": { + "description": "List of alternate titles for the series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AlternateTitleListResponse" + } + } + } }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ { - "name": "cover_id", + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + }, + "post": { + "tags": [ + "Series" + ], + "summary": "Add an alternate title to a series", + "operationId": "create_alternate_title", + "parameters": [ + { + "name": "series_id", "in": "path", - "description": "Cover ID to delete", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -9390,18 +9807,32 @@ } } ], - "responses": { - "204": { - "description": "Cover deleted successfully" + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateAlternateTitleRequest" + } + } }, - "400": { - "description": "Cannot delete the only selected cover" + "required": true + }, + "responses": { + "201": { + "description": "Alternate title created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AlternateTitleDto" + } + } + } }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or cover not found" + "description": "Series not found" } }, "security": [ @@ -9414,14 +9845,13 @@ ] } }, - "/api/v1/series/{series_id}/covers/{cover_id}/image": { - "get": { + "/api/v1/series/{series_id}/alternate-titles/{title_id}": { + "delete": { "tags": [ "Series" ], - "summary": "Get a 
specific cover image for a series", - "description": "Supports HTTP conditional caching with ETag and Last-Modified headers,\nreturning 304 Not Modified when the client has a valid cached copy.", - "operationId": "get_series_cover_image", + "summary": "Delete an alternate title", + "operationId": "delete_alternate_title", "parameters": [ { "name": "series_id", @@ -9434,9 +9864,9 @@ } }, { - "name": "cover_id", + "name": "title_id", "in": "path", - "description": "Cover ID", + "description": "Alternate title ID", "required": true, "schema": { "type": "string", @@ -9445,20 +9875,14 @@ } ], "responses": { - "200": { - "description": "Cover image", - "content": { - "image/jpeg": {} - } - }, - "304": { - "description": "Not modified (client cache is valid)" + "204": { + "description": "Alternate title deleted" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or cover not found" + "description": "Series or title not found" } }, "security": [ @@ -9469,15 +9893,13 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/covers/{cover_id}/select": { - "put": { + }, + "patch": { "tags": [ "Series" ], - "summary": "Select a cover as the primary cover for a series", - "operationId": "select_series_cover", + "summary": "Update an alternate title", + "operationId": "update_alternate_title", "parameters": [ { "name": "series_id", @@ -9490,9 +9912,9 @@ } }, { - "name": "cover_id", + "name": "title_id", "in": "path", - "description": "Cover ID to select", + "description": "Alternate title ID", "required": true, "schema": { "type": "string", @@ -9500,13 +9922,23 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateAlternateTitleRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Cover selected successfully", + "description": "Alternate title updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesCoverDto" + 
"$ref": "#/components/schemas/AlternateTitleDto" } } } @@ -9515,7 +9947,7 @@ "description": "Forbidden" }, "404": { - "description": "Series or cover not found" + "description": "Series or title not found" } }, "security": [ @@ -9528,14 +9960,14 @@ ] } }, - "/api/v1/series/{series_id}/download": { - "get": { + "/api/v1/series/{series_id}/analyze": { + "post": { "tags": [ - "Series" + "Scans" ], - "summary": "Download all books in a series as a zip file", - "description": "Creates a zip archive containing all detected books in the series.\nOnly includes books that were scanned and detected by the library scanner.", - "operationId": "download_series", + "summary": "Trigger analysis of all books in a series", + "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues an AnalyzeSeries task which will create individual AnalyzeBook tasks\nfor each book in the series. All books are analyzed with force=true.\nReturns immediately with a task_id to track progress.", + "operationId": "trigger_series_analysis", "parameters": [ { "name": "series_id", @@ -9550,21 +9982,25 @@ ], "responses": { "200": { - "description": "Zip file containing all books in the series", + "description": "Analysis task enqueued successfully", "content": { - "application/zip": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateTaskResponse" + } + } } }, "403": { - "description": "Forbidden" + "description": "Permission denied" }, "404": { - "description": "Series not found or has no books" + "description": "Series not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -9572,13 +10008,14 @@ ] } }, - "/api/v1/series/{series_id}/external-ids": { - "get": { + "/api/v1/series/{series_id}/analyze-unanalyzed": { + "post": { "tags": [ - "Series" + "Scans" ], - "summary": "List all external IDs for a series", - "operationId": "list_series_external_ids", + "summary": "Trigger analysis of unanalyzed books in a series", + 
"description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues AnalyzeBook tasks (with force=false) for books in the series that have not been analyzed yet.\nThis is useful for recovering from failures or analyzing newly discovered books.\nReturns immediately with a task_id to track progress.", + "operationId": "trigger_series_unanalyzed_analysis", "parameters": [ { "name": "series_id", @@ -9593,17 +10030,17 @@ ], "responses": { "200": { - "description": "List of external IDs", + "description": "Analysis tasks enqueued successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExternalIdListResponse" + "$ref": "#/components/schemas/CreateTaskResponse" } } } }, "403": { - "description": "Forbidden" + "description": "Permission denied" }, "404": { "description": "Series not found" @@ -9611,20 +10048,21 @@ }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - }, - "post": { + } + }, + "/api/v1/series/{series_id}/books": { + "get": { "tags": [ "Series" ], - "summary": "Create or update an external ID for a series", - "description": "Upserts by series_id + source: if an external ID with the same source already exists,\nit will be updated instead of creating a duplicate.", - "operationId": "create_series_external_id", + "summary": "Get books in a series", + "operationId": "get_series_books", "parameters": [ { "name": "series_id", @@ -9635,25 +10073,36 @@ "type": "string", "format": "uuid" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateSeriesExternalIdRequest" - } + }, + { + "name": "includeDeleted", + "in": "query", + "description": "Include deleted books in the result", + "required": false, + "schema": { + "type": "boolean" } }, - "required": true - }, + { + "name": "full", + "in": "query", + "description": "Return full data including metadata and locks.\nDefault is false for backward compatibility.", + 
"required": false, + "schema": { + "type": "boolean" + } + } + ], "responses": { "200": { - "description": "External ID created or updated", + "description": "List of books in the series (returns Vec when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExternalIdDto" + "type": "array", + "items": { + "$ref": "#/components/schemas/BookDto" + } } } } @@ -9675,13 +10124,13 @@ ] } }, - "/api/v1/series/{series_id}/external-ids/{external_id_id}": { - "delete": { + "/api/v1/series/{series_id}/cover": { + "post": { "tags": [ "Series" ], - "summary": "Delete an external ID from a series", - "operationId": "delete_series_external_id", + "summary": "Upload a custom cover/poster for a series", + "operationId": "upload_series_cover", "parameters": [ { "name": "series_id", @@ -9692,11 +10141,55 @@ "type": "string", "format": "uuid" } + } + ], + "requestBody": { + "description": "Multipart form with image file", + "content": { + "multipart/form-data": { + "schema": { + "type": "object" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Cover uploaded successfully" + }, + "400": { + "description": "Invalid image or request" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] }, { - "name": "external_id_id", + "api_key": [] + } + ] + } + }, + "/api/v1/series/{series_id}/cover/source": { + "patch": { + "tags": [ + "Series" + ], + "summary": "Set which cover source to use for a series (partial update)", + "operationId": "set_series_cover_source", + "parameters": [ + { + "name": "series_id", "in": "path", - "description": "External ID record ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -9704,15 +10197,25 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SelectCoverSourceRequest" + } + } + }, + "required": true 
+ }, "responses": { - "204": { - "description": "External ID deleted" + "200": { + "description": "Cover source updated successfully" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or external ID not found" + "description": "Series not found" } }, "security": [ @@ -9725,13 +10228,13 @@ ] } }, - "/api/v1/series/{series_id}/external-links": { + "/api/v1/series/{series_id}/covers": { "get": { "tags": [ "Series" ], - "summary": "Get external links for a series", - "operationId": "get_series_external_links", + "summary": "List all covers for a series", + "operationId": "list_series_covers", "parameters": [ { "name": "series_id", @@ -9746,11 +10249,11 @@ ], "responses": { "200": { - "description": "List of external links for the series", + "description": "List of series covers", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ExternalLinkListResponse" + "$ref": "#/components/schemas/SeriesCoverListResponse" } } } @@ -9770,13 +10273,15 @@ "api_key": [] } ] - }, - "post": { + } + }, + "/api/v1/series/{series_id}/covers/selected": { + "delete": { "tags": [ "Series" ], - "summary": "Add or update an external link for a series", - "operationId": "create_external_link", + "summary": "Reset series cover to default (deselect all custom covers)", + "operationId": "reset_series_cover", "parameters": [ { "name": "series_id", @@ -9789,29 +10294,12 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateExternalLinkRequest" - } - } - }, - "required": true - }, "responses": { - "200": { - "description": "External link created or updated", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExternalLinkDto" - } - } - } + "204": { + "description": "Reset to default cover successfully" }, "403": { - "description": "Forbidden - admin only" + "description": "Forbidden" }, "404": { "description": "Series not found" @@ -9827,13 +10315,13 
@@ ] } }, - "/api/v1/series/{series_id}/external-links/{source}": { + "/api/v1/series/{series_id}/covers/{cover_id}": { "delete": { "tags": [ "Series" ], - "summary": "Delete an external link by source name", - "operationId": "delete_external_link", + "summary": "Delete a cover from a series", + "operationId": "delete_series_cover", "parameters": [ { "name": "series_id", @@ -9846,24 +10334,28 @@ } }, { - "name": "source", + "name": "cover_id", "in": "path", - "description": "Source name (e.g., 'myanimelist', 'mangadex')", + "description": "Cover ID to delete", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } } ], "responses": { "204": { - "description": "External link deleted" + "description": "Cover deleted successfully" + }, + "400": { + "description": "Cannot delete the only selected cover" }, "403": { - "description": "Forbidden - admin only" + "description": "Forbidden" }, "404": { - "description": "Series or link not found" + "description": "Series or cover not found" } }, "security": [ @@ -9876,13 +10368,14 @@ ] } }, - "/api/v1/series/{series_id}/external-ratings": { + "/api/v1/series/{series_id}/covers/{cover_id}/image": { "get": { "tags": [ "Series" ], - "summary": "Get external ratings for a series", - "operationId": "get_series_external_ratings", + "summary": "Get a specific cover image for a series", + "description": "Supports HTTP conditional caching with ETag and Last-Modified headers,\nreturning 304 Not Modified when the client has a valid cached copy.", + "operationId": "get_series_cover_image", "parameters": [ { "name": "series_id", @@ -9893,24 +10386,33 @@ "type": "string", "format": "uuid" } + }, + { + "name": "cover_id", + "in": "path", + "description": "Cover ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { "200": { - "description": "List of external ratings for the series", + "description": "Cover image", "content": { - "application/json": { - 
"schema": { - "$ref": "#/components/schemas/ExternalRatingListResponse" - } - } + "image/jpeg": {} } }, + "304": { + "description": "Not modified (client cache is valid)" + }, "403": { "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series or cover not found" } }, "security": [ @@ -9921,13 +10423,15 @@ "api_key": [] } ] - }, - "post": { + } + }, + "/api/v1/series/{series_id}/covers/{cover_id}/select": { + "put": { "tags": [ "Series" ], - "summary": "Add or update an external rating for a series", - "operationId": "create_external_rating", + "summary": "Select a cover as the primary cover for a series", + "operationId": "select_series_cover", "parameters": [ { "name": "series_id", @@ -9938,34 +10442,34 @@ "type": "string", "format": "uuid" } + }, + { + "name": "cover_id", + "in": "path", + "description": "Cover ID to select", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateExternalRatingRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "External rating created or updated", + "description": "Cover selected successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ExternalRatingDto" + "$ref": "#/components/schemas/SeriesCoverDto" } } } }, "403": { - "description": "Forbidden - admin only" + "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series or cover not found" } }, "security": [ @@ -9978,62 +10482,14 @@ ] } }, - "/api/v1/series/{series_id}/external-ratings/{source}": { - "delete": { + "/api/v1/series/{series_id}/download": { + "get": { "tags": [ "Series" ], - "summary": "Delete an external rating by source name", - "operationId": "delete_external_rating", - "parameters": [ - { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": 
true, - "schema": { - "type": "string", - "format": "uuid" - } - }, - { - "name": "source", - "in": "path", - "description": "Source name (e.g., 'myanimelist', 'anilist')", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "204": { - "description": "External rating deleted" - }, - "403": { - "description": "Forbidden - admin only" - }, - "404": { - "description": "Series or rating not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - } - }, - "/api/v1/series/{series_id}/genres": { - "get": { - "tags": [ - "Genres" - ], - "summary": "Get genres for a series", - "operationId": "get_series_genres", + "summary": "Download all books in a series as a zip file", + "description": "Creates a zip archive containing all detected books in the series.\nOnly includes books that were scanned and detected by the library scanner.", + "operationId": "download_series", "parameters": [ { "name": "series_id", @@ -10048,20 +10504,16 @@ ], "responses": { "200": { - "description": "List of genres for the series", + "description": "Zip file containing all books in the series", "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GenreListResponse" - } - } + "application/zip": {} } }, "403": { "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series not found or has no books" } }, "security": [ @@ -10072,13 +10524,15 @@ "api_key": [] } ] - }, - "put": { + } + }, + "/api/v1/series/{series_id}/external-ids": { + "get": { "tags": [ - "Genres" + "Series" ], - "summary": "Set genres for a series (replaces existing)", - "operationId": "set_series_genres", + "summary": "List all external IDs for a series", + "operationId": "list_series_external_ids", "parameters": [ { "name": "series_id", @@ -10091,23 +10545,13 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SetSeriesGenresRequest" - } - 
} - }, - "required": true - }, "responses": { "200": { - "description": "Genres updated", + "description": "List of external IDs", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenreListResponse" + "$ref": "#/components/schemas/SeriesExternalIdListResponse" } } } @@ -10130,10 +10574,11 @@ }, "post": { "tags": [ - "Genres" + "Series" ], - "summary": "Add a single genre to a series", - "operationId": "add_series_genre", + "summary": "Create or update an external ID for a series", + "description": "Upserts by series_id + source: if an external ID with the same source already exists,\nit will be updated instead of creating a duplicate.", + "operationId": "create_series_external_id", "parameters": [ { "name": "series_id", @@ -10150,7 +10595,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AddSeriesGenreRequest" + "$ref": "#/components/schemas/CreateSeriesExternalIdRequest" } } }, @@ -10158,11 +10603,11 @@ }, "responses": { "200": { - "description": "Genre added", + "description": "External ID created or updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenreDto" + "$ref": "#/components/schemas/SeriesExternalIdDto" } } } @@ -10184,13 +10629,13 @@ ] } }, - "/api/v1/series/{series_id}/genres/{genre_id}": { + "/api/v1/series/{series_id}/external-ids/{external_id_id}": { "delete": { "tags": [ - "Genres" + "Series" ], - "summary": "Remove a genre from a series", - "operationId": "remove_series_genre", + "summary": "Delete an external ID from a series", + "operationId": "delete_series_external_id", "parameters": [ { "name": "series_id", @@ -10203,9 +10648,9 @@ } }, { - "name": "genre_id", + "name": "external_id_id", "in": "path", - "description": "Genre ID", + "description": "External ID record ID", "required": true, "schema": { "type": "string", @@ -10215,13 +10660,13 @@ ], "responses": { "204": { - "description": "Genre removed from series" + "description": "External ID 
deleted" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or genre link not found" + "description": "Series or external ID not found" } }, "security": [ @@ -10234,14 +10679,13 @@ ] } }, - "/api/v1/series/{series_id}/metadata": { + "/api/v1/series/{series_id}/external-links": { "get": { "tags": [ "Series" ], - "summary": "Get series metadata including all related data", - "description": "Returns comprehensive metadata with lock states, genres, tags, alternate titles,\nexternal ratings, and external links.", - "operationId": "get_series_metadata", + "summary": "Get external links for a series", + "operationId": "get_series_external_links", "parameters": [ { "name": "series_id", @@ -10256,11 +10700,11 @@ ], "responses": { "200": { - "description": "Series metadata with all related data", + "description": "List of external links for the series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FullSeriesMetadataResponse" + "$ref": "#/components/schemas/ExternalLinkListResponse" } } } @@ -10281,13 +10725,12 @@ } ] }, - "put": { + "post": { "tags": [ "Series" ], - "summary": "Replace all series metadata (PUT)", - "description": "Replaces all metadata fields with the values in the request.\nOmitting a field (or setting it to null) will clear that field.", - "operationId": "replace_series_metadata", + "summary": "Add or update an external link for a series", + "operationId": "create_external_link", "parameters": [ { "name": "series_id", @@ -10304,7 +10747,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ReplaceSeriesMetadataRequest" + "$ref": "#/components/schemas/CreateExternalLinkRequest" } } }, @@ -10312,17 +10755,17 @@ }, "responses": { "200": { - "description": "Metadata replaced successfully", + "description": "External link created or updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesMetadataResponse" + "$ref": 
"#/components/schemas/ExternalLinkDto" } } } }, "403": { - "description": "Forbidden" + "description": "Forbidden - admin only" }, "404": { "description": "Series not found" @@ -10336,14 +10779,15 @@ "api_key": [] } ] - }, + } + }, + "/api/v1/series/{series_id}/external-links/{source}": { "delete": { "tags": [ "Series" ], - "summary": "Reset series metadata to filesystem-derived defaults", - "description": "Completely resets all series metadata back to original values derived from\nthe filesystem. This deletes and recreates the metadata row, clears all\nassociated data (genres, tags, alternate titles, external IDs, external\nratings, external links, covers, metadata sources, sharing tags), and\nunlocks all fields. The title is reset to the series directory name.\n\nUser ratings, read progress, book records, and book metadata are preserved.", - "operationId": "reset_series_metadata", + "summary": "Delete an external link by source name", + "operationId": "delete_external_link", "parameters": [ { "name": "series_id", @@ -10354,80 +10798,26 @@ "type": "string", "format": "uuid" } - } - ], - "responses": { - "200": { - "description": "Metadata reset successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FullSeriesMetadataResponse" - } - } - } - }, - "403": { - "description": "Forbidden" - }, - "404": { - "description": "Series not found" - } - }, - "security": [ - { - "jwt_bearer": [] }, { - "api_key": [] - } - ] - }, - "patch": { - "tags": [ - "Series" - ], - "summary": "Partially update series metadata (PATCH)", - "description": "Only provided fields will be updated. 
Absent fields are unchanged.\nExplicitly null fields will be cleared.", - "operationId": "patch_series_metadata", - "parameters": [ - { - "name": "series_id", + "name": "source", "in": "path", - "description": "Series ID", + "description": "Source name (e.g., 'myanimelist', 'mangadex')", "required": true, "schema": { - "type": "string", - "format": "uuid" + "type": "string" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PatchSeriesMetadataRequest" - } - } - }, - "required": true - }, "responses": { - "200": { - "description": "Metadata updated successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SeriesMetadataResponse" - } - } - } + "204": { + "description": "External link deleted" }, "403": { - "description": "Forbidden" + "description": "Forbidden - admin only" }, "404": { - "description": "Series not found" + "description": "Series or link not found" } }, "security": [ @@ -10440,13 +10830,13 @@ ] } }, - "/api/v1/series/{series_id}/metadata/locks": { + "/api/v1/series/{series_id}/external-ratings": { "get": { "tags": [ "Series" ], - "summary": "Get metadata lock states", - "operationId": "get_metadata_locks", + "summary": "Get external ratings for a series", + "operationId": "get_series_external_ratings", "parameters": [ { "name": "series_id", @@ -10461,11 +10851,11 @@ ], "responses": { "200": { - "description": "Current lock states", + "description": "List of external ratings for the series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataLocks" + "$ref": "#/components/schemas/ExternalRatingListResponse" } } } @@ -10486,13 +10876,12 @@ } ] }, - "put": { + "post": { "tags": [ "Series" ], - "summary": "Update metadata lock states", - "description": "Sets which metadata fields are locked. 
Locked fields will not be overwritten\nby automatic metadata refresh from book analysis or external sources.", - "operationId": "update_metadata_locks", + "summary": "Add or update an external rating for a series", + "operationId": "create_external_rating", "parameters": [ { "name": "series_id", @@ -10509,7 +10898,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdateMetadataLocksRequest" + "$ref": "#/components/schemas/CreateExternalRatingRequest" } } }, @@ -10517,17 +10906,17 @@ }, "responses": { "200": { - "description": "Lock states updated", + "description": "External rating created or updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataLocks" + "$ref": "#/components/schemas/ExternalRatingDto" } } } }, "403": { - "description": "Forbidden" + "description": "Forbidden - admin only" }, "404": { "description": "Series not found" @@ -10543,13 +10932,13 @@ ] } }, - "/api/v1/series/{series_id}/purge-deleted": { + "/api/v1/series/{series_id}/external-ratings/{source}": { "delete": { "tags": [ "Series" ], - "summary": "Purge deleted books from a series", - "operationId": "purge_series_deleted_books", + "summary": "Delete an external rating by source name", + "operationId": "delete_external_rating", "parameters": [ { "name": "series_id", @@ -10560,26 +10949,26 @@ "type": "string", "format": "uuid" } + }, + { + "name": "source", + "in": "path", + "description": "Source name (e.g., 'myanimelist', 'anilist')", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { - "200": { - "description": "Number of books purged", - "content": { - "text/plain": { - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - } + "204": { + "description": "External rating deleted" }, "403": { - "description": "Forbidden" + "description": "Forbidden - admin only" }, "404": { - "description": "Series not found" + "description": "Series or rating not found" } }, 
"security": [ @@ -10592,14 +10981,13 @@ ] } }, - "/api/v1/series/{series_id}/rating": { + "/api/v1/series/{series_id}/genres": { "get": { "tags": [ - "Ratings" + "Genres" ], - "summary": "Get the current user's rating for a series", - "description": "Returns null if no rating exists (not a 404, since the series exists but has no rating)", - "operationId": "get_series_rating", + "summary": "Get genres for a series", + "operationId": "get_series_genres", "parameters": [ { "name": "series_id", @@ -10614,18 +11002,11 @@ ], "responses": { "200": { - "description": "User's rating for the series (null if not rated)", + "description": "List of genres for the series", "content": { "application/json": { "schema": { - "oneOf": [ - { - "type": "null" - }, - { - "$ref": "#/components/schemas/UserSeriesRatingDto" - } - ] + "$ref": "#/components/schemas/GenreListResponse" } } } @@ -10648,10 +11029,10 @@ }, "put": { "tags": [ - "Ratings" + "Genres" ], - "summary": "Set (create or update) the current user's rating for a series", - "operationId": "set_series_rating", + "summary": "Set genres for a series (replaces existing)", + "operationId": "set_series_genres", "parameters": [ { "name": "series_id", @@ -10668,7 +11049,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SetUserRatingRequest" + "$ref": "#/components/schemas/SetSeriesGenresRequest" } } }, @@ -10676,18 +11057,15 @@ }, "responses": { "200": { - "description": "Rating saved", + "description": "Genres updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserSeriesRatingDto" + "$ref": "#/components/schemas/GenreListResponse" } } } }, - "400": { - "description": "Invalid rating value" - }, "403": { "description": "Forbidden" }, @@ -10704,12 +11082,12 @@ } ] }, - "delete": { + "post": { "tags": [ - "Ratings" + "Genres" ], - "summary": "Delete the current user's rating for a series", - "operationId": "delete_series_rating", + "summary": "Add a single genre 
to a series", + "operationId": "add_series_genre", "parameters": [ { "name": "series_id", @@ -10722,15 +11100,32 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddSeriesGenreRequest" + } + } + }, + "required": true + }, "responses": { - "204": { - "description": "Rating deleted" + "200": { + "description": "Genre added", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GenreDto" + } + } + } }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or rating not found" + "description": "Series not found" } }, "security": [ @@ -10743,14 +11138,13 @@ ] } }, - "/api/v1/series/{series_id}/ratings/average": { - "get": { + "/api/v1/series/{series_id}/genres/{genre_id}": { + "delete": { "tags": [ - "Series" + "Genres" ], - "summary": "Get the average community rating for a series", - "description": "Returns the average rating from all users and the total count of ratings.\nRatings are stored on a 0-100 scale internally.", - "operationId": "get_series_average_rating", + "summary": "Remove a genre from a series", + "operationId": "remove_series_genre", "parameters": [ { "name": "series_id", @@ -10761,28 +11155,27 @@ "type": "string", "format": "uuid" } + }, + { + "name": "genre_id", + "in": "path", + "description": "Genre ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { - "200": { - "description": "Average rating for the series", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SeriesAverageRatingResponse" - }, - "example": { - "average": 78.5, - "count": 15 - } - } - } + "204": { + "description": "Genre removed from series" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series or genre link not found" } }, "security": [ @@ -10795,13 +11188,14 @@ ] } }, - "/api/v1/series/{series_id}/read": { - "post": { + 
"/api/v1/series/{series_id}/metadata": { + "get": { "tags": [ "Series" ], - "summary": "Mark all books in a series as read", - "operationId": "mark_series_as_read", + "summary": "Get series metadata including all related data", + "description": "Returns comprehensive metadata with lock states, genres, tags, alternate titles,\nexternal ratings, and external links.", + "operationId": "get_series_metadata", "parameters": [ { "name": "series_id", @@ -10816,11 +11210,11 @@ ], "responses": { "200": { - "description": "Series marked as read", + "description": "Series metadata with all related data", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MarkReadResponse" + "$ref": "#/components/schemas/FullSeriesMetadataResponse" } } } @@ -10840,16 +11234,14 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/renumber": { - "post": { + }, + "put": { "tags": [ - "Scans" + "Series" ], - "summary": "Renumber all books in a series", - "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues a `RenumberSeries` task that recalculates book numbers based on the\nlibrary's number strategy and the current natural sort order of filenames.\nReturns a task ID for tracking progress via SSE.", - "operationId": "renumber_series", + "summary": "Replace all series metadata (PUT)", + "description": "Replaces all metadata fields with the values in the request.\nOmitting a field (or setting it to null) will clear that field.", + "operationId": "replace_series_metadata", "parameters": [ { "name": "series_id", @@ -10862,19 +11254,29 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReplaceSeriesMetadataRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Renumber task enqueued", + "description": "Metadata replaced successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": 
"#/components/schemas/SeriesMetadataResponse" } } } }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { "description": "Series not found" @@ -10882,21 +11284,20 @@ }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/sharing-tags": { - "get": { + }, + "delete": { "tags": [ - "Sharing Tags" + "Series" ], - "summary": "Get sharing tags for a series (admin only)", - "operationId": "get_series_sharing_tags", + "summary": "Reset series metadata to filesystem-derived defaults", + "description": "Completely resets all series metadata back to original values derived from\nthe filesystem. This deletes and recreates the metadata row, clears all\nassociated data (genres, tags, alternate titles, external IDs, external\nratings, external links, covers, metadata sources, sharing tags), and\nunlocks all fields. The title is reset to the series directory name.\n\nUser ratings, read progress, book records, and book metadata are preserved.", + "operationId": "reset_series_metadata", "parameters": [ { "name": "series_id", @@ -10911,20 +11312,20 @@ ], "responses": { "200": { - "description": "List of sharing tags for the series", + "description": "Metadata reset successfully", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SharingTagSummaryDto" - } + "$ref": "#/components/schemas/FullSeriesMetadataResponse" } } } }, "403": { - "description": "Forbidden - Missing permission" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -10936,12 +11337,13 @@ } ] }, - "put": { + "patch": { "tags": [ - "Sharing Tags" + "Series" ], - "summary": "Set sharing tags for a series (replaces existing) (admin only)", - "operationId": "set_series_sharing_tags", + "summary": "Partially update series metadata (PATCH)", + "description": "Only provided fields will be updated. 
Absent fields are unchanged.\nExplicitly null fields will be cleared.", + "operationId": "patch_series_metadata", "parameters": [ { "name": "series_id", @@ -10958,7 +11360,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SetSeriesSharingTagsRequest" + "$ref": "#/components/schemas/PatchSeriesMetadataRequest" } } }, @@ -10966,20 +11368,20 @@ }, "responses": { "200": { - "description": "Sharing tags set", + "description": "Metadata updated successfully", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SharingTagSummaryDto" - } + "$ref": "#/components/schemas/SeriesMetadataResponse" } } } }, "403": { - "description": "Forbidden - Missing permission" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -10990,13 +11392,15 @@ "api_key": [] } ] - }, - "post": { + } + }, + "/api/v1/series/{series_id}/metadata/locks": { + "get": { "tags": [ - "Sharing Tags" + "Series" ], - "summary": "Add a sharing tag to a series (admin only)", - "operationId": "add_series_sharing_tag", + "summary": "Get metadata lock states", + "operationId": "get_metadata_locks", "parameters": [ { "name": "series_id", @@ -11009,25 +11413,22 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ModifySeriesSharingTagRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Sharing tag added" - }, - "400": { - "description": "Tag already assigned" + "description": "Current lock states", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataLocks" + } + } + } }, "403": { - "description": "Forbidden - Missing permission" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -11038,15 +11439,14 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/sharing-tags/{tag_id}": { - 
"delete": { - "tags": [ - "Sharing Tags" + }, + "put": { + "tags": [ + "Series" ], - "summary": "Remove a sharing tag from a series (admin only)", - "operationId": "remove_series_sharing_tag", + "summary": "Update metadata lock states", + "description": "Sets which metadata fields are locked. Locked fields will not be overwritten\nby automatic metadata refresh from book analysis or external sources.", + "operationId": "update_metadata_locks", "parameters": [ { "name": "series_id", @@ -11057,27 +11457,34 @@ "type": "string", "format": "uuid" } - }, - { - "name": "tag_id", - "in": "path", - "description": "Sharing tag ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateMetadataLocksRequest" + } + } + }, + "required": true + }, "responses": { - "204": { - "description": "Sharing tag removed" + "200": { + "description": "Lock states updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataLocks" + } + } + } }, "403": { - "description": "Forbidden - Missing permission" + "description": "Forbidden" }, "404": { - "description": "Sharing tag not assigned to series" + "description": "Series not found" } }, "security": [ @@ -11090,13 +11497,13 @@ ] } }, - "/api/v1/series/{series_id}/tags": { - "get": { + "/api/v1/series/{series_id}/purge-deleted": { + "delete": { "tags": [ - "Tags" + "Series" ], - "summary": "Get tags for a series", - "operationId": "get_series_tags", + "summary": "Purge deleted books from a series", + "operationId": "purge_series_deleted_books", "parameters": [ { "name": "series_id", @@ -11111,11 +11518,13 @@ ], "responses": { "200": { - "description": "List of tags for the series", + "description": "Number of books purged", "content": { - "application/json": { + "text/plain": { "schema": { - "$ref": "#/components/schemas/TagListResponse" + "type": "integer", + 
"format": "int64", + "minimum": 0 } } } @@ -11135,13 +11544,16 @@ "api_key": [] } ] - }, - "put": { + } + }, + "/api/v1/series/{series_id}/rating": { + "get": { "tags": [ - "Tags" + "Ratings" ], - "summary": "Set tags for a series (replaces existing)", - "operationId": "set_series_tags", + "summary": "Get the current user's rating for a series", + "description": "Returns null if no rating exists (not a 404, since the series exists but has no rating)", + "operationId": "get_series_rating", "parameters": [ { "name": "series_id", @@ -11154,23 +11566,20 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SetSeriesTagsRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Tags updated", + "description": "User's rating for the series (null if not rated)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TagListResponse" + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/UserSeriesRatingDto" + } + ] } } } @@ -11191,12 +11600,12 @@ } ] }, - "post": { + "put": { "tags": [ - "Tags" + "Ratings" ], - "summary": "Add a single tag to a series", - "operationId": "add_series_tag", + "summary": "Set (create or update) the current user's rating for a series", + "operationId": "set_series_rating", "parameters": [ { "name": "series_id", @@ -11213,7 +11622,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AddSeriesTagRequest" + "$ref": "#/components/schemas/SetUserRatingRequest" } } }, @@ -11221,15 +11630,18 @@ }, "responses": { "200": { - "description": "Tag added", + "description": "Rating saved", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TagDto" + "$ref": "#/components/schemas/UserSeriesRatingDto" } } } }, + "400": { + "description": "Invalid rating value" + }, "403": { "description": "Forbidden" }, @@ -11245,15 +11657,13 @@ "api_key": [] } ] - } - }, - 
"/api/v1/series/{series_id}/tags/{tag_id}": { + }, "delete": { "tags": [ - "Tags" + "Ratings" ], - "summary": "Remove a tag from a series", - "operationId": "remove_series_tag", + "summary": "Delete the current user's rating for a series", + "operationId": "delete_series_rating", "parameters": [ { "name": "series_id", @@ -11264,27 +11674,17 @@ "type": "string", "format": "uuid" } - }, - { - "name": "tag_id", - "in": "path", - "description": "Tag ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } } ], "responses": { "204": { - "description": "Tag removed from series" + "description": "Rating deleted" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or tag link not found" + "description": "Series or rating not found" } }, "security": [ @@ -11297,13 +11697,14 @@ ] } }, - "/api/v1/series/{series_id}/thumbnail": { + "/api/v1/series/{series_id}/ratings/average": { "get": { "tags": [ "Series" ], - "summary": "Get thumbnail/cover image for a series", - "operationId": "get_series_thumbnail", + "summary": "Get the average community rating for a series", + "description": "Returns the average rating from all users and the total count of ratings.\nRatings are stored on a 0-100 scale internally.", + "operationId": "get_series_average_rating", "parameters": [ { "name": "series_id", @@ -11318,14 +11719,19 @@ ], "responses": { "200": { - "description": "Thumbnail image", + "description": "Average rating for the series", "content": { - "image/jpeg": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/SeriesAverageRatingResponse" + }, + "example": { + "average": 78.5, + "count": 15 + } + } } }, - "304": { - "description": "Not modified (client cache is valid)" - }, "403": { "description": "Forbidden" }, @@ -11343,14 +11749,13 @@ ] } }, - "/api/v1/series/{series_id}/thumbnail/generate": { + "/api/v1/series/{series_id}/read": { "post": { "tags": [ - "Thumbnails" + "Series" ], - "summary": "Generate thumbnail 
for a series", - "description": "Queues a task to generate (or regenerate) the thumbnail for a specific series.\nThe series thumbnail is derived from the first book's cover.\n\n# Permission Required\n- `tasks:write`", - "operationId": "generate_series_thumbnail", + "summary": "Mark all books in a series as read", + "operationId": "mark_series_as_read", "parameters": [ { "name": "series_id", @@ -11363,29 +11768,19 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ForceRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Thumbnail generation task queued", + "description": "Series marked as read", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/MarkReadResponse" } } } }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { "description": "Series not found" @@ -11393,7 +11788,7 @@ }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11401,14 +11796,13 @@ ] } }, - "/api/v1/series/{series_id}/title/reprocess": { - "post": { + "/api/v1/series/{series_id}/releases": { + "get": { "tags": [ - "Tasks" + "Releases" ], - "summary": "Reprocess a series title using library preprocessing rules", - "description": "Applies the library's preprocessing rules to the series' original directory name\nto regenerate the display title. 
This is useful when preprocessing rules are added\nor changed after series have already been created.\n\nThe title will only be updated if:\n- The `title_lock` is false (respects user edits)\n- The preprocessing rules produce a different title\n\nIf the title is changed and `title_sort_lock` is false, the `title_sort` will be\ncleared (set to None) to let it fall back to the new title for sorting.\n\n- With `dryRun: true`: Returns a synchronous preview of what would change\n- With `dryRun: false` (default): Enqueues a background task to process\n\n# Permission Required\n- `series:write`", - "operationId": "reprocess_series_title", + "summary": "List release-ledger entries for a series.", + "operationId": "list_series_releases", "parameters": [ { "name": "series_id", @@ -11419,25 +11813,49 @@ "type": "string", "format": "uuid" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/EnqueueReprocessTitleRequest" - } + }, + { + "name": "state", + "in": "query", + "description": "Filter by state. 
Defaults to all states (no filter) so the per-series\nview shows the full history.", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] } }, - "required": true - }, + { + "name": "page", + "in": "query", + "description": "1-indexed page number.", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "description": "Items per page (max 500, default 50).", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], "responses": { "200": { - "description": "Task enqueued or dry run preview", + "description": "Paginated ledger entries for the series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueReprocessTitleResponse" + "$ref": "#/components/schemas/PaginatedResponse_ReleaseLedgerEntryDto" } } } @@ -11451,7 +11869,7 @@ }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11459,13 +11877,14 @@ ] } }, - "/api/v1/series/{series_id}/unread": { + "/api/v1/series/{series_id}/renumber": { "post": { "tags": [ - "Series" + "Scans" ], - "summary": "Mark all books in a series as unread", - "operationId": "mark_series_as_unread", + "summary": "Renumber all books in a series", + "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues a `RenumberSeries` task that recalculates book numbers based on the\nlibrary's number strategy and the current natural sort order of filenames.\nReturns a task ID for tracking progress via SSE.", + "operationId": "renumber_series", "parameters": [ { "name": "series_id", @@ -11480,17 +11899,17 @@ ], "responses": { "200": { - "description": "Series marked as unread", + "description": "Renumber task enqueued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MarkReadResponse" + "$ref": "#/components/schemas/CreateTaskResponse" } } } }, "403": { - "description": "Forbidden" + 
"description": "Permission denied" }, "404": { "description": "Series not found" @@ -11498,7 +11917,7 @@ }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -11506,64 +11925,41 @@ ] } }, - "/api/v1/settings/branding": { + "/api/v1/series/{series_id}/sharing-tags": { "get": { "tags": [ - "Settings" + "Sharing Tags" ], - "summary": "Get branding settings (unauthenticated)", - "description": "Returns branding-related settings that are needed on unauthenticated pages\nlike the login screen. This endpoint does not require authentication.", - "operationId": "get_branding_settings", - "responses": { - "200": { - "description": "Branding settings", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BrandingSettingsDto" - }, - "example": { - "applicationName": "Codex" - } - } + "summary": "Get sharing tags for a series (admin only)", + "operationId": "get_series_sharing_tags", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } } - } - } - }, - "/api/v1/settings/public": { - "get": { - "tags": [ - "Settings" ], - "summary": "Get public display settings (authenticated users)", - "description": "Returns non-sensitive settings that affect UI/display behavior.\nThis endpoint is available to all authenticated users, not just admins.", - "operationId": "get_public_settings", "responses": { "200": { - "description": "Public settings", + "description": "List of sharing tags for the series", "content": { "application/json": { "schema": { - "type": "object", - "additionalProperties": { - "$ref": "#/components/schemas/PublicSettingDto" - }, - "propertyNames": { - "type": "string" - } - }, - "example": { - "display.custom_metadata_template": { - "key": "display.custom_metadata_template", - "value": "{{#if custom_metadata}}## Additional Information\n{{#each custom_metadata}}- **{{@key}}**: 
{{this}}\n{{/each}}{{/if}}" + "type": "array", + "items": { + "$ref": "#/components/schemas/SharingTagSummaryDto" } } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Missing permission" } }, "security": [ @@ -11574,21 +11970,30 @@ "api_key": [] } ] - } - }, - "/api/v1/setup/initialize": { - "post": { + }, + "put": { "tags": [ - "Setup" + "Sharing Tags" + ], + "summary": "Set sharing tags for a series (replaces existing) (admin only)", + "operationId": "set_series_sharing_tags", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Initialize application setup by creating the first admin user", - "description": "Creates the first admin user with email verification bypassed and returns a JWT token", - "operationId": "initialize_setup", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InitializeSetupRequest" + "$ref": "#/components/schemas/SetSeriesSharingTagsRequest" } } }, @@ -11596,37 +12001,54 @@ }, "responses": { "200": { - "description": "Setup initialized", + "description": "Sharing tags set", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InitializeSetupResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/SharingTagSummaryDto" + } } } } }, - "400": { - "description": "Invalid request or setup already completed" + "403": { + "description": "Forbidden - Missing permission" + } + }, + "security": [ + { + "jwt_bearer": [] }, - "422": { - "description": "Validation error" + { + "api_key": [] } - } - } - }, - "/api/v1/setup/settings": { - "patch": { + ] + }, + "post": { "tags": [ - "Setup" + "Sharing Tags" + ], + "summary": "Add a sharing tag to a series (admin only)", + "operationId": "add_series_sharing_tag", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": 
"Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Configure initial settings (optional step in setup wizard)", - "description": "Allows the newly created admin to configure database settings", - "operationId": "configure_initial_settings", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ConfigureSettingsRequest" + "$ref": "#/components/schemas/ModifySeriesSharingTagRequest" } } }, @@ -11634,92 +12056,63 @@ }, "responses": { "200": { - "description": "Settings configured", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConfigureSettingsResponse" - } - } - } + "description": "Sharing tag added" + }, + "400": { + "description": "Tag already assigned" }, "403": { - "description": "Forbidden - Admin only" + "description": "Forbidden - Missing permission" } }, "security": [ { "jwt_bearer": [] + }, + { + "api_key": [] } ] } }, - "/api/v1/setup/status": { - "get": { - "tags": [ - "Setup" - ], - "summary": "Check if initial setup is required", - "description": "Returns whether the application needs initial setup (no users exist)", - "operationId": "setup_status", - "responses": { - "200": { - "description": "Setup status", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SetupStatusResponse" - } - } - } - } - } - } - }, - "/api/v1/tags": { - "get": { + "/api/v1/series/{series_id}/sharing-tags/{tag_id}": { + "delete": { "tags": [ - "Tags" + "Sharing Tags" ], - "summary": "List all tags", - "operationId": "list_tags", + "summary": "Remove a sharing tag from a series (admin only)", + "operationId": "remove_series_sharing_tag", "parameters": [ { - "name": "page", - "in": "query", - "description": "Page number (1-indexed, default 1)", - "required": false, + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, "schema": { - "type": "integer", - "format": "int64", - 
"minimum": 0 + "type": "string", + "format": "uuid" } }, { - "name": "pageSize", - "in": "query", - "description": "Number of items per page (default 50, max 500)", - "required": false, + "name": "tag_id", + "in": "path", + "description": "Sharing tag ID", + "required": true, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": "string", + "format": "uuid" } } ], "responses": { - "200": { - "description": "List of all tags", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PaginatedResponse_TagDto" - } - } - } + "204": { + "description": "Sharing tag removed" }, "403": { - "description": "Forbidden" + "description": "Forbidden - Missing permission" + }, + "404": { + "description": "Sharing tag not assigned to series" } }, "security": [ @@ -11732,29 +12125,44 @@ ] } }, - "/api/v1/tags/cleanup": { - "post": { + "/api/v1/series/{series_id}/tags": { + "get": { "tags": [ "Tags" ], - "summary": "Delete all unused tags (tags with no series linked)", - "operationId": "cleanup_tags", + "summary": "Get tags for a series", + "operationId": "get_series_tags", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], "responses": { "200": { - "description": "Cleanup completed", + "description": "List of tags for the series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TaxonomyCleanupResponse" + "$ref": "#/components/schemas/TagListResponse" } } } }, "403": { - "description": "Forbidden - admin only" - } - }, - "security": [ + "description": "Forbidden" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ { "jwt_bearer": [] }, @@ -11762,20 +12170,18 @@ "api_key": [] } ] - } - }, - "/api/v1/tags/{tag_id}": { - "delete": { + }, + "put": { "tags": [ "Tags" ], - "summary": "Delete a tag from the taxonomy (admin only)", - "operationId": 
"delete_tag", + "summary": "Set tags for a series (replaces existing)", + "operationId": "set_series_tags", "parameters": [ { - "name": "tag_id", + "name": "series_id", "in": "path", - "description": "Tag ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -11783,90 +12189,37 @@ } } ], - "responses": { - "204": { - "description": "Tag deleted" - }, - "403": { - "description": "Forbidden - admin only" - }, - "404": { - "description": "Tag not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - } - }, - "/api/v1/tasks": { - "get": { - "tags": [ - "Task Queue" - ], - "summary": "List tasks with optional filtering", - "description": "# Permission Required\n- `tasks:read`", - "operationId": "list_tasks", - "parameters": [ - { - "name": "status", - "in": "query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "taskType", - "in": "query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetSeriesTagsRequest" + } } }, - { - "name": "limit", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - ], + "required": true + }, "responses": { "200": { - "description": "Tasks retrieved successfully", + "description": "Tags updated", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/TaskResponse" - } + "$ref": "#/components/schemas/TagListResponse" } } } }, "403": { - "description": "Permission denied" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11875,16 +12228,27 @@ }, "post": { "tags": [ - "Task Queue" + "Tags" + ], + "summary": "Add a single tag to a series", + "operationId": "add_series_tag", + 
"parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Create a new task", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "create_task", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskRequest" + "$ref": "#/components/schemas/AddSeriesTagRequest" } } }, @@ -11892,25 +12256,25 @@ }, "responses": { "200": { - "description": "Task created successfully", + "description": "Tag added", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/TagDto" } } } }, - "400": { - "description": "Invalid request" - }, "403": { - "description": "Permission denied" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11918,32 +12282,49 @@ ] } }, - "/api/v1/tasks/nuke": { + "/api/v1/series/{series_id}/tags/{tag_id}": { "delete": { "tags": [ - "Task Queue" + "Tags" ], - "summary": "Nuclear option: Delete ALL tasks", - "description": "# Permission Required\n- `admin`", - "operationId": "nuke_all_tasks", - "responses": { - "200": { - "description": "All tasks deleted", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PurgeTasksResponse" - } - } + "summary": "Remove a tag from a series", + "operationId": "remove_series_tag", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "tag_id", + "in": "path", + "description": "Tag ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } + } + ], + "responses": { + "204": { + "description": "Tag removed from series" }, "403": { - "description": "Permission 
denied (admin only)" + "description": "Forbidden" + }, + "404": { + "description": "Series or tag link not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11951,44 +12332,45 @@ ] } }, - "/api/v1/tasks/purge": { - "delete": { + "/api/v1/series/{series_id}/thumbnail": { + "get": { "tags": [ - "Task Queue" + "Series" ], - "summary": "Purge old completed/failed tasks", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "purge_old_tasks", + "summary": "Get thumbnail/cover image for a series", + "operationId": "get_series_thumbnail", "parameters": [ { - "name": "days", - "in": "query", - "description": "Delete tasks older than N days (default: 30)", - "required": false, + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, "schema": { - "type": "integer", - "format": "int64" + "type": "string", + "format": "uuid" } } ], "responses": { "200": { - "description": "Tasks purged successfully", + "description": "Thumbnail image", "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PurgeTasksResponse" - } - } + "image/jpeg": {} } }, + "304": { + "description": "Not modified (client cache is valid)" + }, "403": { - "description": "Permission denied" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11996,64 +12378,57 @@ ] } }, - "/api/v1/tasks/stats": { - "get": { + "/api/v1/series/{series_id}/thumbnail/generate": { + "post": { "tags": [ - "Task Queue" + "Thumbnails" ], - "summary": "Get queue statistics", - "description": "# Permission Required\n- `tasks:read`", - "operationId": "get_task_stats", - "responses": { - "200": { - "description": "Statistics retrieved successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/TaskStats" - } - } - } - }, - "403": { - "description": "Permission denied" 
- } - }, - "security": [ - { - "bearer_auth": [] - }, + "summary": "Generate thumbnail for a series", + "description": "Queues a task to generate (or regenerate) the thumbnail for a specific series.\nThe series thumbnail is derived from the first book's cover.\n\n# Permission Required\n- `tasks:write`", + "operationId": "generate_series_thumbnail", + "parameters": [ { - "api_key": [] + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } - ] - } - }, - "/api/v1/tasks/stream": { - "get": { - "tags": [ - "Events" ], - "summary": "Subscribe to real-time task progress events via SSE", - "description": "Clients can subscribe to this endpoint to receive real-time notifications\nabout background task progress (analyze_book, generate_thumbnails, etc.).\n\n## Authentication\nRequires valid authentication with `LibrariesRead` permission.\n\n## Event Format\nEvents are sent as JSON-encoded `TaskProgressEvent` objects with the following structure:\n```json\n{\n \"task_id\": \"uuid\",\n \"task_type\": \"analyze_book\",\n \"status\": \"running\",\n \"progress\": {\n \"current\": 5,\n \"total\": 10,\n \"message\": \"Processing book 5 of 10\"\n },\n \"started_at\": \"2024-01-06T12:00:00Z\",\n \"library_id\": \"uuid\"\n}\n```\n\n## Keep-Alive\nA keep-alive message is sent every 15 seconds to prevent connection timeout.", - "operationId": "task_progress_stream", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ForceRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "SSE stream of task progress events", + "description": "Thumbnail generation task queued", "content": { - "text/event-stream": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateTaskResponse" + } + } } }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "Forbidden" + "description": 
"Permission denied" + }, + "404": { + "description": "Series not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -12061,19 +12436,19 @@ ] } }, - "/api/v1/tasks/{task_id}": { - "get": { + "/api/v1/series/{series_id}/title/reprocess": { + "post": { "tags": [ - "Task Queue" + "Tasks" ], - "summary": "Get task by ID", - "description": "# Permission Required\n- `tasks:read`", - "operationId": "get_task", + "summary": "Reprocess a series title using library preprocessing rules", + "description": "Applies the library's preprocessing rules to the series' original directory name\nto regenerate the display title. This is useful when preprocessing rules are added\nor changed after series have already been created.\n\nThe title will only be updated if:\n- The `title_lock` is false (respects user edits)\n- The preprocessing rules produce a different title\n\nIf the title is changed and `title_sort_lock` is false, the `title_sort` will be\ncleared (set to None) to let it fall back to the new title for sorting.\n\n- With `dryRun: true`: Returns a synchronous preview of what would change\n- With `dryRun: false` (default): Enqueues a background task to process\n\n# Permission Required\n- `series:write`", + "operationId": "reprocess_series_title", "parameters": [ { - "name": "task_id", + "name": "series_id", "in": "path", - "description": "Task ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -12081,22 +12456,32 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EnqueueReprocessTitleRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Task retrieved successfully", + "description": "Task enqueued or dry run preview", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TaskResponse" + "$ref": "#/components/schemas/EnqueueReprocessTitleResponse" } } } }, "403": { - "description": 
"Permission denied" + "description": "Forbidden" }, "404": { - "description": "Task not found" + "description": "Series not found" } }, "security": [ @@ -12109,19 +12494,19 @@ ] } }, - "/api/v1/tasks/{task_id}/cancel": { - "post": { + "/api/v1/series/{series_id}/tracking": { + "get": { "tags": [ - "Task Queue" + "Tracking" ], - "summary": "Cancel a task", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "cancel_task", + "summary": "Get release-tracking config for a series.", + "description": "Returns a virtual untracked row when no `series_tracking` row exists, so the\nfrontend can render the panel uniformly without special-casing absent rows.", + "operationId": "get_series_tracking", "parameters": [ { - "name": "task_id", + "name": "series_id", "in": "path", - "description": "Task ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -12131,48 +12516,43 @@ ], "responses": { "200": { - "description": "Task cancelled successfully", + "description": "Tracking config", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MessageResponse" + "$ref": "#/components/schemas/SeriesTrackingDto" } } } }, - "400": { - "description": "Task cannot be cancelled" - }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { - "description": "Task not found" + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] - } - }, - "/api/v1/tasks/{task_id}/retry": { - "post": { + }, + "patch": { "tags": [ - "Task Queue" + "Tracking" ], - "summary": "Retry a failed task", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "retry_task", + "summary": "Update release-tracking config for a series.", + "description": "Upserts: creates the row on first write, applies the patch otherwise.\nAll fields are optional — omit to leave alone, send `null` on a nullable\nfield to clear it.", + 
"operationId": "update_series_tracking", "parameters": [ { - "name": "task_id", + "name": "series_id", "in": "path", - "description": "Task ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -12180,30 +12560,37 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateSeriesTrackingRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Task queued for retry", + "description": "Tracking config updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MessageResponse" + "$ref": "#/components/schemas/SeriesTrackingDto" } } } }, - "400": { - "description": "Task is not in failed state" - }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { - "description": "Task not found" + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -12211,19 +12598,18 @@ ] } }, - "/api/v1/tasks/{task_id}/unlock": { + "/api/v1/series/{series_id}/unread": { "post": { "tags": [ - "Task Queue" + "Series" ], - "summary": "Unlock a stuck task", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "unlock_task", + "summary": "Mark all books in a series as unread", + "operationId": "mark_series_as_unread", "parameters": [ { - "name": "task_id", + "name": "series_id", "in": "path", - "description": "Task ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -12233,25 +12619,25 @@ ], "responses": { "200": { - "description": "Task unlocked successfully", + "description": "Series marked as unread", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MessageResponse" + "$ref": "#/components/schemas/MarkReadResponse" } } } }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { - "description": "Task not found" + "description": 
"Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -12259,55 +12645,64 @@ ] } }, - "/api/v1/user": { + "/api/v1/settings/branding": { "get": { "tags": [ - "Current User" + "Settings" ], - "summary": "Get the currently authenticated user's profile", - "operationId": "get_current_user", + "summary": "Get branding settings (unauthenticated)", + "description": "Returns branding-related settings that are needed on unauthenticated pages\nlike the login screen. This endpoint does not require authentication.", + "operationId": "get_branding_settings", "responses": { "200": { - "description": "Current user's profile with sharing tags", + "description": "Branding settings", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserDetailDto" + "$ref": "#/components/schemas/BrandingSettingsDto" + }, + "example": { + "applicationName": "Codex" } } } - }, - "401": { - "description": "Unauthorized" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] } - ] + } } }, - "/api/v1/user/exports/series": { + "/api/v1/settings/public": { "get": { "tags": [ - "Series Exports" + "Settings" ], - "summary": "GET /user/exports/series - List current user's exports", - "operationId": "list_exports", + "summary": "Get public display settings (authenticated users)", + "description": "Returns non-sensitive settings that affect UI/display behavior.\nThis endpoint is available to all authenticated users, not just admins.", + "operationId": "get_public_settings", "responses": { "200": { - "description": "List of exports", + "description": "Public settings", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExportListResponse" + "type": "object", + "additionalProperties": { + "$ref": "#/components/schemas/PublicSettingDto" + }, + "propertyNames": { + "type": "string" + } + }, + "example": { + "display.custom_metadata_template": { + "key": 
"display.custom_metadata_template", + "value": "{{#if custom_metadata}}## Additional Information\n{{#each custom_metadata}}- **{{@key}}**: {{this}}\n{{/each}}{{/if}}" + } } } } + }, + "401": { + "description": "Unauthorized" } }, "security": [ @@ -12318,112 +12713,152 @@ "api_key": [] } ] - }, + } + }, + "/api/v1/setup/initialize": { "post": { "tags": [ - "Series Exports" + "Setup" ], - "summary": "POST /user/exports/series - Create a new series export job", - "operationId": "create_export", + "summary": "Initialize application setup by creating the first admin user", + "description": "Creates the first admin user with email verification bypassed and returns a JWT token", + "operationId": "initialize_setup", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateSeriesExportRequest" + "$ref": "#/components/schemas/InitializeSetupRequest" } } }, "required": true }, "responses": { - "202": { - "description": "Export job created", + "200": { + "description": "Setup initialized", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExportDto" + "$ref": "#/components/schemas/InitializeSetupResponse" } } } }, "400": { - "description": "Invalid request" - }, - "409": { - "description": "Concurrent export limit reached" - } - }, - "security": [ - { - "jwt_bearer": [] + "description": "Invalid request or setup already completed" }, - { - "api_key": [] + "422": { + "description": "Validation error" } - ] + } } }, - "/api/v1/user/exports/series/fields": { - "get": { + "/api/v1/setup/settings": { + "patch": { "tags": [ - "Series Exports" + "Setup" ], - "summary": "GET /user/exports/series/fields - Get the field catalog", - "operationId": "get_field_catalog", + "summary": "Configure initial settings (optional step in setup wizard)", + "description": "Allows the newly created admin to configure database settings", + "operationId": "configure_initial_settings", + "requestBody": { + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ConfigureSettingsRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Field catalog", + "description": "Settings configured", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ExportFieldCatalogResponse" + "$ref": "#/components/schemas/ConfigureSettingsResponse" } } } + }, + "403": { + "description": "Forbidden - Admin only" } }, "security": [ { "jwt_bearer": [] - }, - { - "api_key": [] } ] } }, - "/api/v1/user/exports/series/{id}": { + "/api/v1/setup/status": { "get": { "tags": [ - "Series Exports" + "Setup" ], - "summary": "GET /user/exports/series/{id} - Get a single export's details", - "operationId": "get_export", + "summary": "Check if initial setup is required", + "description": "Returns whether the application needs initial setup (no users exist)", + "operationId": "setup_status", + "responses": { + "200": { + "description": "Setup status", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetupStatusResponse" + } + } + } + } + } + } + }, + "/api/v1/tags": { + "get": { + "tags": [ + "Tags" + ], + "summary": "List all tags", + "operationId": "list_tags", "parameters": [ { - "name": "id", - "in": "path", - "description": "Export ID", - "required": true, + "name": "page", + "in": "query", + "description": "Page number (1-indexed, default 1)", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "description": "Number of items per page (default 50, max 500)", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 } } ], "responses": { "200": { - "description": "Export details", + "description": "List of all tags", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExportDto" + "$ref": 
"#/components/schemas/PaginatedResponse_TagDto" } } } }, - "404": { - "description": "Export not found" + "403": { + "description": "Forbidden" } }, "security": [ @@ -12434,31 +12869,28 @@ "api_key": [] } ] - }, - "delete": { + } + }, + "/api/v1/tags/cleanup": { + "post": { "tags": [ - "Series Exports" - ], - "summary": "DELETE /user/exports/series/{id} - Delete an export and its file", - "operationId": "delete_export", - "parameters": [ - { - "name": "id", - "in": "path", - "description": "Export ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Tags" ], + "summary": "Delete all unused tags (tags with no series linked)", + "operationId": "cleanup_tags", "responses": { - "204": { - "description": "Export deleted" + "200": { + "description": "Cleanup completed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TaxonomyCleanupResponse" + } + } + } }, - "404": { - "description": "Export not found" + "403": { + "description": "Forbidden - admin only" } }, "security": [ @@ -12471,18 +12903,18 @@ ] } }, - "/api/v1/user/exports/series/{id}/download": { - "get": { + "/api/v1/tags/{tag_id}": { + "delete": { "tags": [ - "Series Exports" + "Tags" ], - "summary": "GET /user/exports/series/{id}/download - Download the export file", - "operationId": "download_export", + "summary": "Delete a tag from the taxonomy (admin only)", + "operationId": "delete_tag", "parameters": [ { - "name": "id", + "name": "tag_id", "in": "path", - "description": "Export ID", + "description": "Tag ID", "required": true, "schema": { "type": "string", @@ -12491,17 +12923,14 @@ } ], "responses": { - "200": { - "description": "Export file", - "content": { - "application/octet-stream": {} - } + "204": { + "description": "Tag deleted" }, - "404": { - "description": "Export not found or file missing" + "403": { + "description": "Forbidden - admin only" }, - "409": { - "description": "Export not yet completed" + "404": { + 
"description": "Tag not found" } }, "security": [ @@ -12514,164 +12943,87 @@ ] } }, - "/api/v1/user/plugins": { - "get": { - "tags": [ - "User Plugins" - ], - "summary": "List user's plugins (enabled and available)", - "description": "Returns both plugins the user has enabled and plugins available for them to enable.", - "operationId": "list_user_plugins", - "responses": { - "200": { - "description": "User plugins list", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UserPluginsListResponse" - } - } - } - }, - "401": { - "description": "Not authenticated" - } - } - } - }, - "/api/v1/user/plugins/oauth/callback": { + "/api/v1/tasks": { "get": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Handle OAuth callback from external provider", - "description": "This endpoint receives the callback after the user authenticates with the\nexternal service. It exchanges the authorization code for tokens and stores\nthem encrypted in the database.", - "operationId": "oauth_callback", + "summary": "List tasks with optional filtering", + "description": "# Permission Required\n- `tasks:read`", + "operationId": "list_tasks", "parameters": [ { - "name": "code", + "name": "status", "in": "query", - "description": "Authorization code from OAuth provider", - "required": true, + "required": false, "schema": { - "type": "string" + "type": [ + "string", + "null" + ] } }, { - "name": "state", + "name": "taskType", "in": "query", - "description": "State parameter for CSRF protection", - "required": true, + "required": false, "schema": { - "type": "string" + "type": [ + "string", + "null" + ] } - } - ], - "responses": { - "200": { - "description": "HTML page that auto-closes the popup" }, - "400": { - "description": "Invalid callback parameters" - } - } - } - }, - "/api/v1/user/plugins/{plugin_id}": { - "get": { - "tags": [ - "User Plugins" - ], - "summary": "Get a single user plugin instance", - "description": "Returns detailed status for a 
plugin the user has enabled.", - "operationId": "get_user_plugin", - "parameters": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID", - "required": true, + "name": "limit", + "in": "query", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int64", + "minimum": 0 } } ], "responses": { "200": { - "description": "User plugin details", + "description": "Tasks retrieved successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPluginDto" + "type": "array", + "items": { + "$ref": "#/components/schemas/TaskResponse" + } } } } }, - "401": { - "description": "Not authenticated" - }, - "404": { - "description": "Plugin not enabled for this user" + "403": { + "description": "Permission denied" } - } - }, - "delete": { - "tags": [ - "User Plugins" - ], - "summary": "Disconnect a plugin (remove data and credentials)", - "operationId": "disconnect_plugin", - "parameters": [ + }, + "security": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to disconnect", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } - ], - "responses": { - "200": { - "description": "Plugin disconnected and data removed" - }, - "401": { - "description": "Not authenticated" + "bearer_auth": [] }, - "404": { - "description": "Plugin not enabled for this user" - } - } - } - }, - "/api/v1/user/plugins/{plugin_id}/config": { - "patch": { - "tags": [ - "User Plugins" - ], - "summary": "Update user plugin configuration", - "description": "Allows the user to set per-user configuration overrides for their plugin instance.", - "operationId": "update_user_plugin_config", - "parameters": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to update config for", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } + "api_key": [] } + ] + }, + "post": { + "tags": [ + "Task Queue" ], + "summary": "Create a new 
task", + "description": "# Permission Required\n- `tasks:write`", + "operationId": "create_task", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdateUserPluginConfigRequest" + "$ref": "#/components/schemas/CreateTaskRequest" } } }, @@ -12679,210 +13031,188 @@ }, "responses": { "200": { - "description": "Configuration updated", + "description": "Task created successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPluginDto" + "$ref": "#/components/schemas/CreateTaskResponse" } } } }, "400": { - "description": "Invalid configuration" + "description": "Invalid request" }, - "401": { - "description": "Not authenticated" + "403": { + "description": "Permission denied" + } + }, + "security": [ + { + "bearer_auth": [] }, - "404": { - "description": "Plugin not enabled for this user" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/plugins/{plugin_id}/credentials": { - "post": { + "/api/v1/tasks/nuke": { + "delete": { "tags": [ - "User Plugins" - ], - "summary": "Set user credentials (personal access token) for a plugin", - "description": "Allows users to authenticate by pasting a personal access token\ninstead of going through the OAuth flow.", - "operationId": "set_user_credentials", - "parameters": [ - { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to set credentials for", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Task Queue" ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SetUserCredentialsRequest" - } - } - }, - "required": true - }, + "summary": "Nuclear option: Delete ALL tasks", + "description": "# Permission Required\n- `admin`", + "operationId": "nuke_all_tasks", "responses": { "200": { - "description": "Credentials stored", + "description": "All tasks deleted", "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/UserPluginDto" + "$ref": "#/components/schemas/PurgeTasksResponse" } } } }, - "400": { - "description": "Invalid request" - }, - "401": { - "description": "Not authenticated" - }, - "404": { - "description": "Plugin not enabled for this user" + "403": { + "description": "Permission denied (admin only)" } - } - } - }, - "/api/v1/user/plugins/{plugin_id}/disable": { - "post": { - "tags": [ - "User Plugins" - ], - "summary": "Disable a plugin for the current user", - "operationId": "disable_user_plugin", - "parameters": [ + }, + "security": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to disable", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } - ], - "responses": { - "200": { - "description": "Plugin disabled" - }, - "401": { - "description": "Not authenticated" + "bearer_auth": [] }, - "404": { - "description": "Plugin not enabled for this user" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/plugins/{plugin_id}/enable": { - "post": { + "/api/v1/tasks/purge": { + "delete": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Enable a plugin for the current user", - "operationId": "enable_user_plugin", + "summary": "Purge old completed/failed tasks", + "description": "# Permission Required\n- `tasks:write`", + "operationId": "purge_old_tasks", "parameters": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to enable", - "required": true, + "name": "days", + "in": "query", + "description": "Delete tasks older than N days (default: 30)", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int64" } } ], "responses": { "200": { - "description": "Plugin enabled", + "description": "Tasks purged successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPluginDto" + "$ref": "#/components/schemas/PurgeTasksResponse" } } } }, - "400": { - "description": "Plugin is not a 
user plugin or not available" - }, - "401": { - "description": "Not authenticated" + "403": { + "description": "Permission denied" + } + }, + "security": [ + { + "bearer_auth": [] }, - "409": { - "description": "Plugin already enabled for this user" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/plugins/{plugin_id}/oauth/start": { - "post": { + "/api/v1/tasks/stats": { + "get": { "tags": [ - "User Plugins" - ], - "summary": "Start OAuth flow for a user plugin", - "description": "Generates an authorization URL and returns it to the client.\nThe client should open this URL in a popup or redirect the user.", - "operationId": "oauth_start", - "parameters": [ - { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to start OAuth for", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Task Queue" ], + "summary": "Get queue statistics", + "description": "# Permission Required\n- `tasks:read`", + "operationId": "get_task_stats", "responses": { "200": { - "description": "OAuth authorization URL generated", + "description": "Statistics retrieved successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/OAuthStartResponse" + "$ref": "#/components/schemas/TaskStats" } } } }, - "400": { - "description": "Plugin does not support OAuth or not configured" + "403": { + "description": "Permission denied" + } + }, + "security": [ + { + "bearer_auth": [] + }, + { + "api_key": [] + } + ] + } + }, + "/api/v1/tasks/stream": { + "get": { + "tags": [ + "Events" + ], + "summary": "Subscribe to real-time task progress events via SSE", + "description": "Clients can subscribe to this endpoint to receive real-time notifications\nabout background task progress (analyze_book, generate_thumbnails, etc.).\n\n## Authentication\nRequires valid authentication with `LibrariesRead` permission.\n\n## Event Format\nEvents are sent as JSON-encoded `TaskProgressEvent` objects with the following structure:\n```json\n{\n 
\"task_id\": \"uuid\",\n \"task_type\": \"analyze_book\",\n \"status\": \"running\",\n \"progress\": {\n \"current\": 5,\n \"total\": 10,\n \"message\": \"Processing book 5 of 10\"\n },\n \"started_at\": \"2024-01-06T12:00:00Z\",\n \"library_id\": \"uuid\"\n}\n```\n\n## Keep-Alive\nA keep-alive message is sent every 15 seconds to prevent connection timeout.", + "operationId": "task_progress_stream", + "responses": { + "200": { + "description": "SSE stream of task progress events", + "content": { + "text/event-stream": {} + } }, "401": { - "description": "Not authenticated" + "description": "Unauthorized" }, - "404": { - "description": "Plugin not found or not enabled" + "403": { + "description": "Forbidden" } - } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] } }, - "/api/v1/user/plugins/{plugin_id}/sync": { - "post": { + "/api/v1/tasks/{task_id}": { + "get": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Trigger a sync operation for a user plugin", - "description": "Enqueues a background sync task that will push/pull reading progress\nbetween Codex and the external service.", - "operationId": "trigger_sync", + "summary": "Get task by ID", + "description": "# Permission Required\n- `tasks:read`", + "operationId": "get_task", "parameters": [ { - "name": "plugin_id", + "name": "task_id", "in": "path", - "description": "Plugin ID to sync", + "description": "Task ID", "required": true, "schema": { "type": "string", @@ -12892,132 +13222,127 @@ ], "responses": { "200": { - "description": "Sync task enqueued", + "description": "Task retrieved successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SyncTriggerResponse" + "$ref": "#/components/schemas/TaskResponse" } } } }, - "400": { - "description": "Plugin is not a sync provider or not connected" - }, - "401": { - "description": "Not authenticated" + "403": { + "description": "Permission denied" }, "404": { - "description": "Plugin not 
enabled for this user" + "description": "Task not found" + } + }, + "security": [ + { + "bearer_auth": [] }, - "409": { - "description": "Sync already in progress" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/plugins/{plugin_id}/sync/status": { - "get": { + "/api/v1/tasks/{task_id}/cancel": { + "post": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Get sync status for a user plugin", - "description": "Returns the current sync status including last sync time, health, and failure count.\nPass `?live=true` to also query the plugin process for live sync state (pending push/pull,\nconflicts, external entry count). This spawns the plugin process and is more expensive.", - "operationId": "get_sync_status", + "summary": "Cancel a task", + "description": "# Permission Required\n- `tasks:write`", + "operationId": "cancel_task", "parameters": [ { - "name": "plugin_id", + "name": "task_id", "in": "path", - "description": "Plugin ID to check sync status", + "description": "Task ID", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "name": "live", - "in": "query", - "description": "If true, spawn the plugin process and query live sync state\n(external count, pending push/pull, conflicts).\nDefault: false (returns database-stored metadata only).", - "required": false, - "schema": { - "type": "boolean" - } } ], "responses": { "200": { - "description": "Sync status", + "description": "Task cancelled successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SyncStatusDto" + "$ref": "#/components/schemas/MessageResponse" } } } }, - "401": { - "description": "Not authenticated" + "400": { + "description": "Task cannot be cancelled" + }, + "403": { + "description": "Permission denied" }, "404": { - "description": "Plugin not enabled for this user" + "description": "Task not found" } - } + }, + "security": [ + { + "bearer_auth": [] + }, + { + "api_key": [] + } + ] } }, - 
"/api/v1/user/plugins/{plugin_id}/tasks": { - "get": { + "/api/v1/tasks/{task_id}/retry": { + "post": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Get the latest task for a user plugin", - "description": "Returns the most recent background task for this user+plugin combination.\nUse the `?type=user_plugin_sync` query parameter to filter by task type.\n\nThis endpoint is user-scoped and does NOT require `TasksRead` permission.\nOnly the authenticated user's own tasks are returned.", - "operationId": "get_plugin_tasks", + "summary": "Retry a failed task", + "description": "# Permission Required\n- `tasks:write`", + "operationId": "retry_task", "parameters": [ { - "name": "plugin_id", + "name": "task_id", "in": "path", - "description": "Plugin ID", + "description": "Task ID", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "name": "type", - "in": "query", - "description": "Filter by task type (e.g., \"user_plugin_sync\").\nIf omitted, returns the latest task of any type for this plugin.", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } } ], "responses": { "200": { - "description": "Latest task found", + "description": "Task queued for retry", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPluginTaskDto" + "$ref": "#/components/schemas/MessageResponse" } } } }, - "401": { - "description": "Not authenticated" + "400": { + "description": "Task is not in failed state" + }, + "403": { + "description": "Permission denied" }, "404": { - "description": "No tasks found for this plugin" + "description": "Task not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -13025,26 +13350,42 @@ ] } }, - "/api/v1/user/preferences": { - "get": { + "/api/v1/tasks/{task_id}/unlock": { + "post": { "tags": [ - "User Preferences" + "Task Queue" + ], + "summary": "Unlock a stuck task", + "description": "# Permission Required\n- `tasks:write`", + 
"operationId": "unlock_task", + "parameters": [ + { + "name": "task_id", + "in": "path", + "description": "Task ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Get all preferences for the authenticated user", - "operationId": "get_all_preferences", "responses": { "200": { - "description": "User preferences retrieved", + "description": "Task unlocked successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPreferencesResponse" + "$ref": "#/components/schemas/MessageResponse" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Permission denied" + }, + "404": { + "description": "Task not found" } }, "security": [ @@ -13055,44 +13396,33 @@ "api_key": [] } ] - }, - "put": { + } + }, + "/api/v1/user": { + "get": { "tags": [ - "User Preferences" + "Current User" ], - "summary": "Set multiple preferences at once", - "operationId": "set_bulk_preferences", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkSetPreferencesRequest" - } - } - }, - "required": true - }, + "summary": "Get the currently authenticated user's profile", + "operationId": "get_current_user", "responses": { "200": { - "description": "Preferences updated successfully", + "description": "Current user's profile with sharing tags", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SetPreferencesResponse" + "$ref": "#/components/schemas/UserDetailDto" } } } }, - "400": { - "description": "Invalid preference key or value" - }, "401": { "description": "Unauthorized" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -13100,140 +13430,100 @@ ] } }, - "/api/v1/user/preferences/{key}": { + "/api/v1/user/exports/series": { "get": { "tags": [ - "User Preferences" - ], - "summary": "Get a single preference by key", - "operationId": "get_preference", - "parameters": [ - { - 
"name": "key", - "in": "path", - "description": "Preference key (e.g., 'ui.theme')", - "required": true, - "schema": { - "type": "string" - } - } + "Series Exports" ], + "summary": "GET /user/exports/series - List current user's exports", + "operationId": "list_exports", "responses": { "200": { - "description": "Preference retrieved", + "description": "List of exports", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPreferenceDto" + "$ref": "#/components/schemas/SeriesExportListResponse" } } } - }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Preference not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] }, - "put": { + "post": { "tags": [ - "User Preferences" - ], - "summary": "Set a single preference value", - "operationId": "set_preference", - "parameters": [ - { - "name": "key", - "in": "path", - "description": "Preference key (e.g., 'ui.theme')", - "required": true, - "schema": { - "type": "string" - } - } + "Series Exports" ], + "summary": "POST /user/exports/series - Create a new series export job", + "operationId": "create_export", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SetPreferenceRequest" + "$ref": "#/components/schemas/CreateSeriesExportRequest" } } }, "required": true }, "responses": { - "200": { - "description": "Preference set successfully", + "202": { + "description": "Export job created", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPreferenceDto" + "$ref": "#/components/schemas/SeriesExportDto" } } } }, "400": { - "description": "Invalid preference value" + "description": "Invalid request" }, - "401": { - "description": "Unauthorized" + "409": { + "description": "Concurrent export limit reached" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] - }, - "delete": { + } + }, + "/api/v1/user/exports/series/fields": { 
+ "get": { "tags": [ - "User Preferences" - ], - "summary": "Delete (reset) a preference to its default", - "operationId": "delete_preference", - "parameters": [ - { - "name": "key", - "in": "path", - "description": "Preference key to delete", - "required": true, - "schema": { - "type": "string" - } - } + "Series Exports" ], + "summary": "GET /user/exports/series/fields - Get the field catalog", + "operationId": "get_field_catalog", "responses": { "200": { - "description": "Preference deleted", + "description": "Field catalog", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DeletePreferenceResponse" + "$ref": "#/components/schemas/ExportFieldCatalogResponse" } } } - }, - "401": { - "description": "Unauthorized" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -13241,26 +13531,38 @@ ] } }, - "/api/v1/user/ratings": { + "/api/v1/user/exports/series/{id}": { "get": { "tags": [ - "Ratings" + "Series Exports" + ], + "summary": "GET /user/exports/series/{id} - Get a single export's details", + "operationId": "get_export", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Export ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "List all of the current user's ratings", - "operationId": "list_user_ratings", "responses": { "200": { - "description": "List of user's ratings", + "description": "Export details", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserRatingsListResponse" + "$ref": "#/components/schemas/SeriesExportDto" } } } }, - "403": { - "description": "Forbidden" + "404": { + "description": "Export not found" } }, "security": [ @@ -13271,296 +13573,162 @@ "api_key": [] } ] - } - }, - "/api/v1/user/recommendations": { - "get": { + }, + "delete": { "tags": [ - "Recommendations" + "Series Exports" ], - "summary": "Get personalized recommendations", - "description": "Returns cached recommendations 
from the database. If no cached data exists\nor the data is stale, an empty list is returned and a background refresh\ntask is auto-triggered. The frontend should use SSE task progress events\nto know when fresh data is ready.", - "operationId": "get_recommendations", - "responses": { - "200": { - "description": "Personalized recommendations", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RecommendationsResponse" - } - } + "summary": "DELETE /user/exports/series/{id} - Delete an export and its file", + "operationId": "delete_export", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Export ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } - }, - "401": { - "description": "Not authenticated" - }, - "404": { - "description": "No recommendation plugin enabled" } - } - } - }, - "/api/v1/user/recommendations/refresh": { - "post": { - "tags": [ - "Recommendations" ], - "summary": "Refresh recommendations", - "description": "Enqueues a background task to regenerate recommendations by clearing\nthe cache and updating the taste profile.", - "operationId": "refresh_recommendations", "responses": { - "200": { - "description": "Refresh task enqueued", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RecommendationsRefreshResponse" - } - } - } - }, - "401": { - "description": "Not authenticated" + "204": { + "description": "Export deleted" }, "404": { - "description": "No recommendation plugin enabled" + "description": "Export not found" + } + }, + "security": [ + { + "jwt_bearer": [] }, - "409": { - "description": "Recommendation refresh already in progress" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/recommendations/{external_id}/dismiss": { - "post": { + "/api/v1/user/exports/series/{id}/download": { + "get": { "tags": [ - "Recommendations" + "Series Exports" ], - "summary": "Dismiss a recommendation", - "description": "Removes the 
recommendation from the cached list immediately and enqueues\na background task to notify the plugin asynchronously. Returns instantly.", - "operationId": "dismiss_recommendation", + "summary": "GET /user/exports/series/{id}/download - Download the export file", + "operationId": "download_export", "parameters": [ { - "name": "external_id", + "name": "id", "in": "path", - "description": "External ID of the recommendation to dismiss", + "description": "Export ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DismissRecommendationRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Recommendation dismissed", + "description": "Export file", "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DismissRecommendationResponse" - } - } + "application/octet-stream": {} } }, - "401": { - "description": "Not authenticated" - }, "404": { - "description": "No recommendation plugin enabled" + "description": "Export not found or file missing" + }, + "409": { + "description": "Export not yet completed" } - } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] } }, - "/api/v1/user/sharing-tags": { + "/api/v1/user/plugins": { "get": { "tags": [ - "Sharing Tags" + "User Plugins" ], - "summary": "Get current user's sharing tag grants", - "operationId": "get_my_sharing_tags", + "summary": "List user's plugins (enabled and available)", + "description": "Returns both plugins the user has enabled and plugins available for them to enable.", + "operationId": "list_user_plugins", "responses": { "200": { - "description": "List of sharing tag grants for the current user", + "description": "User plugins list", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserSharingTagGrantsResponse" + "$ref": 
"#/components/schemas/UserPluginsListResponse" } } } - } - }, - "security": [ - { - "jwt_bearer": [] }, - { - "api_key": [] + "401": { + "description": "Not authenticated" } - ] + } } }, - "/api/v1/users": { + "/api/v1/user/plugins/oauth/callback": { "get": { "tags": [ - "Users" + "User Plugins" ], - "summary": "List all users (admin only) with pagination and filtering", - "operationId": "list_users", + "summary": "Handle OAuth callback from external provider", + "description": "This endpoint receives the callback after the user authenticates with the\nexternal service. It exchanges the authorization code for tokens and stores\nthem encrypted in the database.", + "operationId": "oauth_callback", "parameters": [ { - "name": "role", - "in": "query", - "description": "Filter by role", - "required": false, - "schema": { - "oneOf": [ - { - "type": "null" - }, - { - "$ref": "#/components/schemas/UserRole" - } - ] - } - }, - { - "name": "sharingTag", - "in": "query", - "description": "Filter by sharing tag name (users who have a grant for this tag)", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "sharingTagMode", - "in": "query", - "description": "Filter by sharing tag access mode (allow/deny) - only used with sharing_tag", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "page", + "name": "code", "in": "query", - "description": "Page number (1-indexed, default 1)", - "required": false, + "description": "Authorization code from OAuth provider", + "required": true, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": "string" } }, { - "name": "pageSize", + "name": "state", "in": "query", - "description": "Number of items per page (max 100, default 50)", - "required": false, + "description": "State parameter for CSRF protection", + "required": true, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": "string" } } ], "responses": 
{ "200": { - "description": "Paginated list of users", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PaginatedResponse_UserDto" - } - } - } - }, - "403": { - "description": "Forbidden - Admin only" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - }, - "post": { - "tags": [ - "Users" - ], - "summary": "Create a new user (admin only)", - "operationId": "create_user", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateUserRequest" - } - } - }, - "required": true - }, - "responses": { - "201": { - "description": "User created", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UserDto" - } - } - } + "description": "HTML page that auto-closes the popup" }, "400": { - "description": "Invalid request" - }, - "403": { - "description": "Forbidden - Admin only" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] + "description": "Invalid callback parameters" } - ] + } } }, - "/api/v1/users/{user_id}": { + "/api/v1/user/plugins/{plugin_id}": { "get": { "tags": [ - "Users" + "User Plugins" ], - "summary": "Get user by ID (admin only)", - "operationId": "get_user", + "summary": "Get a single user plugin instance", + "description": "Returns detailed status for a plugin the user has enabled.", + "operationId": "get_user_plugin", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", + "description": "Plugin ID", "required": true, "schema": { "type": "string", @@ -13570,42 +13738,34 @@ ], "responses": { "200": { - "description": "User details with sharing tags", + "description": "User plugin details", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserDetailDto" + "$ref": "#/components/schemas/UserPluginDto" } } } }, - "403": { - "description": "Forbidden - Admin only" + "401": { + "description": "Not authenticated" 
}, "404": { - "description": "User not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] + "description": "Plugin not enabled for this user" } - ] + } }, "delete": { "tags": [ - "Users" + "User Plugins" ], - "summary": "Delete a user (admin only)", - "operationId": "delete_user", + "summary": "Disconnect a plugin (remove data and credentials)", + "operationId": "disconnect_plugin", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", + "description": "Plugin ID to disconnect", "required": true, "schema": { "type": "string", @@ -13614,36 +13774,31 @@ } ], "responses": { - "204": { - "description": "User deleted" + "200": { + "description": "Plugin disconnected and data removed" }, - "403": { - "description": "Forbidden - Admin only" + "401": { + "description": "Not authenticated" }, "404": { - "description": "User not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] + "description": "Plugin not enabled for this user" } - ] - }, + } + } + }, + "/api/v1/user/plugins/{plugin_id}/config": { "patch": { "tags": [ - "Users" + "User Plugins" ], - "summary": "Update a user (admin only, partial update)", - "operationId": "update_user", + "summary": "Update user plugin configuration", + "description": "Allows the user to set per-user configuration overrides for their plugin instance.", + "operationId": "update_user_plugin_config", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", + "description": "Plugin ID to update config for", "required": true, "schema": { "type": "string", @@ -13655,7 +13810,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdateUserRequest" + "$ref": "#/components/schemas/UpdateUserPluginConfigRequest" } } }, @@ -13663,86 +13818,40 @@ }, "responses": { "200": { - "description": "User updated", + "description": "Configuration updated", "content": { 
"application/json": { "schema": { - "$ref": "#/components/schemas/UserDto" + "$ref": "#/components/schemas/UserPluginDto" } } } }, - "403": { - "description": "Forbidden - Admin only" + "400": { + "description": "Invalid configuration" }, - "404": { - "description": "User not found" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "404": { + "description": "Plugin not enabled for this user" } - ] + } } }, - "/api/v1/users/{user_id}/sharing-tags": { - "get": { + "/api/v1/user/plugins/{plugin_id}/credentials": { + "post": { "tags": [ - "Sharing Tags" + "User Plugins" ], - "summary": "Get sharing tag grants for a user (admin only)", - "operationId": "get_user_sharing_tags", + "summary": "Set user credentials (personal access token) for a plugin", + "description": "Allows users to authenticate by pasting a personal access token\ninstead of going through the OAuth flow.", + "operationId": "set_user_credentials", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } - ], - "responses": { - "200": { - "description": "List of sharing tag grants for the user", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UserSharingTagGrantsResponse" - } - } - } - }, - "403": { - "description": "Forbidden - Missing permission" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - }, - "put": { - "tags": [ - "Sharing Tags" - ], - "summary": "Set a user's sharing tag grant (admin only)", - "operationId": "set_user_sharing_tag", - "parameters": [ - { - "name": "user_id", - "in": "path", - "description": "User ID", + "description": "Plugin ID to set credentials for", "required": true, "schema": { "type": "string", @@ -13754,7 +13863,7 @@ "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/SetUserSharingTagGrantRequest" + "$ref": "#/components/schemas/SetUserCredentialsRequest" } } }, @@ -13762,54 +13871,71 @@ }, "responses": { "200": { - "description": "Sharing tag grant set", + "description": "Credentials stored", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserSharingTagGrantDto" + "$ref": "#/components/schemas/UserPluginDto" } } } }, - "403": { - "description": "Forbidden - Missing permission" + "400": { + "description": "Invalid request" }, - "404": { - "description": "Sharing tag not found" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "404": { + "description": "Plugin not enabled for this user" } - ] + } } }, - "/api/v1/users/{user_id}/sharing-tags/{tag_id}": { - "delete": { + "/api/v1/user/plugins/{plugin_id}/disable": { + "post": { "tags": [ - "Sharing Tags" + "User Plugins" ], - "summary": "Remove a user's sharing tag grant (admin only)", - "operationId": "remove_user_sharing_tag", + "summary": "Disable a plugin for the current user", + "operationId": "disable_user_plugin", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", + "description": "Plugin ID to disable", "required": true, "schema": { "type": "string", "format": "uuid" } + } + ], + "responses": { + "200": { + "description": "Plugin disabled" }, + "401": { + "description": "Not authenticated" + }, + "404": { + "description": "Plugin not enabled for this user" + } + } + } + }, + "/api/v1/user/plugins/{plugin_id}/enable": { + "post": { + "tags": [ + "User Plugins" + ], + "summary": "Enable a plugin for the current user", + "operationId": "enable_user_plugin", + "parameters": [ { - "name": "tag_id", + "name": "plugin_id", "in": "path", - "description": "Sharing tag ID", + "description": "Plugin ID to enable", "required": true, "schema": { "type": "string", @@ -13818,86 +13944,84 @@ } ], "responses": { - 
"204": { - "description": "Sharing tag grant removed" + "200": { + "description": "Plugin enabled", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPluginDto" + } + } + } }, - "403": { - "description": "Forbidden - Missing permission" + "400": { + "description": "Plugin is not a user plugin or not available" }, - "404": { - "description": "Grant not found" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "409": { + "description": "Plugin already enabled for this user" } - ] + } } }, - "/health": { - "get": { + "/api/v1/user/plugins/{plugin_id}/oauth/start": { + "post": { "tags": [ - "Health" + "User Plugins" ], - "summary": "Health check endpoint - checks database connectivity", - "description": "Returns \"OK\" with 200 status if database is healthy,\nor \"Service Unavailable\" with 503 status if database check fails.", - "operationId": "health_check", - "responses": { - "200": { - "description": "Service is healthy" - }, - "503": { - "description": "Service is unavailable" + "summary": "Start OAuth flow for a user plugin", + "description": "Generates an authorization URL and returns it to the client.\nThe client should open this URL in a popup or redirect the user.", + "operationId": "oauth_start", + "parameters": [ + { + "name": "plugin_id", + "in": "path", + "description": "Plugin ID to start OAuth for", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } - } - } - }, - "/opds": { - "get": { - "tags": [ - "OPDS" ], - "summary": "Root OPDS catalog", - "description": "Returns the main navigation feed with links to:\n- All libraries\n- Search\n- Recent additions", - "operationId": "root_catalog", "responses": { "200": { - "description": "OPDS root catalog", + "description": "OAuth authorization URL generated", "content": { - "application/atom+xml": {} + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/OAuthStartResponse" + } + } } }, - "403": { - "description": "Forbidden" - } - }, - "security": [ - { - "jwt_bearer": [] + "400": { + "description": "Plugin does not support OAuth or not configured" }, - { - "api_key": [] + "401": { + "description": "Not authenticated" + }, + "404": { + "description": "Plugin not found or not enabled" } - ] + } } }, - "/opds/books/{book_id}/pages": { - "get": { + "/api/v1/user/plugins/{plugin_id}/sync": { + "post": { "tags": [ - "OPDS" + "User Plugins" ], - "summary": "OPDS-PSE: List all pages in a book", - "description": "Returns a PSE page feed with individual page links for streaming.\nThis allows OPDS clients to read books page-by-page without downloading the entire file.", - "operationId": "opds_book_pages", + "summary": "Trigger a sync operation for a user plugin", + "description": "Enqueues a background sync task that will push/pull reading progress\nbetween Codex and the external service.", + "operationId": "trigger_sync", "parameters": [ { - "name": "book_id", + "name": "plugin_id", "in": "path", - "description": "Book ID", + "description": "Plugin ID to sync", "required": true, "schema": { "type": "string", @@ -13907,41 +14031,43 @@ ], "responses": { "200": { - "description": "OPDS-PSE page feed", + "description": "Sync task enqueued", "content": { - "application/atom+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/SyncTriggerResponse" + } + } } }, - "403": { - "description": "Forbidden" + "400": { + "description": "Plugin is not a sync provider or not connected" + }, + "401": { + "description": "Not authenticated" }, "404": { - "description": "Book not found" - } - }, - "security": [ - { - "jwt_bearer": [] + "description": "Plugin not enabled for this user" }, - { - "api_key": [] + "409": { + "description": "Sync already in progress" } - ] + } } }, - "/opds/books/{book_id}/pages/{page_number}": { + "/api/v1/user/plugins/{plugin_id}/sync/status": { "get": { "tags": [ - 
"OPDS" + "User Plugins" ], - "summary": "OPDS-PSE: Get a page image with reading progress tracking", - "description": "Serves the page image (delegating to the v1 handler) and records reading\nprogress via the batching service. This is the endpoint used by OPDS PSE\nclients that read page-by-page and need implicit progress tracking, since\nthey don't have a JavaScript frontend to send explicit progress updates.", - "operationId": "opds_book_page_image", + "summary": "Get sync status for a user plugin", + "description": "Returns the current sync status including last sync time, health, and failure count.\nPass `?live=true` to also query the plugin process for live sync state (pending push/pull,\nconflicts, external entry count). This spawns the plugin process and is more expensive.", + "operationId": "get_sync_status", "parameters": [ { - "name": "book_id", + "name": "plugin_id", "in": "path", - "description": "Book ID", + "description": "Plugin ID to check sync status", "required": true, "schema": { "type": "string", @@ -13949,82 +14075,48 @@ } }, { - "name": "page_number", - "in": "path", - "description": "Page number (1-indexed)", - "required": true, + "name": "live", + "in": "query", + "description": "If true, spawn the plugin process and query live sync state\n(external count, pending push/pull, conflicts).\nDefault: false (returns database-stored metadata only).", + "required": false, "schema": { - "type": "integer", - "format": "int32" + "type": "boolean" } } ], "responses": { "200": { - "description": "Page image (also records reading progress)", + "description": "Sync status", "content": { - "image/jpeg": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/SyncStatusDto" + } + } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Not authenticated" }, "404": { - "description": "Book or page not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - } - }, - "/opds/libraries": { - 
"get": { - "tags": [ - "OPDS" - ], - "summary": "List all libraries", - "description": "Returns a navigation feed with all available libraries", - "operationId": "opds_list_libraries", - "responses": { - "200": { - "description": "OPDS libraries feed", - "content": { - "application/atom+xml": {} - } - }, - "403": { - "description": "Forbidden" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] + "description": "Plugin not enabled for this user" } - ] + } } }, - "/opds/libraries/{library_id}": { + "/api/v1/user/plugins/{plugin_id}/tasks": { "get": { "tags": [ - "OPDS" + "User Plugins" ], - "summary": "List series in a library", - "description": "Returns an acquisition feed with all series in the specified library", - "operationId": "opds_library_series", + "summary": "Get the latest task for a user plugin", + "description": "Returns the most recent background task for this user+plugin combination.\nUse the `?type=user_plugin_sync` query parameter to filter by task type.\n\nThis endpoint is user-scoped and does NOT require `TasksRead` permission.\nOnly the authenticated user's own tasks are returned.", + "operationId": "get_plugin_tasks", "parameters": [ { - "name": "library_id", + "name": "plugin_id", "in": "path", - "description": "Library ID", + "description": "Plugin ID", "required": true, "schema": { "type": "string", @@ -14032,38 +14124,34 @@ } }, { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - }, - { - "name": "pageSize", + "name": "type", "in": "query", + "description": "Filter by task type (e.g., \"user_plugin_sync\").\nIf omitted, returns the latest task of any type for this plugin.", "required": false, "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 + "type": [ + "string", + "null" + ] } } ], "responses": { "200": { - "description": "OPDS library series feed", + "description": "Latest task found", "content": { - 
"application/atom+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPluginTaskDto" + } + } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Not authenticated" }, "404": { - "description": "Library not found" + "description": "No tasks found for this plugin" } }, "security": [ @@ -14076,65 +14164,74 @@ ] } }, - "/opds/search": { + "/api/v1/user/preferences": { "get": { "tags": [ - "OPDS" - ], - "summary": "OPDS search endpoint", - "description": "Searches books and series by title and returns an OPDS acquisition feed", - "operationId": "opds_search", - "parameters": [ - { - "name": "q", - "in": "query", - "description": "Search query string", - "required": true, - "schema": { - "type": "string" - } - } + "User Preferences" ], + "summary": "Get all preferences for the authenticated user", + "operationId": "get_all_preferences", "responses": { "200": { - "description": "OPDS search results", + "description": "User preferences retrieved", "content": { - "application/atom+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPreferencesResponse" + } + } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Unauthorized" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - } - }, - "/opds/search.xml": { - "get": { + }, + "put": { "tags": [ - "OPDS" + "User Preferences" ], - "summary": "OpenSearch descriptor endpoint", - "description": "Returns the OpenSearch XML descriptor for OPDS clients", - "operationId": "opds_opensearch_descriptor", + "summary": "Set multiple preferences at once", + "operationId": "set_bulk_preferences", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BulkSetPreferencesRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "OpenSearch descriptor", + "description": "Preferences updated successfully", "content": { - 
"application/opensearchdescription+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetPreferencesResponse" + } + } } + }, + "400": { + "description": "Invalid preference key or value" + }, + "401": { + "description": "Unauthorized" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -14142,109 +14239,140 @@ ] } }, - "/opds/series/{series_id}": { + "/api/v1/user/preferences/{key}": { "get": { "tags": [ - "OPDS" + "User Preferences" ], - "summary": "List books in a series", - "description": "Returns an acquisition feed with all books in the specified series", - "operationId": "opds_series_books", + "summary": "Get a single preference by key", + "operationId": "get_preference", "parameters": [ { - "name": "series_id", + "name": "key", "in": "path", - "description": "Series ID", + "description": "Preference key (e.g., 'ui.theme')", "required": true, "schema": { - "type": "string", - "format": "uuid" + "type": "string" } } ], "responses": { "200": { - "description": "OPDS series books feed", + "description": "Preference retrieved", "content": { - "application/atom+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPreferenceDto" + } + } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "Preference not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - } - }, - "/opds/v2": { - "get": { + }, + "put": { "tags": [ - "OPDS 2.0" + "User Preferences" ], - "summary": "Root OPDS 2.0 catalog", - "description": "Returns the main navigation feed with links to:\n- All libraries\n- Search\n- Recent additions", - "operationId": "opds2_root", + "summary": "Set a single preference value", + "operationId": "set_preference", + "parameters": [ + { + "name": "key", + "in": "path", + "description": "Preference key (e.g., 'ui.theme')", + "required": true, + 
"schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetPreferenceRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "OPDS 2.0 root catalog", + "description": "Preference set successfully", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/UserPreferenceDto" } } } }, - "403": { - "description": "Forbidden" + "400": { + "description": "Invalid preference value" + }, + "401": { + "description": "Unauthorized" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - } - }, - "/opds/v2/libraries": { - "get": { + }, + "delete": { "tags": [ - "OPDS 2.0" + "User Preferences" + ], + "summary": "Delete (reset) a preference to its default", + "operationId": "delete_preference", + "parameters": [ + { + "name": "key", + "in": "path", + "description": "Preference key to delete", + "required": true, + "schema": { + "type": "string" + } + } ], - "summary": "List all libraries (OPDS 2.0)", - "description": "Returns a navigation feed with all available libraries", - "operationId": "opds2_libraries", "responses": { "200": { - "description": "OPDS 2.0 libraries feed", + "description": "Preference deleted", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/DeletePreferenceResponse" } } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Unauthorized" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -14252,62 +14380,26 @@ ] } }, - "/opds/v2/libraries/{library_id}": { + "/api/v1/user/ratings": { "get": { "tags": [ - "OPDS 2.0" - ], - "summary": "List series in a library (OPDS 2.0)", - "description": "Returns a navigation feed with all series in the specified library", - 
"operationId": "opds2_library_series", - "parameters": [ - { - "name": "library_id", - "in": "path", - "description": "Library ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - }, - { - "name": "pageSize", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - } + "Ratings" ], + "summary": "List all of the current user's ratings", + "operationId": "list_user_ratings", "responses": { "200": { - "description": "OPDS 2.0 library series feed", + "description": "List of user's ratings", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/UserRatingsListResponse" } } } }, "403": { "description": "Forbidden" - }, - "404": { - "description": "Library not found" } }, "security": [ @@ -14320,144 +14412,131 @@ ] } }, - "/opds/v2/recent": { + "/api/v1/user/recommendations": { "get": { "tags": [ - "OPDS 2.0" + "Recommendations" ], - "summary": "List recent additions (OPDS 2.0)", - "description": "Returns a publications feed with recently added books", - "operationId": "opds2_recent", - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 + "summary": "Get personalized recommendations", + "description": "Returns cached recommendations from the database. If no cached data exists\nor the data is stale, an empty list is returned and a background refresh\ntask is auto-triggered. 
The frontend should use SSE task progress events\nto know when fresh data is ready.", + "operationId": "get_recommendations", + "responses": { + "200": { + "description": "Personalized recommendations", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecommendationsResponse" + } + } } }, - { - "name": "pageSize", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 - } + "401": { + "description": "Not authenticated" + }, + "404": { + "description": "No recommendation plugin enabled" } + } + } + }, + "/api/v1/user/recommendations/refresh": { + "post": { + "tags": [ + "Recommendations" ], + "summary": "Refresh recommendations", + "description": "Enqueues a background task to regenerate recommendations by clearing\nthe cache and updating the taste profile.", + "operationId": "refresh_recommendations", "responses": { "200": { - "description": "OPDS 2.0 recent additions feed", + "description": "Refresh task enqueued", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/RecommendationsRefreshResponse" } } } }, - "403": { - "description": "Forbidden" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "404": { + "description": "No recommendation plugin enabled" + }, + "409": { + "description": "Recommendation refresh already in progress" } - ] + } } }, - "/opds/v2/search": { - "get": { + "/api/v1/user/recommendations/{external_id}/dismiss": { + "post": { "tags": [ - "OPDS 2.0" + "Recommendations" ], - "summary": "OPDS 2.0 search endpoint", - "description": "Searches books and series by title and returns an OPDS 2.0 publications feed", - "operationId": "opds2_search", + "summary": "Dismiss a recommendation", + "description": "Removes the recommendation from the cached list immediately and enqueues\na background 
task to notify the plugin asynchronously. Returns instantly.", + "operationId": "dismiss_recommendation", "parameters": [ { - "name": "query", - "in": "query", - "description": "Search query string", + "name": "external_id", + "in": "path", + "description": "External ID of the recommendation to dismiss", "required": true, "schema": { "type": "string" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DismissRecommendationRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "OPDS 2.0 search results", + "description": "Recommendation dismissed", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/DismissRecommendationResponse" } } } }, - "400": { - "description": "Bad request - empty query" - }, - "403": { - "description": "Forbidden" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "404": { + "description": "No recommendation plugin enabled" } - ] + } } }, - "/opds/v2/series/{series_id}": { + "/api/v1/user/sharing-tags": { "get": { "tags": [ - "OPDS 2.0" - ], - "summary": "List books in a series (OPDS 2.0)", - "description": "Returns a publications feed with all books in the specified series", - "operationId": "opds2_series_books", - "parameters": [ - { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Sharing Tags" ], + "summary": "Get current user's sharing tag grants", + "operationId": "get_my_sharing_tags", "responses": { "200": { - "description": "OPDS 2.0 series books feed", + "description": "List of sharing tag grants for the current user", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": 
"#/components/schemas/UserSharingTagGrantsResponse" } } } - }, - "403": { - "description": "Forbidden" - }, - "404": { - "description": "Series not found" } }, "security": [ @@ -14470,141 +14549,90 @@ ] } }, - "/{prefix}/api/v1/age-ratings": { + "/api/v1/users": { "get": { "tags": [ - "Komga" + "Users" ], - "summary": "List age ratings (stub - always returns empty array)", - "description": "Returns all age ratings in the library.\nCurrently returns empty as Codex doesn't aggregate age ratings separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/age-ratings`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_age_ratings", + "summary": "List all users (admin only) with pagination and filtering", + "operationId": "list_users", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, + "name": "role", + "in": "query", + "description": "Filter by role", + "required": false, "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Empty list of age ratings", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "integer", - "format": "int32" - } + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/UserRole" } - } - } - }, - "401": { - "description": "Unauthorized" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - } - }, - "/{prefix}/api/v1/books/list": { - "post": { - "tags": [ - "Komga" - ], - "summary": "Search/filter books", - "description": "Returns books matching the filter criteria.\nThis uses POST to support complex filter bodies.\n\n## Endpoint\n`POST /{prefix}/api/v1/books/list`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `sort` - Sort parameter (e.g., \"createdDate,desc\")\n\n## Request Body\nJSON object with filter criteria (library_id, series_id, 
search_term, etc.)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_search_books", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" + ] } }, { - "name": "page", + "name": "sharingTag", "in": "query", - "description": "Page number (0-indexed, Komga-style)", + "description": "Filter by sharing tag name (users who have a grant for this tag)", "required": false, "schema": { - "type": "integer", - "format": "int32" + "type": [ + "string", + "null" + ] } }, { - "name": "size", + "name": "sharingTagMode", "in": "query", - "description": "Page size (default: 20)", + "description": "Filter by sharing tag access mode (allow/deny) - only used with sharing_tag", "required": false, "schema": { - "type": "integer", - "format": "int32" + "type": [ + "string", + "null" + ] } }, { - "name": "sort", + "name": "page", "in": "query", - "description": "Sort parameter (e.g., \"createdDate,desc\", \"metadata.numberSort,asc\")", + "description": "Page number (1-indexed, default 1)", "required": false, "schema": { - "type": [ - "string", - "null" - ] + "type": "integer", + "format": "int64", + "minimum": 0 } }, { - "name": "library_id", + "name": "pageSize", "in": "query", - "description": "Filter by library ID", + "description": "Number of items per page (max 100, default 50)", "required": false, "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" + "type": "integer", + "format": "int64", + "minimum": 0 } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaBooksSearchRequestDto" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Paginated list of books matching filter", + "description": "Paginated list of users", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" + "$ref": 
"#/components/schemas/PaginatedResponse_UserDto" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Admin only" } }, "security": [ @@ -14615,85 +14643,39 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/books/ondeck": { - "get": { + }, + "post": { "tags": [ - "Komga" + "Users" ], - "summary": "Get \"on deck\" books", - "description": "Returns books that are currently in-progress (started but not completed).\nThis is the \"continue reading\" shelf in Komic.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/ondeck`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_books_ondeck", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "page", - "in": "query", - "description": "Page number (0-indexed, Komga-style)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "size", - "in": "query", - "description": "Page size (default: 20)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort parameter (e.g., \"createdDate,desc\", \"metadata.numberSort,asc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] + "summary": "Create a new user (admin only)", + "operationId": "create_user", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUserRequest" + } } }, - { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - } - ], + "required": true + }, "responses": { - "200": { - "description": "Paginated list of 
in-progress books", + "201": { + "description": "User created", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" + "$ref": "#/components/schemas/UserDto" } } } }, - "401": { - "description": "Unauthorized" + "400": { + "description": "Invalid request" + }, + "403": { + "description": "Forbidden - Admin only" } }, "security": [ @@ -14706,28 +14688,18 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}": { + "/api/v1/users/{user_id}": { "get": { "tags": [ - "Komga" + "Users" ], - "summary": "Get a book by ID", - "description": "Returns a single book in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_book", + "summary": "Get user by ID (admin only)", + "operationId": "get_user", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ -14737,20 +14709,20 @@ ], "responses": { "200": { - "description": "Book details", + "description": "User details with sharing tags", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaBookDto" + "$ref": "#/components/schemas/UserDetailDto" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Admin only" }, "404": { - "description": "Book not found" + "description": "User not found" } }, "security": [ @@ -14761,30 +14733,18 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/books/{book_id}/file": { - "get": { - "tags": [ - "Komga" + }, + "delete": { + "tags": [ + "Users" ], - "summary": "Download book file", - "description": "Streams the original book file (CBZ, CBR, EPUB, PDF) for download.\nIncludes proper 
Content-Disposition header with UTF-8 encoding.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/file`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_download_book_file", + "summary": "Delete a user (admin only)", + "operationId": "delete_user", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ -14793,17 +14753,14 @@ } ], "responses": { - "200": { - "description": "Book file download", - "content": { - "application/octet-stream": {} - } + "204": { + "description": "User deleted" }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Admin only" }, "404": { - "description": "Book not found or file missing" + "description": "User not found" } }, "security": [ @@ -14814,30 +14771,18 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/books/{book_id}/next": { - "get": { + }, + "patch": { "tags": [ - "Komga" + "Users" ], - "summary": "Get next book in series", - "description": "Returns the next book in the same series by sort order.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/next`\n\n## Response\n- 200: Next book DTO\n- 404: No next book (this is the last book in series)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_next_book", + "summary": "Update a user (admin only, partial update)", + "operationId": "update_user", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ 
-14845,22 +14790,32 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateUserRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Next book in series", + "description": "User updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaBookDto" + "$ref": "#/components/schemas/UserDto" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Admin only" }, "404": { - "description": "No next book" + "description": "User not found" } }, "security": [ @@ -14873,28 +14828,18 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}/pages": { + "/api/v1/users/{user_id}/sharing-tags": { "get": { "tags": [ - "Komga" + "Sharing Tags" ], - "summary": "List all pages for a book", - "description": "Returns an array of page metadata for all pages in a book.\nPages are ordered by page number (1-indexed).\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key\n\n## Response\nReturns an array of `KomgaPageDto` objects with page metadata including\nfilename, MIME type, dimensions, and size.", - "operationId": "komga_list_pages", + "summary": "Get sharing tag grants for a user (admin only)", + "operationId": "get_user_sharing_tags", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ -14904,23 +14849,17 @@ ], "responses": { "200": { - "description": "List of pages in the book", + "description": "List of sharing tag grants for the user", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/KomgaPageDto" - } + "$ref": 
"#/components/schemas/UserSharingTagGrantsResponse" } } } }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Book not found" + "403": { + "description": "Forbidden - Missing permission" } }, "security": [ @@ -14931,59 +14870,51 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/books/{book_id}/pages/{page_number}": { - "get": { + }, + "put": { "tags": [ - "Komga" + "Sharing Tags" ], - "summary": "Get a specific page image", - "description": "Streams the raw page image for the requested page number.\nPage numbers are 1-indexed.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages/{pageNumber}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)\n\n## Response\nReturns the raw image data with appropriate Content-Type header.\nResponse is cached for 1 year (immutable content).", - "operationId": "komga_get_page", + "summary": "Set a user's sharing tag grant (admin only)", + "operationId": "set_user_sharing_tag", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "name": "page_number", - "in": "path", - "description": "Page number (1-indexed)", - "required": true, - "schema": { - "type": "integer", - "format": "int32" - } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetUserSharingTagGrantRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Page image", + "description": "Sharing tag grant set", "content": { - "image/*": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserSharingTagGrantDto" + } + } } }, - "401": { - "description": "Unauthorized" + 
"403": { + "description": "Forbidden - Missing permission" }, "404": { - "description": "Book or page not found" + "description": "Sharing tag not found" } }, "security": [ @@ -14996,28 +14927,18 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}/pages/{page_number}/thumbnail": { - "get": { + "/api/v1/users/{user_id}/sharing-tags/{tag_id}": { + "delete": { "tags": [ - "Komga" + "Sharing Tags" ], - "summary": "Get a page thumbnail", - "description": "Returns a thumbnail version of the requested page.\nThumbnails are resized to max 300px width/height while maintaining aspect ratio.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages/{pageNumber}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)\n\n## Response\nReturns a JPEG thumbnail with appropriate caching headers.", - "operationId": "komga_get_page_thumbnail", + "summary": "Remove a user's sharing tag grant (admin only)", + "operationId": "remove_user_sharing_tag", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ -15025,28 +14946,25 @@ } }, { - "name": "page_number", + "name": "tag_id", "in": "path", - "description": "Page number (1-indexed)", + "description": "Sharing tag ID", "required": true, "schema": { - "type": "integer", - "format": "int32" + "type": "string", + "format": "uuid" } } ], "responses": { - "200": { - "description": "Page thumbnail image", - "content": { - "image/jpeg": {} - } + "204": { + "description": "Sharing tag grant removed" }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Missing permission" }, "404": { - "description": "Book or page not found" + "description": "Grant not found" } }, 
"security": [ @@ -15059,51 +14977,41 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}/previous": { + "/health": { "get": { "tags": [ - "Komga" - ], - "summary": "Get previous book in series", - "description": "Returns the previous book in the same series by sort order.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/previous`\n\n## Response\n- 200: Previous book DTO\n- 404: No previous book (this is the first book in series)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_previous_book", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", - "in": "path", - "description": "Book ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Health" ], + "summary": "Health check endpoint - checks database connectivity", + "description": "Returns \"OK\" with 200 status if database is healthy,\nor \"Service Unavailable\" with 503 status if database check fails.", + "operationId": "health_check", "responses": { "200": { - "description": "Previous book in series", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaBookDto" - } - } - } + "description": "Service is healthy" }, - "401": { - "description": "Unauthorized" + "503": { + "description": "Service is unavailable" + } + } + } + }, + "/opds": { + "get": { + "tags": [ + "OPDS" + ], + "summary": "Root OPDS catalog", + "description": "Returns the main navigation feed with links to:\n- All libraries\n- Search\n- Recent additions", + "operationId": "root_catalog", + "responses": { + "200": { + "description": "OPDS root catalog", + "content": { + "application/atom+xml": {} + } }, - "404": { - "description": "No previous book" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15116,24 +15024,15 @@ ] } }, - 
"/{prefix}/api/v1/books/{book_id}/read-progress": { - "delete": { + "/opds/books/{book_id}/pages": { + "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "Delete reading progress for a book (mark as unread)", - "description": "Removes all reading progress for a book, effectively marking it as unread.\n\n## Endpoint\n`DELETE /{prefix}/api/v1/books/{bookId}/read-progress`\n\n## Response\n- 204 No Content on success\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_delete_progress", + "summary": "OPDS-PSE: List all pages in a book", + "description": "Returns a PSE page feed with individual page links for streaming.\nThis allows OPDS clients to read books page-by-page without downloading the entire file.", + "operationId": "opds_book_pages", "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, { "name": "book_id", "in": "path", @@ -15146,11 +15045,14 @@ } ], "responses": { - "204": { - "description": "Progress deleted successfully" + "200": { + "description": "OPDS-PSE page feed", + "content": { + "application/atom+xml": {} + } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" }, "404": { "description": "Book not found" @@ -15164,54 +15066,79 @@ "api_key": [] } ] - }, - "patch": { + } + }, + "/opds/books/{book_id}/pages/{page_number}": { + "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "Update reading progress for a book", - "description": "Updates the user's reading progress for a specific book.\nKomic sends: `{ \"completed\": false, \"page\": 151 }`\n\n## Endpoint\n`PATCH /{prefix}/api/v1/books/{bookId}/read-progress`\n\n## Request Body\n- `page` - Current page number (1-indexed, optional)\n- `completed` - Whether book is completed (optional)\n- `device_id` - Device ID (optional, not used by Komic)\n- `device_name` - Device name (optional, not used by 
Komic)\n\n## Response\n- 204 No Content on success (Komga behavior)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_update_progress", + "summary": "OPDS-PSE: Get a page image with reading progress tracking", + "description": "Serves the page image (delegating to the v1 handler) and records reading\nprogress via the batching service. This is the endpoint used by OPDS PSE\nclients that read page-by-page and need implicit progress tracking, since\nthey don't have a JavaScript frontend to send explicit progress updates.", + "operationId": "opds_book_page_image", "parameters": [ { - "name": "prefix", + "name": "book_id", "in": "path", - "description": "Komga API prefix (default: komga)", + "description": "Book ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } }, { - "name": "book_id", + "name": "page_number", "in": "path", - "description": "Book ID", + "description": "Page number (1-indexed)", "required": true, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int32" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaReadProgressUpdateDto" - } + "responses": { + "200": { + "description": "Page image (also records reading progress)", + "content": { + "image/jpeg": {} } }, - "required": true + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Book or page not found" + } }, - "responses": { - "204": { - "description": "Progress updated successfully" + "security": [ + { + "jwt_bearer": [] }, - "401": { - "description": "Unauthorized" + { + "api_key": [] + } + ] + } + }, + "/opds/libraries": { + "get": { + "tags": [ + "OPDS" + ], + "summary": "List all libraries", + "description": "Returns a navigation feed with all available libraries", + "operationId": "opds_list_libraries", + "responses": { + "200": { + "description": "OPDS libraries feed", + "content": { + 
"application/atom+xml": {} + } }, - "404": { - "description": "Book not found" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15224,47 +15151,58 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}/thumbnail": { + "/opds/libraries/{library_id}": { "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "Get book thumbnail", - "description": "Returns a thumbnail image for the book's first page.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_book_thumbnail", + "summary": "List series in a library", + "description": "Returns an acquisition feed with all series in the specified library", + "operationId": "opds_library_series", "parameters": [ { - "name": "prefix", + "name": "library_id", "in": "path", - "description": "Komga API prefix (default: komga)", + "description": "Library ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } }, { - "name": "book_id", - "in": "path", - "description": "Book ID", - "required": true, + "name": "page", + "in": "query", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int32", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 } } ], "responses": { "200": { - "description": "Book thumbnail image", + "description": "OPDS library series feed", "content": { - "image/jpeg": {} + "application/atom+xml": {} } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" }, "404": { - "description": "Book not found or has no pages" + "description": "Library not found" } }, "security": [ @@ -15277,19 +15215,19 @@ ] } }, - "/{prefix}/api/v1/collections": { + "/opds/search": { "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "List collections (stub - always returns empty)", - "description": 
"Komga collections are user-created groupings of series.\nCodex doesn't support this feature, so we return empty results.\n\n## Endpoint\n`GET /{prefix}/api/v1/collections`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_collections", + "summary": "OPDS search endpoint", + "description": "Searches books and series by title and returns an OPDS acquisition feed", + "operationId": "opds_search", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", + "name": "q", + "in": "query", + "description": "Search query string", "required": true, "schema": { "type": "string" @@ -15298,17 +15236,13 @@ ], "responses": { "200": { - "description": "Empty list of collections", + "description": "OPDS search results", "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaCollectionDto" - } - } + "application/atom+xml": {} } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15321,41 +15255,20 @@ ] } }, - "/{prefix}/api/v1/genres": { + "/opds/search.xml": { "get": { "tags": [ - "Komga" - ], - "summary": "List genres", - "description": "Returns all genres in the library.\n\n## Endpoint\n`GET /{prefix}/api/v1/genres`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_genres", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - } + "OPDS" ], + "summary": "OpenSearch descriptor endpoint", + "description": "Returns the OpenSearch XML descriptor for OPDS clients", + "operationId": "opds_opensearch_descriptor", "responses": { "200": { - "description": "List of all genres", + "description": "OpenSearch descriptor", "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - } 
+ "application/opensearchdescription+xml": {} } - }, - "401": { - "description": "Unauthorized" } }, "security": [ @@ -15368,41 +15281,38 @@ ] } }, - "/{prefix}/api/v1/languages": { + "/opds/series/{series_id}": { "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "List languages (stub - always returns empty array)", - "description": "Returns all languages in the library.\nCurrently returns empty as Codex doesn't aggregate languages separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/languages`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_languages", + "summary": "List books in a series", + "description": "Returns an acquisition feed with all books in the specified series", + "operationId": "opds_series_books", "parameters": [ { - "name": "prefix", + "name": "series_id", "in": "path", - "description": "Komga API prefix (default: komga)", + "description": "Series ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } } ], "responses": { "200": { - "description": "Empty list of languages", + "description": "OPDS series books feed", "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - } + "application/atom+xml": {} } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -15415,41 +15325,60 @@ ] } }, - "/{prefix}/api/v1/libraries": { + "/opds/v2": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - "summary": "List all libraries", - "description": "Returns all libraries in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_libraries", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" 
+ "summary": "Root OPDS 2.0 catalog", + "description": "Returns the main navigation feed with links to:\n- All libraries\n- Search\n- Recent additions", + "operationId": "opds2_root", + "responses": { + "200": { + "description": "OPDS 2.0 root catalog", + "content": { + "application/opds+json": { + "schema": { + "$ref": "#/components/schemas/Opds2Feed" + } + } } + }, + "403": { + "description": "Forbidden" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] } + ] + } + }, + "/opds/v2/libraries": { + "get": { + "tags": [ + "OPDS 2.0" ], + "summary": "List all libraries (OPDS 2.0)", + "description": "Returns a navigation feed with all available libraries", + "operationId": "opds2_libraries", "responses": { "200": { - "description": "List of libraries", + "description": "OPDS 2.0 libraries feed", "content": { - "application/json": { + "application/opds+json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/KomgaLibraryDto" - } + "$ref": "#/components/schemas/Opds2Feed" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15462,24 +15391,15 @@ ] } }, - "/{prefix}/api/v1/libraries/{library_id}": { + "/opds/v2/libraries/{library_id}": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - "summary": "Get library by ID", - "description": "Returns a single library in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries/{libraryId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_library", + "summary": "List series in a library (OPDS 2.0)", + "description": "Returns a navigation feed with all series in the specified library", + "operationId": "opds2_library_series", "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, { "name": "library_id", "in": "path", @@ -15489,21 +15409,41 @@ 
"type": "string", "format": "uuid" } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } } ], "responses": { "200": { - "description": "Library details", + "description": "OPDS 2.0 library series feed", "content": { - "application/json": { + "application/opds+json": { "schema": { - "$ref": "#/components/schemas/KomgaLibraryDto" + "$ref": "#/components/schemas/Opds2Feed" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" }, "404": { "description": "Library not found" @@ -15519,47 +15459,49 @@ ] } }, - "/{prefix}/api/v1/libraries/{library_id}/thumbnail": { + "/opds/v2/recent": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - "summary": "Get library thumbnail", - "description": "Returns a thumbnail image for the library. Uses the first series' cover\nas the library thumbnail, or returns a 404 if no series exist.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries/{libraryId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)", - "operationId": "komga_get_library_thumbnail", + "summary": "List recent additions (OPDS 2.0)", + "description": "Returns a publications feed with recently added books", + "operationId": "opds2_recent", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, + "name": "page", + "in": "query", + "required": false, "schema": { - "type": "string" + "type": "integer", + "format": "int32", + "minimum": 0 } }, { - "name": "library_id", - "in": "path", - "description": "Library ID", - "required": true, + "name": "pageSize", + "in": "query", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": 
"int32", + "minimum": 0 } } ], "responses": { "200": { - "description": "Library thumbnail image", + "description": "OPDS 2.0 recent additions feed", "content": { - "image/jpeg": {} + "application/opds+json": { + "schema": { + "$ref": "#/components/schemas/Opds2Feed" + } + } } }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Library not found or no series in library" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15572,19 +15514,19 @@ ] } }, - "/{prefix}/api/v1/publishers": { + "/opds/v2/search": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - "summary": "List publishers (stub - always returns empty array)", - "description": "Returns all publishers in the library.\nCurrently returns empty as Codex doesn't aggregate publishers separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/publishers`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_publishers", + "summary": "OPDS 2.0 search endpoint", + "description": "Searches books and series by title and returns an OPDS 2.0 publications feed", + "operationId": "opds2_search", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", + "name": "query", + "in": "query", + "description": "Search query string", "required": true, "schema": { "type": "string" @@ -15593,20 +15535,20 @@ ], "responses": { "200": { - "description": "Empty list of publishers", + "description": "OPDS 2.0 search results", "content": { - "application/json": { + "application/opds+json": { "schema": { - "type": "array", - "items": { - "type": "string" - } + "$ref": "#/components/schemas/Opds2Feed" } } } }, - "401": { - "description": "Unauthorized" + "400": { + "description": "Bad request - empty query" + }, + "403": { + "description": "Forbidden" } }, "security": [ @@ -15619,38 +15561,42 @@ ] } }, - "/{prefix}/api/v1/readlists": { + "/opds/v2/series/{series_id}": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - 
"summary": "List read lists (stub - always returns empty)", - "description": "Komga read lists are user-created lists of books to read.\nCodex doesn't support this feature, so we return empty results.\n\n## Endpoint\n`GET /{prefix}/api/v1/readlists`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_readlists", + "summary": "List books in a series (OPDS 2.0)", + "description": "Returns a publications feed with all books in the specified series", + "operationId": "opds2_series_books", "parameters": [ { - "name": "prefix", + "name": "series_id", "in": "path", - "description": "Komga API prefix (default: komga)", + "description": "Series ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } } ], "responses": { "200": { - "description": "Empty list of read lists", + "description": "OPDS 2.0 series books feed", "content": { - "application/json": { + "application/opds+json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaReadListDto" + "$ref": "#/components/schemas/Opds2Feed" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -15663,14 +15609,14 @@ ] } }, - "/{prefix}/api/v1/series": { + "/{prefix}/api/v1/age-ratings": { "get": { "tags": [ "Komga" ], - "summary": "List all series (paginated)", - "description": "Returns all series in Komga-compatible format with pagination.\n\n## Endpoint\n`GET /{prefix}/api/v1/series`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n- `search` - Optional search query\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_series", + "summary": "List age ratings (stub - always returns empty array)", + "description": "Returns all age ratings in the library.\nCurrently returns 
empty as Codex doesn't aggregate age ratings separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/age-ratings`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_age_ratings", "parameters": [ { "name": "prefix", @@ -15680,72 +15626,19 @@ "schema": { "type": "string" } - }, - { - "name": "page", - "in": "query", - "description": "Page number (0-indexed, Komga-style)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "size", - "in": "query", - "description": "Page size (default: 20)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - }, - { - "name": "search", - "in": "query", - "description": "Search query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } } ], "responses": { "200": { - "description": "Paginated list of series", + "description": "Empty list of age ratings", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + "type": "array", + "items": { + "type": "integer", + "format": "int32" + } } } } @@ -15764,14 +15657,14 @@ ] } }, - "/{prefix}/api/v1/series/list": { + "/{prefix}/api/v1/books/list": { "post": { "tags": [ "Komga" ], - "summary": "Search/filter series", - "description": "Returns series matching the filter criteria.\nThis uses POST to support complex filter bodies.\n\n## Endpoint\n`POST /{prefix}/api/v1/series/list`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `sort` - Sort parameter 
(e.g., \"createdDate,desc\")\n\n## Request Body\nJSON object with filter criteria (library_id, fullTextSearch, condition, etc.)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_search_series", + "summary": "Search/filter books", + "description": "Returns books matching the filter criteria.\nThis uses POST to support complex filter bodies.\n\n## Endpoint\n`POST /{prefix}/api/v1/books/list`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `sort` - Sort parameter (e.g., \"createdDate,desc\")\n\n## Request Body\nJSON object with filter criteria (library_id, series_id, search_term, etc.)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_search_books", "parameters": [ { "name": "prefix", @@ -15803,22 +15696,9 @@ } }, { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - }, - { - "name": "search", + "name": "sort", "in": "query", - "description": "Search query", + "description": "Sort parameter (e.g., \"createdDate,desc\", \"metadata.numberSort,asc\")", "required": false, "schema": { "type": [ @@ -15828,15 +15708,16 @@ } }, { - "name": "sort", + "name": "library_id", "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "description": "Filter by library ID", "required": false, "schema": { "type": [ "string", "null" - ] + ], + "format": "uuid" } } ], @@ -15844,7 +15725,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaSeriesSearchRequestDto" + "$ref": "#/components/schemas/KomgaBooksSearchRequestDto" } } }, @@ -15852,11 +15733,11 @@ }, "responses": { "200": { - "description": "Paginated list of series matching filter", + "description": "Paginated list of books matching filter", "content": { 
"application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" } } } @@ -15875,14 +15756,14 @@ ] } }, - "/{prefix}/api/v1/series/new": { + "/{prefix}/api/v1/books/ondeck": { "get": { "tags": [ "Komga" ], - "summary": "Get recently added series", - "description": "Returns series sorted by created date descending (newest first).\n\n## Endpoint\n`GET /{prefix}/api/v1/series/new`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series_new", + "summary": "Get \"on deck\" books", + "description": "Returns books that are currently in-progress (started but not completed).\nThis is the \"continue reading\" shelf in Komic.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/ondeck`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_books_ondeck", "parameters": [ { "name": "prefix", @@ -15914,22 +15795,9 @@ } }, { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - }, - { - "name": "search", + "name": "sort", "in": "query", - "description": "Search query", + "description": "Sort parameter (e.g., \"createdDate,desc\", \"metadata.numberSort,asc\")", "required": false, "schema": { "type": [ @@ -15939,25 +15807,26 @@ } }, { - "name": "sort", + "name": "library_id", "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "description": "Filter by library ID", "required": false, "schema": { "type": [ "string", "null" - ] + ], + "format": "uuid" } } ], "responses": { "200": { 
- "description": "Paginated list of recently added series", + "description": "Paginated list of in-progress books", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" } } } @@ -15976,14 +15845,14 @@ ] } }, - "/{prefix}/api/v1/series/release-dates": { + "/{prefix}/api/v1/books/{book_id}": { "get": { "tags": [ "Komga" ], - "summary": "List series release dates (stub - always returns empty array)", - "description": "Returns all release dates used by series in the library.\nCurrently returns empty as Codex doesn't aggregate release dates separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/release-dates`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_series_release_dates", + "summary": "Get a book by ID", + "description": "Returns a single book in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_book", "parameters": [ { "name": "prefix", @@ -15993,24 +15862,34 @@ "schema": { "type": "string" } + }, + { + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { "200": { - "description": "Empty list of release dates", + "description": "Book details", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "type": "string" - } + "$ref": "#/components/schemas/KomgaBookDto" } } } }, "401": { "description": "Unauthorized" + }, + "404": { + "description": "Book not found" } }, "security": [ @@ -16023,14 +15902,14 @@ ] } }, - "/{prefix}/api/v1/series/updated": { + "/{prefix}/api/v1/books/{book_id}/file": { "get": { "tags": [ "Komga" ], - "summary": "Get recently updated series", - "description": "Returns series sorted by last modified date descending (most 
recently updated first).\n\n## Endpoint\n`GET /{prefix}/api/v1/series/updated`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series_updated", + "summary": "Download book file", + "description": "Streams the original book file (CBZ, CBR, EPUB, PDF) for download.\nIncludes proper Content-Disposition header with UTF-8 encoding.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/file`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_download_book_file", "parameters": [ { "name": "prefix", @@ -16042,76 +15921,28 @@ } }, { - "name": "page", - "in": "query", - "description": "Page number (0-indexed, Komga-style)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "size", - "in": "query", - "description": "Page size (default: 20)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, "schema": { - "type": [ - "string", - "null" - ], + "type": "string", "format": "uuid" } - }, - { - "name": "search", - "in": "query", - "description": "Search query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } } ], "responses": { "200": { - "description": "Paginated list of recently updated series", + "description": "Book file download", "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/KomgaPage_KomgaSeriesDto" - } - } + "application/octet-stream": {} } }, "401": { "description": "Unauthorized" + }, + "404": { + "description": "Book not found or file missing" } }, "security": [ @@ -16124,15 +15955,15 @@ ] } }, - "/{prefix}/api/v1/series/{series_id}": { + "/{prefix}/api/v1/books/{book_id}/next": { "get": { "tags": [ "Komga" ], - "summary": "Get series by ID", - "description": "Returns a single series in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series", - "parameters": [ + "summary": "Get next book in series", + "description": "Returns the next book in the same series by sort order.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/next`\n\n## Response\n- 200: Next book DTO\n- 404: No next book (this is the last book in series)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_next_book", + "parameters": [ { "name": "prefix", "in": "path", @@ -16143,9 +15974,9 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", @@ -16155,11 +15986,11 @@ ], "responses": { "200": { - "description": "Series details", + "description": "Next book in series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaSeriesDto" + "$ref": "#/components/schemas/KomgaBookDto" } } } @@ -16168,7 +15999,7 @@ "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "No next book" } }, "security": [ @@ -16181,14 +16012,14 @@ ] } }, - "/{prefix}/api/v1/series/{series_id}/books": { + "/{prefix}/api/v1/books/{book_id}/pages": { "get": { "tags": [ "Komga" ], - "summary": "Get books in a series", - "description": "Returns all books in a series with pagination.\n\n## Endpoint\n`GET 
/{prefix}/api/v1/series/{seriesId}/books`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series_books", + "summary": "List all pages for a book", + "description": "Returns an array of page metadata for all pages in a book.\nPages are ordered by page number (1-indexed).\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key\n\n## Response\nReturns an array of `KomgaPageDto` objects with page metadata including\nfilename, MIME type, dimensions, and size.", + "operationId": "komga_list_pages", "parameters": [ { "name": "prefix", @@ -16200,80 +16031,26 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "name": "page", - "in": "query", - "description": "Page number (0-indexed, Komga-style)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "size", - "in": "query", - "description": "Page size (default: 20)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - }, - { - "name": "search", - "in": "query", - "description": "Search query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } } ], "responses": { "200": { - "description": "Paginated list of books in series", + "description": "List of pages in 
the book", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" + "type": "array", + "items": { + "$ref": "#/components/schemas/KomgaPageDto" + } } } } @@ -16282,7 +16059,7 @@ "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "Book not found" } }, "security": [ @@ -16295,14 +16072,14 @@ ] } }, - "/{prefix}/api/v1/series/{series_id}/read-progress": { - "post": { + "/{prefix}/api/v1/books/{book_id}/pages/{page_number}": { + "get": { "tags": [ "Komga" ], - "summary": "Mark all books in a series as read", - "description": "Marks all books in a series as completed (read) for the current user.\nThis is equivalent to marking each book individually as completed.\n\n## Endpoint\n`POST /{prefix}/api/v1/series/{seriesId}/read-progress`\n\n## Response\n- 204 No Content on success (Komga behavior)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_mark_series_as_read", + "summary": "Get a specific page image", + "description": "Streams the raw page image for the requested page number.\nPage numbers are 1-indexed.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages/{pageNumber}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)\n\n## Response\nReturns the raw image data with appropriate Content-Type header.\nResponse is cached for 1 year (immutable content).", + "operationId": "komga_get_page", "parameters": [ { "name": "prefix", @@ -16314,25 +16091,38 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", "format": "uuid" } + }, + { + "name": "page_number", + "in": "path", + "description": "Page number (1-indexed)", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } } ], "responses": { - "204": { - "description": "Series marked 
as read" + "200": { + "description": "Page image", + "content": { + "image/*": {} + } }, "401": { "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "Book or page not found" } }, "security": [ @@ -16343,14 +16133,16 @@ "api_key": [] } ] - }, - "delete": { + } + }, + "/{prefix}/api/v1/books/{book_id}/pages/{page_number}/thumbnail": { + "get": { "tags": [ "Komga" ], - "summary": "Mark all books in a series as unread", - "description": "Removes all reading progress for all books in a series, effectively marking\nthe entire series as unread for the current user.\n\n## Endpoint\n`DELETE /{prefix}/api/v1/series/{seriesId}/read-progress`\n\n## Response\n- 204 No Content on success\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_mark_series_as_unread", + "summary": "Get a page thumbnail", + "description": "Returns a thumbnail version of the requested page.\nThumbnails are resized to max 300px width/height while maintaining aspect ratio.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages/{pageNumber}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)\n\n## Response\nReturns a JPEG thumbnail with appropriate caching headers.", + "operationId": "komga_get_page_thumbnail", "parameters": [ { "name": "prefix", @@ -16362,25 +16154,38 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", "format": "uuid" } + }, + { + "name": "page_number", + "in": "path", + "description": "Page number (1-indexed)", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } } ], "responses": { - "204": { - "description": "Series marked as unread" + "200": { + "description": "Page thumbnail image", + "content": { + "image/jpeg": {} + } }, "401": { "description": "Unauthorized" }, "404": { - 
"description": "Series not found" + "description": "Book or page not found" } }, "security": [ @@ -16393,14 +16198,14 @@ ] } }, - "/{prefix}/api/v1/series/{series_id}/thumbnail": { + "/{prefix}/api/v1/books/{book_id}/previous": { "get": { "tags": [ "Komga" ], - "summary": "Get series thumbnail", - "description": "Returns a thumbnail image for the series.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series_thumbnail", + "summary": "Get previous book in series", + "description": "Returns the previous book in the same series by sort order.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/previous`\n\n## Response\n- 200: Previous book DTO\n- 404: No previous book (this is the first book in series)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_previous_book", "parameters": [ { "name": "prefix", @@ -16412,9 +16217,9 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", @@ -16424,16 +16229,20 @@ ], "responses": { "200": { - "description": "Series thumbnail image", + "description": "Previous book in series", "content": { - "image/jpeg": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaBookDto" + } + } } }, "401": { "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "No previous book" } }, "security": [ @@ -16446,14 +16255,14 @@ ] } }, - "/{prefix}/api/v1/tags": { - "get": { + "/{prefix}/api/v1/books/{book_id}/read-progress": { + "delete": { "tags": [ "Komga" ], - "summary": "List tags", - "description": "Returns all tags in the library.\n\n## Endpoint\n`GET /{prefix}/api/v1/tags`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_tags", + "summary": "Delete 
reading progress for a book (mark as unread)", + "description": "Removes all reading progress for a book, effectively marking it as unread.\n\n## Endpoint\n`DELETE /{prefix}/api/v1/books/{bookId}/read-progress`\n\n## Response\n- 204 No Content on success\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_delete_progress", "parameters": [ { "name": "prefix", @@ -16463,24 +16272,27 @@ "schema": { "type": "string" } + }, + { + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { - "200": { - "description": "List of all tags", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - } - } + "204": { + "description": "Progress deleted successfully" }, "401": { "description": "Unauthorized" + }, + "404": { + "description": "Book not found" } }, "security": [ @@ -16491,16 +16303,14 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/users/me": { - "get": { + }, + "patch": { "tags": [ "Komga" ], - "summary": "Get current user information", - "description": "Returns information about the currently authenticated user in Komga format.\nThis endpoint is used by Komic and other apps to verify authentication\nand determine user capabilities.\n\n## Endpoint\n`GET /{prefix}/api/v1/users/me`\n\n## Response\nReturns a `KomgaUserDto` containing:\n- User ID (UUID as string)\n- Email address\n- Roles (ADMIN, USER, FILE_DOWNLOAD)\n- Library access settings\n- Content restrictions\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_current_user", + "summary": "Update reading progress for a book", + "description": "Updates the user's reading progress for a specific book.\nKomic sends: `{ \"completed\": false, \"page\": 151 }`\n\n## Endpoint\n`PATCH /{prefix}/api/v1/books/{bookId}/read-progress`\n\n## Request Body\n- `page` - Current page 
number (1-indexed, optional)\n- `completed` - Whether book is completed (optional)\n- `device_id` - Device ID (optional, not used by Komic)\n- `device_name` - Device name (optional, not used by Komic)\n\n## Response\n- 204 No Content on success (Komga behavior)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_update_progress", "parameters": [ { "name": "prefix", @@ -16510,21 +16320,1350 @@ "schema": { "type": "string" } + }, + { + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], - "responses": { - "200": { - "description": "Current user information", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaUserDto" - } + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaReadProgressUpdateDto" } } }, - "401": { - "description": "Unauthorized" + "required": true + }, + "responses": { + "204": { + "description": "Progress updated successfully" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Book not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/books/{book_id}/thumbnail": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get book thumbnail", + "description": "Returns a thumbnail image for the book's first page.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_book_thumbnail", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": 
{ + "description": "Book thumbnail image", + "content": { + "image/jpeg": {} + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Book not found or has no pages" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/collections": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List collections (stub - always returns empty)", + "description": "Komga collections are user-created groupings of series.\nCodex doesn't support this feature, so we return empty results.\n\n## Endpoint\n`GET /{prefix}/api/v1/collections`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_collections", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of collections", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaCollectionDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/genres": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List genres", + "description": "Returns all genres in the library.\n\n## Endpoint\n`GET /{prefix}/api/v1/genres`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_genres", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List of all genres", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + 
"jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/languages": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List languages (stub - always returns empty array)", + "description": "Returns all languages in the library.\nCurrently returns empty as Codex doesn't aggregate languages separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/languages`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_languages", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of languages", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/libraries": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List all libraries", + "description": "Returns all libraries in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_libraries", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List of libraries", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/KomgaLibraryDto" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/libraries/{library_id}": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get library by ID", + "description": 
"Returns a single library in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries/{libraryId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_library", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "library_id", + "in": "path", + "description": "Library ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Library details", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaLibraryDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Library not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/libraries/{library_id}/thumbnail": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get library thumbnail", + "description": "Returns a thumbnail image for the library. 
Uses the first series' cover\nas the library thumbnail, or returns a 404 if no series exist.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries/{libraryId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)", + "operationId": "komga_get_library_thumbnail", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "library_id", + "in": "path", + "description": "Library ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Library thumbnail image", + "content": { + "image/jpeg": {} + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Library not found or no series in library" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/publishers": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List publishers (stub - always returns empty array)", + "description": "Returns all publishers in the library.\nCurrently returns empty as Codex doesn't aggregate publishers separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/publishers`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_publishers", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of publishers", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/readlists": { + "get": { + "tags": [ + "Komga" + 
], + "summary": "List read lists (stub - always returns empty)", + "description": "Komga read lists are user-created lists of books to read.\nCodex doesn't support this feature, so we return empty results.\n\n## Endpoint\n`GET /{prefix}/api/v1/readlists`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_readlists", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of read lists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaReadListDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List all series (paginated)", + "description": "Returns all series in Komga-compatible format with pagination.\n\n## Endpoint\n`GET /{prefix}/api/v1/series`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n- `search` - Optional search query\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_series", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": "query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "library_id", + "in": "query", + 
"description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "Paginated list of series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/list": { + "post": { + "tags": [ + "Komga" + ], + "summary": "Search/filter series", + "description": "Returns series matching the filter criteria.\nThis uses POST to support complex filter bodies.\n\n## Endpoint\n`POST /{prefix}/api/v1/series/list`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `sort` - Sort parameter (e.g., \"createdDate,desc\")\n\n## Request Body\nJSON object with filter criteria (library_id, fullTextSearch, condition, etc.)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_search_series", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": "query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": 
"int32" + } + }, + { + "name": "library_id", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaSeriesSearchRequestDto" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Paginated list of series matching filter", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/new": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get recently added series", + "description": "Returns series sorted by created date descending (newest first).\n\n## Endpoint\n`GET /{prefix}/api/v1/series/new`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series_new", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": 
"query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "library_id", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "Paginated list of recently added series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/release-dates": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List series release dates (stub - always returns empty array)", + "description": "Returns all release dates used by series in the library.\nCurrently returns empty as Codex doesn't aggregate release dates separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/release-dates`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_series_release_dates", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of release dates", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + 
"jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/updated": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get recently updated series", + "description": "Returns series sorted by last modified date descending (most recently updated first).\n\n## Endpoint\n`GET /{prefix}/api/v1/series/updated`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series_updated", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": "query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "library_id", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "Paginated list of recently updated series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] 
+ }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/{series_id}": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get series by ID", + "description": "Returns a single series in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Series details", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/{series_id}/books": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get books in a series", + "description": "Returns all books in a series with pagination.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}/books`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series_books", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + 
"required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": "query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "library_id", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "Paginated list of books in series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/{series_id}/read-progress": { + "post": { + "tags": [ + "Komga" + ], + "summary": "Mark all books in a series as read", + "description": "Marks all books in a series as completed (read) for the current user.\nThis is equivalent to marking each book individually as completed.\n\n## Endpoint\n`POST /{prefix}/api/v1/series/{seriesId}/read-progress`\n\n## Response\n- 204 No Content on success (Komga behavior)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_mark_series_as_read", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": "path", + "description": "Series 
ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "204": { + "description": "Series marked as read" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + }, + "delete": { + "tags": [ + "Komga" + ], + "summary": "Mark all books in a series as unread", + "description": "Removes all reading progress for all books in a series, effectively marking\nthe entire series as unread for the current user.\n\n## Endpoint\n`DELETE /{prefix}/api/v1/series/{seriesId}/read-progress`\n\n## Response\n- 204 No Content on success\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_mark_series_as_unread", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "204": { + "description": "Series marked as unread" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/{series_id}/thumbnail": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get series thumbnail", + "description": "Returns a thumbnail image for the series.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series_thumbnail", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": 
"path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Series thumbnail image", + "content": { + "image/jpeg": {} + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/tags": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List tags", + "description": "Returns all tags in the library.\n\n## Endpoint\n`GET /{prefix}/api/v1/tags`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_tags", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List of all tags", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/users/me": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get current user information", + "description": "Returns information about the currently authenticated user in Komga format.\nThis endpoint is used by Komic and other apps to verify authentication\nand determine user capabilities.\n\n## Endpoint\n`GET /{prefix}/api/v1/users/me`\n\n## Response\nReturns a `KomgaUserDto` containing:\n- User ID (UUID as string)\n- Email address\n- Roles (ADMIN, USER, FILE_DOWNLOAD)\n- Library access settings\n- Content restrictions\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_current_user", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + 
"required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Current user information", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaUserDto" + } + } + } + }, + "401": { + "description": "Unauthorized" } }, "security": [ @@ -16907,6 +18046,27 @@ } } }, + "ApplicabilityResponse": { + "type": "object", + "description": "Response shape for `GET /api/v1/release-sources/applicability`.", + "required": [ + "applicable", + "pluginDisplayNames" + ], + "properties": { + "applicable": { + "type": "boolean", + "description": "`true` when at least one enabled `release_source` plugin applies to\nthe requested library (or, if no `libraryId` was supplied, to *any*\nlibrary). The frontend uses this to decide whether to render the\nper-series Tracking panel and Releases tab, or to show the\nbulk-track menu entry." + }, + "pluginDisplayNames": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Plugin display names (or fallback to `name` when no manifest cached\nyet) of the enabled release-source plugins covering this library.\nEmpty when `applicable` is `false`. Useful for surfacing \"Powered by\nMangaUpdates, Nyaa\" hints in the UI." 
+ } + } + }, "AuthorContextDto": { "type": "object", "description": "Author context for template evaluation.", @@ -20487,6 +21647,57 @@ } } }, + "BulkReleaseAction": { + "type": "string", + "description": "Action requested by `POST /api/v1/releases/bulk`.", + "enum": [ + "dismiss", + "mark-acquired", + "ignore", + "reset", + "delete" + ] + }, + "BulkReleaseActionRequest": { + "type": "object", + "description": "Request body for `POST /api/v1/releases/bulk`.", + "required": [ + "ids", + "action" + ], + "properties": { + "action": { + "$ref": "#/components/schemas/BulkReleaseAction" + }, + "ids": { + "type": "array", + "items": { + "type": "string", + "format": "uuid" + } + } + } + }, + "BulkReleaseActionResponse": { + "type": "object", + "description": "Response from `POST /api/v1/releases/bulk`.", + "required": [ + "affected", + "action" + ], + "properties": { + "action": { + "$ref": "#/components/schemas/BulkReleaseAction", + "description": "Action that ran (echoed back for client-side confirmation toasts)." + }, + "affected": { + "type": "integer", + "format": "int64", + "description": "Number of ledger rows actually affected. 
Less than `ids.len()` when\nsome IDs were already deleted concurrently.", + "minimum": 0 + } + } + }, "BulkRenumberSeriesRequest": { "type": "object", "description": "Request for bulk renumber operations on multiple series", @@ -20591,24 +21802,83 @@ } } }, - "BulkTaskResponse": { + "BulkTaskResponse": { + "type": "object", + "description": "Response for bulk task operations", + "required": [ + "taskId", + "message" + ], + "properties": { + "message": { + "type": "string", + "description": "Message describing the operation", + "example": "Thumbnail generation task queued for 5 series" + }, + "taskId": { + "type": "string", + "format": "uuid", + "description": "ID of the fan-out task that was created", + "example": "550e8400-e29b-41d4-a716-446655440000" + } + } + }, + "BulkTrackForReleasesItem": { + "type": "object", + "description": "Per-series outcome of a bulk track / untrack operation.\n\nReturned in `BulkTrackForReleasesResponse.results` so the UI can show a\nper-row status (e.g. \"tracked\", \"skipped: not found\", \"errored: …\") without\nre-querying the tracking config endpoint per series.", + "required": [ + "seriesId", + "outcome" + ], + "properties": { + "detail": { + "type": [ + "string", + "null" + ], + "description": "Free-form detail (error message for `errored`, reason for `skipped`).\n`None` for the success cases." + }, + "outcome": { + "type": "string", + "description": "`tracked` | `untracked` | `skipped` | `errored`." + }, + "seriesId": { + "type": "string", + "format": "uuid" + } + } + }, + "BulkTrackForReleasesResponse": { "type": "object", - "description": "Response for bulk task operations", + "description": "Aggregate result of `POST /series/bulk/track-for-releases` and its untrack\ncounterpart. 
Counts and per-series outcomes for client-side display.", "required": [ - "taskId", - "message" + "changed", + "alreadyInState", + "errored", + "results" ], "properties": { - "message": { - "type": "string", - "description": "Message describing the operation", - "example": "Thumbnail generation task queued for 5 series" + "alreadyInState": { + "type": "integer", + "description": "Series whose `tracked` flag was already in the target state. No-ops.", + "minimum": 0 }, - "taskId": { - "type": "string", - "format": "uuid", - "description": "ID of the fan-out task that was created", - "example": "550e8400-e29b-41d4-a716-446655440000" + "changed": { + "type": "integer", + "description": "Series successfully flipped to `tracked = true` (or `false` for the\nuntrack endpoint).", + "minimum": 0 + }, + "errored": { + "type": "integer", + "description": "Series that could not be processed (missing, error, etc.).", + "minimum": 0 + }, + "results": { + "type": "array", + "items": { + "$ref": "#/components/schemas/BulkTrackForReleasesItem" + }, + "description": "Per-series outcomes in input order." } } }, @@ -20800,7 +22070,7 @@ }, "type": { "type": "string", - "description": "Field type: \"number\", \"string\", or \"boolean\"" + "description": "Field type — free-form documentation hint. Common values: \"number\",\n\"string\", \"boolean\", \"string-array\", \"object\". The host never validates\nstored config against this; it forwards the raw JSON to the plugin." } } }, @@ -21386,6 +22656,26 @@ } } }, + "CreateSeriesAliasRequest": { + "type": "object", + "required": [ + "alias" + ], + "properties": { + "alias": { + "type": "string", + "description": "Alias text. Will be trimmed; must normalize to non-empty.", + "example": "Boku no Hero Academia" + }, + "source": { + "type": [ + "string", + "null" + ], + "description": "Optional explicit source. Defaults to `manual` when called from the API.\nPlugin-internal flows write `metadata`; we don't expose that to HTTP." 
+ } + } + }, "CreateSeriesExportRequest": { "type": "object", "description": "Request body for creating a new series export", @@ -21633,6 +22923,19 @@ } } }, + "DeleteReleaseResponse": { + "type": "object", + "description": "Response from `DELETE /api/v1/releases/{id}`.\n\nSingle-row delete returns a small confirmation rather than 204 so the\nfrontend can surface a toast that mentions the etag clear (\"the next\npoll will re-fetch this release\"). Mirrors the bulk-delete shape with\n`affected = 1`.", + "required": [ + "deleted" + ], + "properties": { + "deleted": { + "type": "boolean", + "description": "`true` if the row was deleted, `false` if it didn't exist." + } + } + }, "DetectedSeriesDto": { "type": "object", "description": "Detected series information for preview", @@ -22505,6 +23808,92 @@ ] } } + }, + { + "type": "object", + "description": "A new release was recorded in the ledger.\n\nEmitted once per accepted, non-deduped ledger insert by the polling\ntask and the `releases/record` reverse-RPC handler. The frontend uses\nthis to bump the Releases nav badge, surface a toast on the inbox\npage, and refresh the per-series Releases tab.", + "required": [ + "ledgerId", + "seriesId", + "sourceId", + "pluginId", + "language", + "type" + ], + "properties": { + "chapter": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Chapter announced (if the source emits chapters)." + }, + "language": { + "type": "string", + "description": "Language code (e.g. `\"en\"`); used by client-side notification\npreference filters." + }, + "ledgerId": { + "type": "string", + "format": "uuid" + }, + "pluginId": { + "type": "string", + "description": "Plugin name that owns the source (`release_sources.plugin_id`).\nHelps the frontend filter without an extra lookup." 
+ }, + "seriesId": { + "type": "string", + "format": "uuid" + }, + "sourceId": { + "type": "string", + "format": "uuid" + }, + "type": { + "type": "string", + "enum": [ + "release_announced" + ] + }, + "volume": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Volume announced (if the source emits volumes)." + } + } + }, + { + "type": "object", + "description": "A release source's poll task completed.\n\nEmitted at the end of every `poll_release_source` task run, after\n`release_sources.last_summary` / `last_polled_at` / `etag` have been\npersisted. The frontend uses this to refresh the Release tracking\nsettings page in real time so users don't have to reload to see a\n\"Poll now\" finish. Carries no diff details — receivers should\ninvalidate the source query and re-read the row.", + "required": [ + "sourceId", + "pluginId", + "hadError", + "type" + ], + "properties": { + "hadError": { + "type": "boolean", + "description": "`true` if the poll wrote a `last_error`. Cheap \"did it fail\"\nhint without forcing the client to refetch." + }, + "pluginId": { + "type": "string", + "description": "Plugin that owns the source (`release_sources.plugin_id`).\nCheap filter for clients only watching certain plugins." + }, + "sourceId": { + "type": "string", + "format": "uuid" + }, + "type": { + "type": "string", + "enum": [ + "release_source_polled" + ] + } + } } ], "description": "Specific event types for entity changes" @@ -23777,6 +25166,32 @@ "description": "When the series was last updated", "example": "2024-01-15T10:30:00Z" }, + "upstreamChapterGap": { + "type": [ + "number", + "null" + ], + "format": "float", + "description": "Upstream-vs-local chapter delta. See `SeriesDto::upstream_chapter_gap`.", + "example": 3.0 + }, + "upstreamGapProvider": { + "type": [ + "string", + "null" + ], + "description": "Provider that supplied the upstream counts. 
See\n`SeriesDto::upstream_gap_provider`.", + "example": "MangaBaka" + }, + "upstreamVolumeGap": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Upstream-vs-local volume delta. See `SeriesDto::upstream_volume_gap`.", + "example": 1 + }, "volumesOwned": { "type": [ "integer", @@ -28332,6 +29747,174 @@ } } }, + "PaginatedResponse_ReleaseLedgerEntryDto": { + "type": "object", + "description": "Generic paginated response wrapper with HATEOAS links", + "required": [ + "data", + "page", + "pageSize", + "total", + "totalPages", + "links" + ], + "properties": { + "data": { + "type": "array", + "items": { + "type": "object", + "description": "A single release announcement. Sources write these; the inbox reads them.", + "required": [ + "id", + "seriesId", + "seriesTitle", + "sourceId", + "externalReleaseId", + "payloadUrl", + "confidence", + "state", + "observedAt", + "createdAt" + ], + "properties": { + "chapter": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Decimal supports `12.5` etc." + }, + "confidence": { + "type": "number", + "format": "double" + }, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "externalReleaseId": { + "type": "string", + "description": "Plugin-stable identity for the release (used for dedup).", + "example": "nyaa:1234567" + }, + "formatHints": { + "description": "Sparse `{ \"jxl\": true, \"container\": \"cbz\", ... }`." + }, + "groupOrUploader": { + "type": [ + "string", + "null" + ], + "description": "Group/scanlator/uploader attribution." + }, + "id": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440a00" + }, + "infoHash": { + "type": [ + "string", + "null" + ], + "description": "Torrent info_hash, if applicable." + }, + "language": { + "type": [ + "string", + "null" + ] + }, + "mediaUrl": { + "type": [ + "string", + "null" + ], + "description": "Optional second URL for direct fetch (`.torrent`, `magnet:`, DDL\nlink). 
Travels paired with [`Self::media_url_kind`]." + }, + "mediaUrlKind": { + "type": [ + "string", + "null" + ], + "description": "Classifies what `media_url` points at: `torrent` | `magnet` |\n`direct` | `other`. The frontend uses this to pick a kind-specific\nicon next to the standard external-link icon." + }, + "metadata": { + "description": "Source-specific extras (free-form)." + }, + "observedAt": { + "type": "string", + "format": "date-time" + }, + "payloadUrl": { + "type": "string", + "description": "Where to acquire the release. Conventionally a human-readable\nlanding page (Nyaa view page, MangaUpdates release page)." + }, + "seriesId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440002" + }, + "seriesTitle": { + "type": "string", + "description": "Series title at the time of the response. Joined from the `series`\ntable so the inbox UI can render a human-readable label without a\nfollow-up fetch. Falls back to the empty string only if the series\nrow was hard-deleted between the join and the read.", + "example": "Chainsaw Man" + }, + "sourceId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440b00" + }, + "state": { + "type": "string", + "description": "`announced` | `dismissed` | `marked_acquired` | `hidden`." 
+ }, + "volume": { + "type": [ + "integer", + "null" + ], + "format": "int32" + } + } + }, + "description": "The data items for this page" + }, + "links": { + "$ref": "#/components/schemas/PaginationLinks", + "description": "HATEOAS navigation links" + }, + "page": { + "type": "integer", + "format": "int64", + "description": "Current page number (1-indexed)", + "example": 1, + "minimum": 0 + }, + "pageSize": { + "type": "integer", + "format": "int64", + "description": "Number of items per page", + "example": 50, + "minimum": 0 + }, + "total": { + "type": "integer", + "format": "int64", + "description": "Total number of items across all pages", + "example": 150, + "minimum": 0 + }, + "totalPages": { + "type": "integer", + "format": "int64", + "description": "Total number of pages", + "example": 3, + "minimum": 0 + } + } + }, "PaginatedResponse_SeriesDto": { "type": "object", "description": "Generic paginated response wrapper with HATEOAS links", @@ -28474,6 +30057,32 @@ "description": "When the series was last updated", "example": "2024-01-15T10:30:00Z" }, + "upstreamChapterGap": { + "type": [ + "number", + "null" + ], + "format": "float", + "description": "Difference between the upstream original-language chapter count\n(`series_metadata.total_chapter_count`, supplied by metadata\nproviders like MangaBaka or AniList) and the highest locally-owned\nchapter (`local_max_chapter`).\n\nAlways `None` unless the series is tracked AND `track_chapters` is\nenabled AND the provider count is populated AND the rounded-to-1-\ndecimal gap is positive. **This is an informational signal, not a\nrelease announcement** — Phase 6's MangaUpdates plugin owns the\ntranslation-release feed.", + "example": 3.0 + }, + "upstreamGapProvider": { + "type": [ + "string", + "null" + ], + "description": "Display name of the metadata provider that supplied the upstream\ncounts (e.g., \"MangaBaka\", \"AniList\"). 
Set whenever at least one of\n`upstream_chapter_gap` / `upstream_volume_gap` is populated. Used by\nthe Phase 7 badge tooltip.", + "example": "MangaBaka" + }, + "upstreamVolumeGap": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Difference between the upstream original-language volume count\n(`series_metadata.total_volume_count`) and the highest locally-owned\nvolume (`local_max_volume`). Same suppression rules as\n`upstream_chapter_gap`, gated on `track_volumes`.", + "example": 1 + }, "volumesOwned": { "type": [ "integer", @@ -29624,6 +31233,10 @@ }, "description": "Content types this plugin can provide metadata for (e.g., [\"series\", \"book\"])" }, + "releaseSource": { + "type": "boolean", + "description": "Whether the plugin declares the `release_source` capability (announces\nnew chapter / volume releases for tracked series)." + }, "userReadSync": { "type": "boolean", "description": "Can sync user reading progress" @@ -30705,6 +32318,24 @@ } } }, + "PollNowResponse": { + "type": "object", + "description": "Response shape from the `poll-now` endpoint.\n\n`status` is `enqueued` after a successful enqueue. The `message` carries\nthe task ID for follow-up (`tasks.id`); the task runs asynchronously, so\nthis response does not reflect poll outcome.", + "required": [ + "status", + "message" + ], + "properties": { + "message": { + "type": "string", + "description": "Human-readable message; includes the enqueued task ID." + }, + "status": { + "type": "string", + "description": "`enqueued` on success." 
+ } + } + }, "PreviewScanRequest": { "type": "object", "description": "Preview scan request", @@ -31116,388 +32747,735 @@ }, "startedAt": { "type": "string", - "format": "date-time", - "description": "When reading started", - "example": "2024-01-10T14:30:00Z" + "format": "date-time", + "description": "When reading started", + "example": "2024-01-10T14:30:00Z" + }, + "updatedAt": { + "type": "string", + "format": "date-time", + "description": "When progress was last updated", + "example": "2024-01-15T18:45:00Z" + }, + "userId": { + "type": "string", + "format": "uuid", + "description": "User ID", + "example": "550e8400-e29b-41d4-a716-446655440001" + } + } + }, + "ReadingProgress": { + "type": "object", + "description": "Reading progress information for a publication\n\nCustom extension for tracking reading progress in OPDS 2.0.\nCompatible with reading apps that support progress sync.", + "required": [ + "currentPage", + "totalPages", + "progressPercent", + "isCompleted" + ], + "properties": { + "currentPage": { + "type": "integer", + "format": "int32", + "description": "Current page (1-indexed)" + }, + "isCompleted": { + "type": "boolean", + "description": "Whether the book has been completed" + }, + "lastReadAt": { + "type": [ + "string", + "null" + ], + "format": "date-time", + "description": "Last time progress was updated" + }, + "progressPercent": { + "type": "number", + "format": "double", + "description": "Progress as a percentage (0.0 - 100.0)" + }, + "totalPages": { + "type": "integer", + "format": "int32", + "description": "Total number of pages in the book" + } + } + }, + "RecommendationDto": { + "type": "object", + "description": "A single recommendation for the user", + "required": [ + "externalId", + "title", + "score", + "reason" + ], + "properties": { + "basedOn": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Titles that influenced this recommendation" + }, + "codexSeriesId": { + "type": [ + "string", + "null" + ], + 
"description": "Codex series ID if matched to an existing series" + }, + "countryOfOrigin": { + "type": [ + "string", + "null" + ], + "description": "Country of origin ISO code (e.g., \"JP\", \"KR\", \"CN\")" + }, + "coverUrl": { + "type": [ + "string", + "null" + ], + "description": "Cover image URL" + }, + "externalId": { + "type": "string", + "description": "External ID on the source service" + }, + "externalUrl": { + "type": [ + "string", + "null" + ], + "description": "URL to the entry on the external service" + }, + "format": { + "type": [ + "string", + "null" + ], + "description": "Media format (e.g., \"MANGA\", \"NOVEL\", \"ONE_SHOT\")" + }, + "genres": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Genres" + }, + "inCodex": { + "type": "boolean", + "description": "Whether this series exists in the Codex library (matched via external IDs)" + }, + "inLibrary": { + "type": "boolean", + "description": "Whether this series is already in the user's library (as reported by the plugin)" + }, + "popularity": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Popularity ranking/count on the source service" + }, + "rating": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Average user rating on the source service (0-100 scale)" + }, + "reason": { + "type": "string", + "description": "Human-readable reason for this recommendation" + }, + "score": { + "type": "number", + "format": "double", + "description": "Confidence/relevance score (0.0 to 1.0)" + }, + "startYear": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Year the series started" + }, + "status": { + "type": [ + "string", + "null" + ], + "description": "Publication status (ongoing, ended, hiatus, abandoned, unknown)" + }, + "summary": { + "type": [ + "string", + "null" + ], + "description": "Summary/description" + }, + "tags": { + "type": [ + "array", + "null" + ], + "items": { + "$ref": 
"#/components/schemas/RecommendationTagDto" + }, + "description": "Tags with relevance rank" + }, + "title": { + "type": "string", + "description": "Title of the recommended series/book" + }, + "totalChapterCount": { + "type": [ + "number", + "null" + ], + "format": "float", + "description": "Total expected number of chapters in the series. May be fractional." + }, + "totalVolumeCount": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Total expected number of volumes in the series." + } + } + }, + "RecommendationTagDto": { + "type": "object", + "description": "A tag with relevance rank from the source service", + "required": [ + "name", + "rank", + "category" + ], + "properties": { + "category": { + "type": "string", + "description": "Tag category (e.g., \"Genre\", \"Theme\")" + }, + "name": { + "type": "string", + "description": "Tag name (e.g., \"Isekai\", \"Gore\")" }, - "updatedAt": { + "rank": { + "type": "integer", + "format": "int32", + "description": "Relevance rank (0-100)" + } + } + }, + "RecommendationsRefreshResponse": { + "type": "object", + "description": "Response from POST /api/v1/user/recommendations/refresh", + "required": [ + "taskId", + "message" + ], + "properties": { + "message": { "type": "string", - "format": "date-time", - "description": "When progress was last updated", - "example": "2024-01-15T18:45:00Z" + "description": "Human-readable status message" }, - "userId": { + "taskId": { "type": "string", "format": "uuid", - "description": "User ID", - "example": "550e8400-e29b-41d4-a716-446655440001" + "description": "Task ID for tracking the refresh operation" } } }, - "ReadingProgress": { + "RecommendationsResponse": { "type": "object", - "description": "Reading progress information for a publication\n\nCustom extension for tracking reading progress in OPDS 2.0.\nCompatible with reading apps that support progress sync.", + "description": "Response from GET /api/v1/user/recommendations", "required": [ - 
"currentPage", - "totalPages", - "progressPercent", - "isCompleted" + "recommendations", + "pluginId", + "pluginName" ], "properties": { - "currentPage": { - "type": "integer", - "format": "int32", - "description": "Current page (1-indexed)" - }, - "isCompleted": { + "cached": { "type": "boolean", - "description": "Whether the book has been completed" + "description": "Whether these are cached results" }, - "lastReadAt": { + "generatedAt": { "type": [ "string", "null" ], - "format": "date-time", - "description": "Last time progress was updated" + "description": "When these recommendations were generated" }, - "progressPercent": { - "type": "number", - "format": "double", - "description": "Progress as a percentage (0.0 - 100.0)" + "pluginId": { + "type": "string", + "format": "uuid", + "description": "Plugin that provided these recommendations" }, - "totalPages": { - "type": "integer", - "format": "int32", - "description": "Total number of pages in the book" - } - } - }, - "RecommendationDto": { - "type": "object", - "description": "A single recommendation for the user", - "required": [ - "externalId", - "title", - "score", - "reason" - ], - "properties": { - "basedOn": { + "pluginName": { + "type": "string", + "description": "Plugin display name" + }, + "recommendations": { "type": "array", "items": { - "type": "string" + "$ref": "#/components/schemas/RecommendationDto" }, - "description": "Titles that influenced this recommendation" + "description": "Personalized recommendations" }, - "codexSeriesId": { + "taskId": { "type": [ "string", "null" ], - "description": "Codex series ID if matched to an existing series" + "format": "uuid", + "description": "ID of the running/pending background task, if any" }, - "countryOfOrigin": { + "taskStatus": { "type": [ "string", "null" ], - "description": "Country of origin ISO code (e.g., \"JP\", \"KR\", \"CN\")" + "description": "Status of a running/pending background task (\"pending\" or \"running\"), if any" + } + } + }, + 
"RefreshScope": { + "type": "string", + "description": "Scope of a metadata refresh job.\n\nPhase 9 only honours [`RefreshScope::SeriesOnly`] at runtime. The\nother variants are schema-accepted but rejected by the validator.", + "enum": [ + "series_only", + "books_only", + "series_and_books" + ] + }, + "RegisterRequest": { + "type": "object", + "description": "Register request", + "required": [ + "username", + "email", + "password" + ], + "properties": { + "email": { + "type": "string", + "description": "Email address", + "example": "john@example.com" }, - "coverUrl": { + "password": { + "type": "string", + "description": "Password", + "example": "securePassword123!" + }, + "username": { + "type": "string", + "description": "Username", + "example": "johndoe" + } + } + }, + "RegisterResponse": { + "type": "object", + "description": "Register response", + "required": [ + "user" + ], + "properties": { + "accessToken": { "type": [ "string", "null" ], - "description": "Cover image URL" + "description": "JWT access token (if email confirmation not required)", + "example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..." 
}, - "externalId": { - "type": "string", - "description": "External ID on the source service" + "expiresIn": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "description": "Token expiry in seconds", + "example": 86400, + "minimum": 0 }, - "externalUrl": { + "message": { "type": [ "string", "null" ], - "description": "URL to the entry on the external service" + "description": "Message about email verification if required", + "example": "Please check your email to verify your account" }, - "format": { + "tokenType": { "type": [ "string", "null" ], - "description": "Media format (e.g., \"MANGA\", \"NOVEL\", \"ONE_SHOT\")" + "description": "Token type (always \"Bearer\")", + "example": "Bearer" }, - "genres": { + "user": { + "$ref": "#/components/schemas/UserInfo", + "description": "User information" + } + } + }, + "ReleaseFacetsResponse": { + "type": "object", + "description": "Response shape for `GET /api/v1/releases/facets`.\n\nEach list reflects the distinct values present in the ledger under the\n**other** active filters (Solr-style facet exclusion), so dropdowns\nnever offer combinations that would yield zero results. 
The frontend\nuses these to populate cascading filter Select inputs without forcing\nthe user to type UUIDs.", + "required": [ + "languages", + "libraries", + "series" + ], + "properties": { + "languages": { "type": "array", "items": { - "type": "string" - }, - "description": "Genres" + "$ref": "#/components/schemas/ReleaseLanguageFacetDto" + } }, - "inCodex": { - "type": "boolean", - "description": "Whether this series exists in the Codex library (matched via external IDs)" + "libraries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReleaseLibraryFacetDto" + } }, - "inLibrary": { - "type": "boolean", - "description": "Whether this series is already in the user's library (as reported by the plugin)" + "series": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReleaseSeriesFacetDto" + } + } + } + }, + "ReleaseLanguageFacetDto": { + "type": "object", + "description": "One language option in the inbox facets response.", + "required": [ + "language", + "count" + ], + "properties": { + "count": { + "type": "integer", + "format": "int64", + "minimum": 0 }, - "popularity": { + "language": { + "type": "string" + } + } + }, + "ReleaseLedgerEntryDto": { + "type": "object", + "description": "A single release announcement. Sources write these; the inbox reads them.", + "required": [ + "id", + "seriesId", + "seriesTitle", + "sourceId", + "externalReleaseId", + "payloadUrl", + "confidence", + "state", + "observedAt", + "createdAt" + ], + "properties": { + "chapter": { "type": [ - "integer", + "number", "null" ], - "format": "int32", - "description": "Popularity ranking/count on the source service" + "format": "double", + "description": "Decimal supports `12.5` etc." 
}, - "rating": { + "confidence": { + "type": "number", + "format": "double" + }, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "externalReleaseId": { + "type": "string", + "description": "Plugin-stable identity for the release (used for dedup).", + "example": "nyaa:1234567" + }, + "formatHints": { + "description": "Sparse `{ \"jxl\": true, \"container\": \"cbz\", ... }`." + }, + "groupOrUploader": { "type": [ - "integer", + "string", "null" ], - "format": "int32", - "description": "Average user rating on the source service (0-100 scale)" + "description": "Group/scanlator/uploader attribution." }, - "reason": { - "type": "string", - "description": "Human-readable reason for this recommendation" - }, - "score": { - "type": "number", - "format": "double", - "description": "Confidence/relevance score (0.0 to 1.0)" + "id": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440a00" }, - "startYear": { + "infoHash": { "type": [ - "integer", + "string", "null" ], - "format": "int32", - "description": "Year the series started" + "description": "Torrent info_hash, if applicable." }, - "status": { + "language": { "type": [ "string", "null" - ], - "description": "Publication status (ongoing, ended, hiatus, abandoned, unknown)" + ] }, - "summary": { + "mediaUrl": { "type": [ "string", "null" ], - "description": "Summary/description" + "description": "Optional second URL for direct fetch (`.torrent`, `magnet:`, DDL\nlink). Travels paired with [`Self::media_url_kind`]." }, - "tags": { + "mediaUrlKind": { "type": [ - "array", + "string", "null" ], - "items": { - "$ref": "#/components/schemas/RecommendationTagDto" - }, - "description": "Tags with relevance rank" + "description": "Classifies what `media_url` points at: `torrent` | `magnet` |\n`direct` | `other`. The frontend uses this to pick a kind-specific\nicon next to the standard external-link icon." 
}, - "title": { + "metadata": { + "description": "Source-specific extras (free-form)." + }, + "observedAt": { "type": "string", - "description": "Title of the recommended series/book" + "format": "date-time" }, - "totalChapterCount": { - "type": [ - "number", - "null" - ], - "format": "float", - "description": "Total expected number of chapters in the series. May be fractional." + "payloadUrl": { + "type": "string", + "description": "Where to acquire the release. Conventionally a human-readable\nlanding page (Nyaa view page, MangaUpdates release page)." }, - "totalVolumeCount": { + "seriesId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440002" + }, + "seriesTitle": { + "type": "string", + "description": "Series title at the time of the response. Joined from the `series`\ntable so the inbox UI can render a human-readable label without a\nfollow-up fetch. Falls back to the empty string only if the series\nrow was hard-deleted between the join and the read.", + "example": "Chainsaw Man" + }, + "sourceId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440b00" + }, + "state": { + "type": "string", + "description": "`announced` | `dismissed` | `marked_acquired` | `hidden`." + }, + "volume": { "type": [ "integer", "null" ], - "format": "int32", - "description": "Total expected number of volumes in the series." 
+ "format": "int32" } } }, - "RecommendationTagDto": { + "ReleaseLedgerListResponse": { "type": "object", - "description": "A tag with relevance rank from the source service", "required": [ - "name", - "rank", - "category" + "entries" ], "properties": { - "category": { - "type": "string", - "description": "Tag category (e.g., \"Genre\", \"Theme\")" + "entries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReleaseLedgerEntryDto" + } + } + } + }, + "ReleaseLibraryFacetDto": { + "type": "object", + "description": "One library option in the inbox facets response.", + "required": [ + "libraryId", + "libraryName", + "count" + ], + "properties": { + "count": { + "type": "integer", + "format": "int64", + "minimum": 0 }, - "name": { + "libraryId": { "type": "string", - "description": "Tag name (e.g., \"Isekai\", \"Gore\")" + "format": "uuid" }, - "rank": { - "type": "integer", - "format": "int32", - "description": "Relevance rank (0-100)" + "libraryName": { + "type": "string" } } }, - "RecommendationsRefreshResponse": { + "ReleaseSeriesFacetDto": { "type": "object", - "description": "Response from POST /api/v1/user/recommendations/refresh", + "description": "One series option in the inbox facets response. 
Carries the joined\n`library_id` and `library_name` so the frontend can group the dropdown\nby library without a follow-up call.", "required": [ - "taskId", - "message" + "seriesId", + "seriesTitle", + "libraryId", + "libraryName", + "count" ], "properties": { - "message": { + "count": { + "type": "integer", + "format": "int64", + "description": "Number of ledger rows matching the active filter for this series.", + "minimum": 0 + }, + "libraryId": { "type": "string", - "description": "Human-readable status message" + "format": "uuid" }, - "taskId": { + "libraryName": { + "type": "string" + }, + "seriesId": { "type": "string", - "format": "uuid", - "description": "Task ID for tracking the refresh operation" + "format": "uuid" + }, + "seriesTitle": { + "type": "string" } } }, - "RecommendationsResponse": { + "ReleaseSourceDto": { "type": "object", - "description": "Response from GET /api/v1/user/recommendations", + "description": "A configured release source (one row per logical feed).", "required": [ - "recommendations", + "id", "pluginId", - "pluginName" + "sourceKey", + "displayName", + "kind", + "enabled", + "effectiveCronSchedule", + "createdAt", + "updatedAt" ], "properties": { - "cached": { - "type": "boolean", - "description": "Whether these are cached results" + "config": { + "description": "Source-specific configuration (free-form)." }, - "generatedAt": { + "createdAt": { + "type": "string", + "format": "date-time" + }, + "cronSchedule": { "type": [ "string", "null" ], - "description": "When these recommendations were generated" + "description": "Per-source cron override (5-field POSIX cron). 
`null` when the row\ninherits the server-wide `release_tracking.default_cron_schedule`.\nAlways present in the response (not omitted on null) so clients can\ndistinguish \"inheriting\" from \"field missing.\"" }, - "pluginId": { - "type": "string", - "format": "uuid", - "description": "Plugin that provided these recommendations" + "displayName": { + "type": "string" }, - "pluginName": { + "effectiveCronSchedule": { "type": "string", - "description": "Plugin display name" + "description": "The cron expression actually used by the scheduler for this source:\nthe row's `cron_schedule` if set, otherwise the resolved server-wide\ndefault. Lets the UI display \"Daily (Default)\" without needing to\nfetch the global setting separately." }, - "recommendations": { - "type": "array", - "items": { - "$ref": "#/components/schemas/RecommendationDto" - }, - "description": "Personalized recommendations" + "enabled": { + "type": "boolean" }, - "taskId": { + "etag": { "type": [ "string", "null" ], - "format": "uuid", - "description": "ID of the running/pending background task, if any" + "description": "Opaque etag/cursor used for conditional fetches." }, - "taskStatus": { - "type": [ - "string", - "null" - ], - "description": "Status of a running/pending background task (\"pending\" or \"running\"), if any" - } - } - }, - "RefreshScope": { - "type": "string", - "description": "Scope of a metadata refresh job.\n\nPhase 9 only honours [`RefreshScope::SeriesOnly`] at runtime. 
The\nother variants are schema-accepted but rejected by the validator.", - "enum": [ - "series_only", - "books_only", - "series_and_books" - ] - }, - "RegisterRequest": { - "type": "object", - "description": "Register request", - "required": [ - "username", - "email", - "password" - ], - "properties": { - "email": { + "id": { "type": "string", - "description": "Email address", - "example": "john@example.com" + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440b00" }, - "password": { + "kind": { "type": "string", - "description": "Password", - "example": "securePassword123!" + "description": "`rss-uploader` | `rss-series` | `api-feed` | `metadata-feed` | `metadata-piggyback`." }, - "username": { - "type": "string", - "description": "Username", - "example": "johndoe" - } - } - }, - "RegisterResponse": { - "type": "object", - "description": "Register response", - "required": [ - "user" - ], - "properties": { - "accessToken": { + "lastError": { "type": [ "string", "null" - ], - "description": "JWT access token (if email confirmation not required)", - "example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..." + ] }, - "expiresIn": { + "lastErrorAt": { "type": [ - "integer", + "string", "null" ], - "format": "int64", - "description": "Token expiry in seconds", - "example": 86400, - "minimum": 0 + "format": "date-time" }, - "message": { + "lastPolledAt": { "type": [ "string", "null" ], - "description": "Message about email verification if required", - "example": "Please check your email to verify your account" + "format": "date-time" }, - "tokenType": { + "lastSummary": { "type": [ "string", "null" ], - "description": "Token type (always \"Bearer\")", - "example": "Bearer" + "description": "One-line summary of the most recent successful poll. Surfaced under\nthe row's status badge so users can see *why* a poll returned no\nannouncements without grepping logs. NULL until the first successful\npoll on the source." 
}, - "user": { - "$ref": "#/components/schemas/UserInfo", - "description": "User information" + "pluginId": { + "type": "string", + "description": "Owning plugin id, or `core` for in-core synthetic sources.", + "example": "release-nyaa" + }, + "sourceKey": { + "type": "string", + "description": "Plugin-defined unique key.", + "example": "nyaa:user:tsuna69" + }, + "updatedAt": { + "type": "string", + "format": "date-time" + } + } + }, + "ReleaseSourceListResponse": { + "type": "object", + "required": [ + "sources" + ], + "properties": { + "sources": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReleaseSourceDto" + } } } }, @@ -32114,6 +34092,21 @@ } } }, + "ResetReleaseSourceResponse": { + "type": "object", + "description": "Response shape from the `reset` endpoint.\n\nReturns the number of ledger rows removed so callers can show a\nconfirmation toast. The source's transient poll state (etag,\nlast_polled_at, last_error, last_summary) is also cleared, but those\nare not counted here.", + "required": [ + "deletedLedgerEntries" + ], + "properties": { + "deletedLedgerEntries": { + "type": "integer", + "format": "int64", + "description": "Number of `release_ledger` rows deleted for this source.", + "minimum": 0 + } + } + }, "RetryAllErrorsRequest": { "type": "object", "description": "Request body for bulk retrying all book errors", @@ -32435,6 +34428,64 @@ } } }, + "SeriesAliasDto": { + "type": "object", + "description": "Title alias used by release-source plugins to match incoming releases by\ntitle (Nyaa, MangaUpdates without an external ID, etc.).", + "required": [ + "id", + "seriesId", + "alias", + "normalized", + "source", + "createdAt" + ], + "properties": { + "alias": { + "type": "string", + "description": "Alias as entered (preserves casing/punctuation).", + "example": "My Hero Academia" + }, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "id": { + "type": "string", + "format": "uuid", + "description": "Alias row ID.", + 
"example": "550e8400-e29b-41d4-a716-446655440100" + }, + "normalized": { + "type": "string", + "description": "Lowercased + punctuation-stripped form used for matching.", + "example": "my hero academia" + }, + "seriesId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440002" + }, + "source": { + "type": "string", + "description": "`metadata` (auto-derived) | `manual` (user-entered).", + "example": "manual" + } + } + }, + "SeriesAliasListResponse": { + "type": "object", + "required": [ + "aliases" + ], + "properties": { + "aliases": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesAliasDto" + } + } + } + }, "SeriesAverageRatingResponse": { "type": "object", "description": "Response containing the average community rating for a series", @@ -32924,6 +34975,32 @@ "description": "When the series was last updated", "example": "2024-01-15T10:30:00Z" }, + "upstreamChapterGap": { + "type": [ + "number", + "null" + ], + "format": "float", + "description": "Difference between the upstream original-language chapter count\n(`series_metadata.total_chapter_count`, supplied by metadata\nproviders like MangaBaka or AniList) and the highest locally-owned\nchapter (`local_max_chapter`).\n\nAlways `None` unless the series is tracked AND `track_chapters` is\nenabled AND the provider count is populated AND the rounded-to-1-\ndecimal gap is positive. **This is an informational signal, not a\nrelease announcement** — Phase 6's MangaUpdates plugin owns the\ntranslation-release feed.", + "example": 3.0 + }, + "upstreamGapProvider": { + "type": [ + "string", + "null" + ], + "description": "Display name of the metadata provider that supplied the upstream\ncounts (e.g., \"MangaBaka\", \"AniList\"). Set whenever at least one of\n`upstream_chapter_gap` / `upstream_volume_gap` is populated. 
Used by\nthe Phase 7 badge tooltip.", + "example": "MangaBaka" + }, + "upstreamVolumeGap": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Difference between the upstream original-language volume count\n(`series_metadata.total_volume_count`) and the highest locally-owned\nvolume (`local_max_volume`). Same suppression rules as\n`upstream_chapter_gap`, gated on `track_volumes`.", + "example": 1 + }, "volumesOwned": { "type": [ "integer", @@ -33504,6 +35581,93 @@ "custom" ] }, + "SeriesTrackingDto": { + "type": "object", + "description": "Per-series release-tracking configuration.\n\nReturned even for untracked series — the row defaults to `tracked: false`\nwith conservative defaults so the frontend can render the panel without\nspecial-casing missing rows.", + "required": [ + "seriesId", + "tracked", + "trackChapters", + "trackVolumes", + "createdAt", + "updatedAt" + ], + "properties": { + "confidenceThresholdOverride": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Per-series override of the server's confidence threshold (0.0 - 1.0)." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "When the row was created (epoch when virtual)." + }, + "languages": { + "type": [ + "array", + "null" + ], + "items": { + "type": "string" + }, + "description": "Per-series language preference (ISO 639-1 codes, e.g. `[\"en\", \"es\"]`).\n`null` means \"fall back to the server-wide default (`release_tracking.default_languages`).\"\nUsed by aggregation feeds (e.g. MangaUpdates) that emit candidates in many languages." + }, + "latestKnownChapter": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Latest known external chapter (supports decimals like 12.5)." + }, + "latestKnownVolume": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Latest known external volume." 
+ }, + "pollIntervalOverrideS": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Per-series override of the source poll interval (seconds)." + }, + "seriesId": { + "type": "string", + "format": "uuid", + "description": "Series ID this config belongs to.", + "example": "550e8400-e29b-41d4-a716-446655440002" + }, + "trackChapters": { + "type": "boolean", + "description": "Whether to announce new chapters." + }, + "trackVolumes": { + "type": "boolean", + "description": "Whether to announce new volumes." + }, + "tracked": { + "type": "boolean", + "description": "Whether release tracking is enabled." + }, + "updatedAt": { + "type": "string", + "format": "date-time", + "description": "When the row was last updated (epoch when virtual)." + }, + "volumeChapterMap": { + "description": "Sparse map of `{ \"\": { \"first\": ch, \"last\": ch } }`." + } + } + }, "SeriesUpdateResponse": { "type": "object", "description": "Response for series update", @@ -35383,6 +37547,60 @@ "format": "uuid" } } + }, + { + "type": "object", + "description": "Backfill release-tracking aliases from existing series metadata.\n\nWalks series in scope, harvests the canonical title plus alternate titles\nfrom `series_metadata` and `series_alternate_titles`, and seeds them as\n`metadata`-source aliases in `series_aliases`. Idempotent — re-runs do\nnot create duplicates. Does NOT enable tracking; that stays explicit.", + "required": [ + "type" + ], + "properties": { + "libraryId": { + "type": [ + "string", + "null" + ], + "format": "uuid", + "description": "If set, scope to this library; otherwise all series." + }, + "seriesIds": { + "type": [ + "array", + "null" + ], + "items": { + "type": "string", + "format": "uuid" + }, + "description": "If set, scope to these specific series (takes precedence over library_id)." 
+ }, + "type": { + "type": "string", + "enum": [ + "backfill_tracking_from_metadata" + ] + } + } + }, + { + "type": "object", + "description": "Poll a single `release_sources` row for new releases.\n\nResolves the source's owning plugin, calls `releases/poll` over the\nexisting plugin host, runs returned candidates through the matcher +\nthreshold, and writes accepted candidates to the ledger. On success\nupdates `last_polled_at` (and optionally `etag`); on failure records\n`last_error`. Idempotent: ledger writes dedup on\n`(source_id, external_release_id)` and `info_hash`.", + "required": [ + "sourceId", + "type" + ], + "properties": { + "sourceId": { + "type": "string", + "format": "uuid" + }, + "type": { + "type": "string", + "enum": [ + "poll_release_source" + ] + } + } } ], "description": "Task types supported by the distributed task queue" @@ -36475,6 +38693,108 @@ } } }, + "UpdateReleaseLedgerEntryRequest": { + "type": "object", + "description": "PATCH payload for ledger row state transitions.\n\nOnly `state` is patchable from the API today; the rest of the row is\nsource-controlled. `state` is validated against the canonical set:\n`announced` | `dismissed` | `marked_acquired` | `hidden`.", + "properties": { + "state": { + "type": [ + "string", + "null" + ], + "description": "New state. See [`ReleaseLedgerEntryDto::state`] for allowed values." + } + } + }, + "UpdateReleaseSourceRequest": { + "type": "object", + "description": "PATCH payload for a release source. All fields optional; omit to leave alone.\n\n`cron_schedule` uses double-Option semantics:\n- field absent (`None`): leave the row's cron_schedule unchanged\n- explicit `null` (`Some(None)`) / `\"\"` / `\" \"`: clear the override\n (revert to inheriting the server-wide\n `release_tracking.default_cron_schedule`)\n- `Some(Some(\"0 */6 * * *\"))`: set a per-source override", + "properties": { + "cronSchedule": { + "type": [ + "string", + "null" + ], + "description": "5-field POSIX cron expression. 
Use `null` (or empty string) to\nclear the override and inherit the server-wide default." + }, + "displayName": { + "type": [ + "string", + "null" + ] + }, + "enabled": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "UpdateSeriesTrackingRequest": { + "type": "object", + "description": "PATCH payload for tracking config. All fields are optional:\nomit a field to leave it untouched. Use a JSON `null` on a nullable field\nto clear it explicitly.", + "properties": { + "confidenceThresholdOverride": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "languages": { + "type": [ + "array", + "null" + ], + "items": { + "type": "string" + }, + "description": "ISO 639-1 codes; `null` clears (falls back to server-wide default)." + }, + "latestKnownChapter": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Use `Some(null)` to clear, `Some()` to set, omit to leave alone." + }, + "latestKnownVolume": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "pollIntervalOverrideS": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "trackChapters": { + "type": [ + "boolean", + "null" + ] + }, + "trackVolumes": { + "type": [ + "boolean", + "null" + ] + }, + "tracked": { + "type": [ + "boolean", + "null" + ] + }, + "volumeChapterMap": {} + } + }, "UpdateSettingRequest": { "type": "object", "description": "Update setting request", @@ -37359,6 +39679,14 @@ "name": "Series", "description": "Series browsing and search endpoints" }, + { + "name": "Tracking", + "description": "Release-tracking config and matcher aliases" + }, + { + "name": "Releases", + "description": "Release ledger (announcements) and source admin" + }, { "name": "Books", "description": "Book details and metadata endpoints" @@ -37484,6 +39812,8 @@ "tags": [ "Libraries", "Series", + "Tracking", + "Releases", "Books", "Pages" ] diff --git a/docs/docs/plugins/release-mangaupdates.md b/docs/docs/plugins/release-mangaupdates.md new file mode 
100644 index 00000000..b2eaae91 --- /dev/null +++ b/docs/docs/plugins/release-mangaupdates.md @@ -0,0 +1,95 @@ +--- +--- + +# MangaUpdates Releases Plugin + +The MangaUpdates Releases plugin announces new chapter and volume releases for tracked series by polling per-series RSS feeds at [MangaUpdates](https://www.mangaupdates.com). It is a **notify-only** plugin: Codex surfaces announcements; you acquire externally. + +## Features + +- Per-series RSS polling against MangaUpdates' v1 API. +- Multi-language support: each scanlation release carries a language tag (English, Spanish, Indonesian, French, German, Portuguese, etc.). +- Per-series language preferences with a server-wide default. +- Admin-configurable scanlation group blocklist. +- Idempotent ledger writes (re-polling never re-announces an already-seen release). +- Daily default poll interval; conditional GET keeps bandwidth low. + +## How it works + +The plugin auto-registers a single source row (`MangaUpdates Releases`) on first start. Unlike Nyaa (one row per uploader), MangaUpdates polls every tracked series with a `mangaupdates` external ID under one logical feed, so a single row is the right model. You'll find the row in **Settings → Release tracking** along with its enable toggle, poll-interval input, and "Poll now" button. + +1. Codex schedules a poll for the source row (default: once per 24 hours). +2. The plugin asks the host for tracked series scoped to those with a `mangaupdates` external ID. +3. For each series, the plugin GETs `https://api.mangaupdates.com/v1/series/{id}/rss`. +4. Each RSS item is parsed into a release candidate: chapter / volume number, scanlation group, language code, release page URL. +5. Candidates are filtered by the configured language list and group blocklist, then submitted to the host's release ledger. +6. The host applies a confidence threshold (1.0 here, since matches are ID-keyed) and dedups on `(source_id, external_release_id)`. +7. 
On successful insert, `series_tracking.latest_known_chapter` / `latest_known_volume` advance to the high-water mark — but only for releases in the series' effective language list. + +The plugin **never** downloads release files. The "Open" link on the inbox row sends you to the MangaUpdates release page; how you acquire the chapter is up to you. + +## Setup + +The plugin works out of the box once installed and enabled — no required config. The single source row is materialized on first start. The two things you'll typically configure are: which **languages** announcements should pass through (see below), and optionally a **scanlation group blocklist** for noisy groups. + +### Populating MangaUpdates IDs + +For the plugin to find any tracked series, those series need a `mangaupdates` external ID. There are two ways to populate this: + +**Manual entry** (works for any series): + +1. Go to the series' detail page and open the **Tracking** panel. +2. Add a new external ID with source `mangaupdates` and the numeric ID from the series' MangaUpdates URL (e.g. `https://www.mangaupdates.com/series/abc123/series-name` → use the numeric internal ID exposed by the v1 API). + +**Metadata-refresh population**: when the MangaBaka metadata provider runs, it cross-references and stores the MangaUpdates ID automatically for series that exist in MangaBaka's database. + +### Language preferences + +MangaUpdates aggregates scanlation releases across many languages. The plugin filters announcements to languages you've configured. + +- **Per-series**: set `languages` to a list of [ISO 639-1 codes](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) on the series' tracking config, e.g. `["en"]` for English only or `["en", "es"]` for English + Spanish. +- **Server-wide default**: when a series doesn't override `languages`, the plugin falls back to the `release_tracking.default_languages` setting (default: `["en"]`). +- **Forward-only**: changing the language list affects future polls. 
Already-recorded ledger rows aren't retroactively hidden — use the inbox's language filter to re-scope an existing view. +- **Untagged entries**: MangaUpdates entries that don't carry a language code are dropped by default. There is no current admin override for this; if you need it, file an issue. + +Common language codes: + +| Code | Language | +| ---- | ----------- | +| en | English | +| es | Spanish | +| id | Indonesian | +| fr | French | +| de | German | +| pt | Portuguese | +| it | Italian | +| pl | Polish | +| ru | Russian | + +### Group blocklist + +Admins can configure `blockedGroups` (comma-separated) to silently drop releases from named scanlation groups. Matching is case-insensitive on the group name as it appears in the RSS title (the part following `by ` and before the language tag). Useful for dropping known low-quality / MTL-only groups. + +``` +blockedGroups: "MTL Group, LowQualityScans" +``` + +## Configuration reference + +| Field | Scope | Default | Notes | +| ------------------ | ------------ | ------- | --------------------------------------------------------------------- | +| `blockedGroups` | admin | `""` | CSV. Case-insensitive match on group name. | +| `requestTimeoutMs` | admin | `10000` | Hard timeout per RSS fetch. Clamped to `[1000, 60000]`. | +| `languages` | per-series | `null` | ISO 639-1 codes. `null` falls back to the server-wide default. | +| `default_languages` | server-wide | `["en"]` | `release_tracking.default_languages` setting. Affects all release-tracking plugins, not just this one. | + +## Limitations + +- **Per-series ETags not implemented yet.** The plugin issues unconditional GETs against each tracked series' feed every poll. With daily polls and small per-series feeds this is a non-issue, but it does mean a 304 response is essentially never seen on this source. A future revision will add per-(source, series) state to wire conditional GETs through. +- **Volume bundles are best-effort.** Volume-only entries (e.g. 
`Vol.15 by VolBundler (en)`) are recognized and announce on the volume axis, but mixed entries (`Vol.2 c.14 by Group (en)`) bump both chapter and volume marks. Whether a volume bundle should retroactively suppress already-announced loose chapters is governed by the host's matcher, not this plugin. +- **No retroactive language re-filter.** Switching `languages` only affects future polls. Old ledger rows in dropped languages stay in the inbox unless dismissed; the inbox's language filter scopes the view. + +## Risks + +- **Rate limits.** MangaUpdates serves the RSS endpoints publicly without API keys. The plugin uses a daily default poll cadence and per-host backoff (driven by the host) to back off on 429 / 503 responses. Tracking hundreds of series with sub-hourly intervals will likely get you rate-limited; stick to daily. +- **Missing IDs.** Series without a `mangaupdates` external ID are silently skipped. This is by design (the plugin would otherwise have to fuzzy-match titles, which the n8n flow proved is unsafe). diff --git a/docs/docs/plugins/release-nyaa.md b/docs/docs/plugins/release-nyaa.md new file mode 100644 index 00000000..002c4edb --- /dev/null +++ b/docs/docs/plugins/release-nyaa.md @@ -0,0 +1,98 @@ +--- +--- + +# Nyaa Releases Plugin + +The Nyaa Releases plugin announces new chapter and volume torrents for tracked series by polling Nyaa.si user RSS feeds. Unlike the [MangaUpdates plugin](./release-mangaupdates.md), which tells you *what* has been released in your languages, the Nyaa plugin tells you *where to download* a release that exists. It is **notify-only**: Codex never downloads torrents. + +## What it's for + +Nyaa is an acquisition-pointer source. It complements (not replaces) the translation-feed plugins: + +- **MangaUpdates** answers: "Has chapter 143 been released in English?" +- **Nyaa** answers: "Is there a torrent for chapter 143 from a trusted uploader?" 
+ +Use Nyaa when you've already decided on a small allowlist of trusted uploaders (e.g. `1r0n`) and want a single feed of "new releases from these people" filtered down to your tracked series. + +## Features + +- Per-uploader (or per-search-query) RSS polling against Nyaa.si user feeds. +- Alias-based series matching: each parsed Nyaa title is normalized and compared to every tracked series' alias list. +- Confidence scoring: exact normalized match → 0.95; fuzzy near-match (Sørensen-Dice) → 0.7-0.85; everything below is dropped before reaching the host. +- Format-hint extraction: `(Digital)`, `(JXL)`, `(Magazine)`, etc. surface on the candidate's `formatHints` for downstream filtering. +- Volume and chapter ranges are recognized: `[1r0n] Boruto v01-14 (Digital)` and `[Group] Dandadan c126-142 (Digital)` parse correctly and pass both ends to the host. +- Idempotent ledger writes (re-polling never re-announces an already-seen release). +- Daily default poll interval; conditional GETs (ETag + Last-Modified) keep bandwidth low. +- Per-host backoff is driven by the host on 429 / 503 responses. + +## How it works + +The plugin auto-materializes one **release source row** per uploader entry on first start (and on every config save, which restarts the plugin). + +1. You set the plugin's `uploaders` config to a comma-separated list (see [Setup](#setup) below). +2. On startup the plugin parses the list and calls `releases/register_sources` over the host RPC channel. The host upserts one row per entry in `release_sources` keyed on `(plugin_id, sourceKey)` where `sourceKey` is `kind:identifier` (e.g. `user:tsuna69`, `query:luminousscans`, `params:c=3_1&q=berserk`). +3. Each row gets its own poll cadence (default 24h, overridable in **Settings → Release tracking**), its own ETag, and its own last-error / last-polled status. The scheduler fires one `releases/poll` task per row. +4. 
When the host calls `releases/poll(sourceId, sourceKey, config, etag)`, the plugin recovers the subscription from `config.subscription` and fetches just that uploader's feed:
+   - User feed: `https://nyaa.si/?page=rss&u=<username>`
+   - Plain search: `https://nyaa.si/?page=rss&q=<query>`
+   - URL-style params: `https://nyaa.si/?page=rss&<params>`
+5. Each RSS item is parsed: a leading `[Group]` token, chapter / volume token (single or range), and parenthesized format hints are extracted; the remaining text is the *series guess*.
+6. The series guess is normalized and matched against tracked-series aliases. Confidence ≥ 0.95 on exact normalized match; otherwise the matcher computes a token-level Dice ratio and rejects below 0.85.
+7. Matching candidates are submitted to the host's release ledger via `releases/record`. The host applies its threshold (default 0.7) and dedups on `(source_id, external_release_id)` and on `info_hash` (Nyaa's `nyaa:infoHash` element).
+
+Removing an entry from the `uploaders` list and re-saving prunes the corresponding row and its `release_ledger` history (cascade delete). User-managed fields (`enabled`, `pollIntervalS`) survive plugin restarts.
+
+The plugin **never** downloads release files. The "Open" link on the inbox row sends you to the Nyaa view page or the `.torrent` URL; how you acquire the chapter is up to you.
+
+## Setup
+
+### 1. Configure uploader subscriptions
+
+The plugin's `uploaders` admin field is a comma-separated list of trusted uploader handles or queries. Each entry takes one of three forms:
+
+| Form | Example | What it polls |
+| ----------------- | ------------------------ | ---------------------------------------------------------------------------- |
+| `username` | `tsuna69` | `https://nyaa.si/?page=rss&u=tsuna69` — that uploader's full RSS feed. |
+| `q:<query>` | `q:LuminousScans` | `https://nyaa.si/?page=rss&q=LuminousScans` — a plain site-wide search. |
+| `q:?<params>` | `q:?c=3_1&q=Berserk` | URL-style search with allowlisted keys: `q`, `c`, `f`, `u`. The example here scopes a search to the Literature → English-translated category. |
+
+Mix freely:
+
+```json
+{
+  "uploaders": "tsuna69,TankobonBlur,q:LuminousScans,q:?c=3_1&q=Berserk"
+}
+```
+
+Empty tokens are dropped; case-insensitive duplicates are silently deduplicated. URL-style entries normalize their param order so `q:?q=X&c=3_1` and `q:?c=3_1&q=X` collapse to the same source row. Anything not on the allowlist (`s=`, `o=`, etc.) is dropped without error.
+
+After saving, head to **Settings → Release tracking** to see the per-source rows the plugin registered. Each row has its own enable toggle, poll-interval input, and "Poll now" button. Disabling a row pauses its scheduled polls; deleting an entry from the `uploaders` CSV (and saving) removes the row entirely.
+
+### 2. Make sure tracked series have aliases
+
+Nyaa releases identify a series only by name in the title. The plugin matches titles to series via the `series_aliases` table:
+
+- The `BackfillTrackingFromMetadata` task (Phase 1) seeds aliases from each series' `series_metadata.title`, `title_sort`, and alternate titles.
+- You can also add aliases manually via the Tracking panel on a series detail page.
+
+For best results, add aliases that mirror how your trusted uploaders name the release. Example: 1r0n names `Boruto: Two Blue Vortex` as `[1r0n] Boruto - Two Blue Vortex - Volume NN (Digital)`. The default normalization produces `boruto two blue vortex` from both forms, so an exact match is automatic — but if you track *Boruto* with only the alias `Boruto`, the matcher will see `boruto two blue vortex` and reject it as not similar enough to `boruto`.
+ +## Configuration reference + +| Field | Scope | Default | Notes | +| ------------------ | ------------ | ---------------------- | -------------------------------------------------------------------------------------------------- | +| `uploaders` | admin | `""` | Comma-separated subscription list. See the table above for the three accepted entry forms. | +| `requestTimeoutMs` | admin | `10000` | Hard timeout per Nyaa fetch. Clamped to `[1000, 60000]`. | +| `baseUrl` | admin | `https://nyaa.si` | Override base URL — useful for mirrors. Trailing slashes are trimmed. | + +## Limitations + +- **Language is hardcoded to English.** Nyaa releases don't carry a language tag, and the uploaders this plugin targets predominantly release English-language scans. Admins who add non-English uploaders should configure tracked series' `languages` accordingly so the host's `latest_known_*` advance gate doesn't pollute the high-water mark with releases the user can't read. +- **Title parsing is best-effort.** The corpus covers the common 1r0n / TankobonBlur shapes plus generic `Volume NN` / `Chapter NNN` forms. Edge-case titles (e.g. unusual punctuation, missing separators) may parse with an empty `seriesGuess`; the matcher silently rejects those entries (no false positives). +- **No per-uploader confidence weighting yet.** Every matched candidate gets the same confidence based on the alias match alone. Adding per-uploader trust scores (downgrade an uploader after N user dismissals) is a future enhancement. + +## Risks + +- **Rate limits.** Nyaa serves RSS publicly without API keys, but it's a small site and aggressive polling is unwelcome. The plugin uses a daily default cadence and per-host backoff (driven by the host) to back off on 429 / 5xx responses. Don't reduce the interval below the default unless you have a specific reason. +- **Title-parsing false positives.** Alias-only matching is fundamentally fuzzier than the external-ID match used by MangaUpdates. 
The matcher's 0.85 Dice floor + 0.95 exact-confidence give the host's threshold (default 0.7) enough headroom to drop bad matches, but watch the inbox for the first few days after enabling and dismiss anything mis-matched. Repeated dismissals tell you which series need additional aliases. +- **Quality varies by uploader.** This is *acquisition pointer* data. The plugin doesn't validate that the underlying torrent is what its title claims to be; that's why the user maintains the uploader allowlist. diff --git a/migration/src/lib.rs b/migration/src/lib.rs index 46de62c6..281e5821 100644 --- a/migration/src/lib.rs +++ b/migration/src/lib.rs @@ -147,6 +147,22 @@ mod m20260503_000070_backfill_book_volume_chapter; // Filename retains the original Phase 1 name for git-history continuity; module // now creates the generic `library_jobs` table instead of adding a JSON column. mod m20260503_000071_add_metadata_refresh_config; +// Release tracking (Phase 1): series_tracking sidecar + series_aliases +mod m20260503_000072_create_release_tracking; +// Release tracking (Phase 2): release_sources + release_ledger +mod m20260503_000073_create_release_ledger; +// Release tracking (Phase 6): per-series language preference for scanlation feeds +mod m20260504_000074_add_tracking_languages; +// Release tracking (Phase 6): server-wide default language list +mod m20260504_000075_seed_release_tracking_languages; +// Release tracking (Phase 8 follow-up): server-wide notification filter settings +mod m20260504_000076_seed_release_tracking_notify_filters; +// Release tracking: per-source last-poll summary surfaced in the UI +mod m20260505_000077_add_release_sources_last_summary; +// Release tracking: per-row media_url + media_url_kind for torrent/magnet/DDL +mod m20260505_000078_add_release_ledger_media_url; +// Release tracking: server-wide default cron schedule for release-source polling +mod m20260505_000079_seed_release_tracking_default_cron; pub struct Migrator; @@ -264,6 +280,22 @@ impl 
MigratorTrait for Migrator { Box::new(m20260503_000070_backfill_book_volume_chapter::Migration), // Per-library scheduled metadata refresh config (Phase 1) Box::new(m20260503_000071_add_metadata_refresh_config::Migration), + // Release tracking (Phase 1) + Box::new(m20260503_000072_create_release_tracking::Migration), + // Release tracking (Phase 2) + Box::new(m20260503_000073_create_release_ledger::Migration), + // Release tracking (Phase 6): per-series language preference + Box::new(m20260504_000074_add_tracking_languages::Migration), + // Release tracking (Phase 6): server-wide default language list + Box::new(m20260504_000075_seed_release_tracking_languages::Migration), + // Release tracking (Phase 8 follow-up): notification filter settings + Box::new(m20260504_000076_seed_release_tracking_notify_filters::Migration), + // Release tracking: per-source last-poll summary + Box::new(m20260505_000077_add_release_sources_last_summary::Migration), + // Release tracking: per-row media_url + media_url_kind + Box::new(m20260505_000078_add_release_ledger_media_url::Migration), + // Release tracking: server-wide default cron schedule + Box::new(m20260505_000079_seed_release_tracking_default_cron::Migration), ] } } diff --git a/migration/src/m20260503_000072_create_release_tracking.rs b/migration/src/m20260503_000072_create_release_tracking.rs new file mode 100644 index 00000000..49b98d08 --- /dev/null +++ b/migration/src/m20260503_000072_create_release_tracking.rs @@ -0,0 +1,248 @@ +//! Create release-tracking schema (Phase 1 of release-tracking implementation). +//! +//! Adds two tables that augment the existing `series` and `series_external_ids` +//! tables for tracked-series support: +//! +//! - `series_tracking` (1:1 with series, FK cascade): per-series flag + status +//! metadata describing whether the series is being tracked for releases, and +//! the latest known external chapter/volume so the matcher can compute +//! "behind by N." +//! 
- `series_aliases`: title aliases used by sources without ID-based matching
+//!   (e.g. Nyaa). Distinct from `series_alternate_titles`, which is purpose-built
+//!   for labelled localized titles (Japanese/Romaji/English/Korean) - aliases
+//!   are arbitrary normalized strings used solely for matching incoming release
+//!   titles against tracked series.
+//!
+//! External IDs (MangaDex UUID, AniList, MAL, etc.) are stored in the existing
+//! `series_external_ids` table and are NOT duplicated here.
+
+use sea_orm_migration::prelude::*;
+
+use crate::m20260103_000003_create_series::Series;
+
+#[derive(DeriveMigrationName)]
+pub struct Migration;
+
+#[async_trait::async_trait]
+impl MigrationTrait for Migration {
+    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+        let is_postgres = manager.get_database_backend() == sea_orm::DatabaseBackend::Postgres;
+
+        // ---------- series_tracking ----------
+        let mut tracking = Table::create();
+        tracking
+            .table(SeriesTracking::Table)
+            .if_not_exists()
+            // Primary key is series_id (1:1 sidecar).
+            .col(
+                ColumnDef::new(SeriesTracking::SeriesId)
+                    .uuid()
+                    .not_null()
+                    .primary_key(),
+            )
+            .col(
+                ColumnDef::new(SeriesTracking::Tracked)
+                    .boolean()
+                    .not_null()
+                    .default(false),
+            )
+            .col(
+                ColumnDef::new(SeriesTracking::TrackChapters)
+                    .boolean()
+                    .not_null()
+                    .default(true),
+            )
+            .col(
+                ColumnDef::new(SeriesTracking::TrackVolumes)
+                    .boolean()
+                    .not_null()
+                    .default(true),
+            )
+            // Latest external chapter (decimal to handle 12.5 etc.) and volume.
+            .col(ColumnDef::new(SeriesTracking::LatestKnownChapter).double())
+            .col(ColumnDef::new(SeriesTracking::LatestKnownVolume).integer())
+            // Sparse map: { "<volume>": { "first": <chapter>, "last": <chapter> } }
+            .col(ColumnDef::new(SeriesTracking::VolumeChapterMap).json_binary())
+            // Per-series overrides (null = use source/server default).
+ .col(ColumnDef::new(SeriesTracking::PollIntervalOverrideS).integer()) + .col(ColumnDef::new(SeriesTracking::ConfidenceThresholdOverride).double()) + .col({ + let mut col = ColumnDef::new(SeriesTracking::CreatedAt); + col.timestamp_with_time_zone().not_null(); + if is_postgres { + col.extra("DEFAULT NOW()"); + } else { + col.extra("DEFAULT CURRENT_TIMESTAMP"); + } + col + }) + .col({ + let mut col = ColumnDef::new(SeriesTracking::UpdatedAt); + col.timestamp_with_time_zone().not_null(); + if is_postgres { + col.extra("DEFAULT NOW()"); + } else { + col.extra("DEFAULT CURRENT_TIMESTAMP"); + } + col + }) + .foreign_key( + ForeignKey::create() + .name("fk_series_tracking_series_id") + .from(SeriesTracking::Table, SeriesTracking::SeriesId) + .to(Series::Table, Series::Id) + .on_delete(ForeignKeyAction::Cascade) + .on_update(ForeignKeyAction::NoAction), + ); + + manager.create_table(tracking.to_owned()).await?; + + // Partial index for the hot path: "list all tracked series." + // Use raw SQL because the DSL's partial-index support is uneven + // across SQLite/Postgres in our SeaORM version. + manager + .get_connection() + .execute_unprepared( + "CREATE INDEX idx_series_tracking_tracked \ + ON series_tracking(series_id) WHERE tracked = TRUE", + ) + .await?; + + // ---------- series_aliases ---------- + let mut aliases = Table::create(); + aliases.table(SeriesAliases::Table).if_not_exists(); + + if is_postgres { + aliases.col( + ColumnDef::new(SeriesAliases::Id) + .uuid() + .not_null() + .primary_key() + .extra("DEFAULT gen_random_uuid()"), + ); + } else { + aliases.col( + ColumnDef::new(SeriesAliases::Id) + .uuid() + .not_null() + .primary_key(), + ); + } + + aliases + .col(ColumnDef::new(SeriesAliases::SeriesId).uuid().not_null()) + .col( + ColumnDef::new(SeriesAliases::Alias) + .string_len(500) + .not_null(), + ) + // Lowercased + punctuation-stripped, used for matching. 
+ .col( + ColumnDef::new(SeriesAliases::Normalized) + .string_len(500) + .not_null(), + ) + // 'metadata' | 'manual' + .col( + ColumnDef::new(SeriesAliases::Source) + .string_len(20) + .not_null(), + ) + .col({ + let mut col = ColumnDef::new(SeriesAliases::CreatedAt); + col.timestamp_with_time_zone().not_null(); + if is_postgres { + col.extra("DEFAULT NOW()"); + } else { + col.extra("DEFAULT CURRENT_TIMESTAMP"); + } + col + }) + .foreign_key( + ForeignKey::create() + .name("fk_series_aliases_series_id") + .from(SeriesAliases::Table, SeriesAliases::SeriesId) + .to(Series::Table, Series::Id) + .on_delete(ForeignKeyAction::Cascade) + .on_update(ForeignKeyAction::NoAction), + ); + + manager.create_table(aliases.to_owned()).await?; + + // Unique on (series_id, alias) - same alias can't be added twice for one series, + // but the same alias string can exist on different series (which is fine and + // expected for ambiguous titles). + manager + .create_index( + Index::create() + .name("idx_series_aliases_unique") + .table(SeriesAliases::Table) + .col(SeriesAliases::SeriesId) + .col(SeriesAliases::Alias) + .unique() + .to_owned(), + ) + .await?; + + // Index on normalized for matcher lookups (most-frequent access pattern). + manager + .create_index( + Index::create() + .name("idx_series_aliases_normalized") + .table(SeriesAliases::Table) + .col(SeriesAliases::Normalized) + .to_owned(), + ) + .await?; + + // FK index for joins back to series. 
+ manager + .create_index( + Index::create() + .name("idx_series_aliases_series_id") + .table(SeriesAliases::Table) + .col(SeriesAliases::SeriesId) + .to_owned(), + ) + .await?; + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(SeriesAliases::Table).to_owned()) + .await?; + manager + .drop_table(Table::drop().table(SeriesTracking::Table).to_owned()) + .await?; + Ok(()) + } +} + +#[derive(DeriveIden)] +pub enum SeriesTracking { + Table, + SeriesId, + Tracked, + TrackChapters, + TrackVolumes, + LatestKnownChapter, + LatestKnownVolume, + VolumeChapterMap, + PollIntervalOverrideS, + ConfidenceThresholdOverride, + CreatedAt, + UpdatedAt, +} + +#[derive(DeriveIden)] +pub enum SeriesAliases { + Table, + Id, + SeriesId, + Alias, + Normalized, + Source, + CreatedAt, +} diff --git a/migration/src/m20260503_000073_create_release_ledger.rs b/migration/src/m20260503_000073_create_release_ledger.rs new file mode 100644 index 00000000..a3557db6 --- /dev/null +++ b/migration/src/m20260503_000073_create_release_ledger.rs @@ -0,0 +1,339 @@ +//! Create release-tracking ledger schema (Phase 2 of release-tracking implementation). +//! +//! Adds two tables that store release announcements emitted by source plugins: +//! +//! - `release_sources`: one row per logical source a plugin exposes. A single +//! plugin can expose many sources (e.g., one per Nyaa uploader subscription). +//! Tracks per-source poll cadence, last-poll status, and an opaque +//! `etag`/cursor used for conditional fetches. +//! - `release_ledger`: the dedup-keyed announcement ledger. Sources write rows +//! here; the inbox UI reads from it. Dedup keys: `(source_id, +//! external_release_id)` (unique per source) and `info_hash` (unique +//! globally where present, since two BitTorrent sources publishing the same +//! torrent would share an info_hash). 
+ +use sea_orm_migration::prelude::*; + +use crate::m20260103_000003_create_series::Series; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let is_postgres = manager.get_database_backend() == sea_orm::DatabaseBackend::Postgres; + + // ---------- release_sources ---------- + let mut sources = Table::create(); + sources.table(ReleaseSources::Table).if_not_exists(); + + if is_postgres { + sources.col( + ColumnDef::new(ReleaseSources::Id) + .uuid() + .not_null() + .primary_key() + .extra("DEFAULT gen_random_uuid()"), + ); + } else { + sources.col( + ColumnDef::new(ReleaseSources::Id) + .uuid() + .not_null() + .primary_key(), + ); + } + + sources + // Owning plugin. The string `"core"` is reserved for in-core + // synthetic sources (e.g., metadata-piggyback in Phase 5) so we + // don't need a foreign key to plugins.id (which would force every + // synthetic source to also have a plugins row). + .col( + ColumnDef::new(ReleaseSources::PluginId) + .string_len(100) + .not_null(), + ) + // Plugin-defined unique key, e.g. "nyaa:user:tsuna69". + .col( + ColumnDef::new(ReleaseSources::SourceKey) + .string_len(255) + .not_null(), + ) + .col( + ColumnDef::new(ReleaseSources::DisplayName) + .string_len(255) + .not_null(), + ) + // 'rss-uploader' | 'rss-series' | 'api-feed' | 'metadata-feed' | 'metadata-piggyback' + .col( + ColumnDef::new(ReleaseSources::Kind) + .string_len(40) + .not_null(), + ) + .col( + ColumnDef::new(ReleaseSources::Enabled) + .boolean() + .not_null() + .default(true), + ) + // Per-source cron schedule override. NULL means "inherit the + // server-wide `release_tracking.default_cron_schedule` setting". + // Stored as a 5-field POSIX cron expression (the host normalizes + // to 6-field at scheduler-load time). 
+ .col(ColumnDef::new(ReleaseSources::CronSchedule).string_len(120)) + .col(ColumnDef::new(ReleaseSources::LastPolledAt).timestamp_with_time_zone()) + .col(ColumnDef::new(ReleaseSources::LastError).text()) + .col(ColumnDef::new(ReleaseSources::LastErrorAt).timestamp_with_time_zone()) + .col(ColumnDef::new(ReleaseSources::Etag).string_len(255)) + .col(ColumnDef::new(ReleaseSources::Config).json_binary()) + .col({ + let mut col = ColumnDef::new(ReleaseSources::CreatedAt); + col.timestamp_with_time_zone().not_null(); + if is_postgres { + col.extra("DEFAULT NOW()"); + } else { + col.extra("DEFAULT CURRENT_TIMESTAMP"); + } + col + }) + .col({ + let mut col = ColumnDef::new(ReleaseSources::UpdatedAt); + col.timestamp_with_time_zone().not_null(); + if is_postgres { + col.extra("DEFAULT NOW()"); + } else { + col.extra("DEFAULT CURRENT_TIMESTAMP"); + } + col + }); + + manager.create_table(sources.to_owned()).await?; + + // (plugin_id, source_key) is the natural composite identity. + manager + .create_index( + Index::create() + .name("idx_release_sources_plugin_key") + .table(ReleaseSources::Table) + .col(ReleaseSources::PluginId) + .col(ReleaseSources::SourceKey) + .unique() + .to_owned(), + ) + .await?; + + // Hot path for the scheduler: enumerate enabled sources. 
+ manager + .get_connection() + .execute_unprepared( + "CREATE INDEX idx_release_sources_enabled \ + ON release_sources(id) WHERE enabled = TRUE", + ) + .await?; + + // ---------- release_ledger ---------- + let mut ledger = Table::create(); + ledger.table(ReleaseLedger::Table).if_not_exists(); + + if is_postgres { + ledger.col( + ColumnDef::new(ReleaseLedger::Id) + .uuid() + .not_null() + .primary_key() + .extra("DEFAULT gen_random_uuid()"), + ); + } else { + ledger.col( + ColumnDef::new(ReleaseLedger::Id) + .uuid() + .not_null() + .primary_key(), + ); + } + + ledger + .col(ColumnDef::new(ReleaseLedger::SeriesId).uuid().not_null()) + .col(ColumnDef::new(ReleaseLedger::SourceId).uuid().not_null()) + // Plugin-stable identity for the release. Required - a source that + // can't produce one is unusable for dedup. + .col( + ColumnDef::new(ReleaseLedger::ExternalReleaseId) + .string_len(500) + .not_null(), + ) + // Optional. Torrent sources will have one; HTTP sources won't. + .col(ColumnDef::new(ReleaseLedger::InfoHash).string_len(64)) + // Decimal for chapters (handles 12.5, 110.1, etc.). Volume is integer. + .col(ColumnDef::new(ReleaseLedger::Chapter).double()) + .col(ColumnDef::new(ReleaseLedger::Volume).integer()) + .col(ColumnDef::new(ReleaseLedger::Language).string_len(20)) + // { "jxl": true, "container": "cbz", ... } + .col(ColumnDef::new(ReleaseLedger::FormatHints).json_binary()) + .col(ColumnDef::new(ReleaseLedger::GroupOrUploader).string_len(255)) + // Where the user goes to acquire the release. 
+ .col( + ColumnDef::new(ReleaseLedger::PayloadUrl) + .string_len(2048) + .not_null(), + ) + .col( + ColumnDef::new(ReleaseLedger::Confidence) + .double() + .not_null(), + ) + // 'announced' | 'dismissed' | 'marked_acquired' | 'hidden' + .col( + ColumnDef::new(ReleaseLedger::State) + .string_len(20) + .not_null() + .default("announced"), + ) + .col(ColumnDef::new(ReleaseLedger::Metadata).json_binary()) + .col( + ColumnDef::new(ReleaseLedger::ObservedAt) + .timestamp_with_time_zone() + .not_null(), + ) + .col({ + let mut col = ColumnDef::new(ReleaseLedger::CreatedAt); + col.timestamp_with_time_zone().not_null(); + if is_postgres { + col.extra("DEFAULT NOW()"); + } else { + col.extra("DEFAULT CURRENT_TIMESTAMP"); + } + col + }) + .foreign_key( + ForeignKey::create() + .name("fk_release_ledger_series_id") + .from(ReleaseLedger::Table, ReleaseLedger::SeriesId) + .to(Series::Table, Series::Id) + .on_delete(ForeignKeyAction::Cascade) + .on_update(ForeignKeyAction::NoAction), + ) + .foreign_key( + ForeignKey::create() + .name("fk_release_ledger_source_id") + .from(ReleaseLedger::Table, ReleaseLedger::SourceId) + .to(ReleaseSources::Table, ReleaseSources::Id) + .on_delete(ForeignKeyAction::Cascade) + .on_update(ForeignKeyAction::NoAction), + ); + + manager.create_table(ledger.to_owned()).await?; + + // Primary dedup key. + manager + .create_index( + Index::create() + .name("idx_release_ledger_source_external") + .table(ReleaseLedger::Table) + .col(ReleaseLedger::SourceId) + .col(ReleaseLedger::ExternalReleaseId) + .unique() + .to_owned(), + ) + .await?; + + // Cross-source dedup on info_hash where present. Partial unique index; + // both Postgres and SQLite accept this `WHERE info_hash IS NOT NULL` + // form via raw SQL. + manager + .get_connection() + .execute_unprepared( + "CREATE UNIQUE INDEX idx_release_ledger_info_hash \ + ON release_ledger(info_hash) WHERE info_hash IS NOT NULL", + ) + .await?; + + // Per-series ledger: ordered scan by observed_at desc. 
+ manager + .create_index( + Index::create() + .name("idx_release_ledger_series_observed") + .table(ReleaseLedger::Table) + .col(ReleaseLedger::SeriesId) + .col((ReleaseLedger::ObservedAt, IndexOrder::Desc)) + .to_owned(), + ) + .await?; + + // Inbox query: undismissed, ordered by observed_at desc. We use a + // partial index on the announced state since that's the dominant + // filter for the inbox view. + manager + .get_connection() + .execute_unprepared( + "CREATE INDEX idx_release_ledger_state_observed \ + ON release_ledger(state, observed_at DESC) WHERE state = 'announced'", + ) + .await?; + + // FK index for joins back to source. + manager + .create_index( + Index::create() + .name("idx_release_ledger_source_id") + .table(ReleaseLedger::Table) + .col(ReleaseLedger::SourceId) + .to_owned(), + ) + .await?; + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(ReleaseLedger::Table).to_owned()) + .await?; + manager + .drop_table(Table::drop().table(ReleaseSources::Table).to_owned()) + .await?; + Ok(()) + } +} + +#[derive(DeriveIden)] +pub enum ReleaseSources { + Table, + Id, + PluginId, + SourceKey, + DisplayName, + Kind, + Enabled, + CronSchedule, + LastPolledAt, + LastError, + LastErrorAt, + Etag, + Config, + CreatedAt, + UpdatedAt, +} + +#[derive(DeriveIden)] +pub enum ReleaseLedger { + Table, + Id, + SeriesId, + SourceId, + ExternalReleaseId, + InfoHash, + Chapter, + Volume, + Language, + FormatHints, + GroupOrUploader, + PayloadUrl, + Confidence, + State, + Metadata, + ObservedAt, + CreatedAt, +} diff --git a/migration/src/m20260504_000074_add_tracking_languages.rs b/migration/src/m20260504_000074_add_tracking_languages.rs new file mode 100644 index 00000000..c853a16c --- /dev/null +++ b/migration/src/m20260504_000074_add_tracking_languages.rs @@ -0,0 +1,39 @@ +//! Add `languages` column to `series_tracking` (Phase 6 of release-tracking). +//! +//! 
Per-series language preference for release-source plugins (e.g. +//! MangaUpdates) that aggregate scanlations across many languages. Stored as a +//! JSON array of ISO 639-1 codes, e.g. `["en"]` or `["en", "es"]`. NULL means +//! "fall back to the server-wide `release_tracking.default_languages` setting" +//! - that fallback policy lives in the plugin/service layer, not the schema. + +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("series_tracking")) + .add_column(ColumnDef::new(Alias::new("languages")).json_binary()) + .to_owned(), + ) + .await?; + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("series_tracking")) + .drop_column(Alias::new("languages")) + .to_owned(), + ) + .await?; + Ok(()) + } +} diff --git a/migration/src/m20260504_000075_seed_release_tracking_languages.rs b/migration/src/m20260504_000075_seed_release_tracking_languages.rs new file mode 100644 index 00000000..0f451eee --- /dev/null +++ b/migration/src/m20260504_000075_seed_release_tracking_languages.rs @@ -0,0 +1,102 @@ +//! Seed the server-wide `release_tracking.default_languages` setting (Phase 6). +//! +//! Aggregation feeds (e.g. MangaUpdates RSS) emit candidates in many languages. +//! Plugins filter client-side using a per-series `series_tracking.languages` +//! list, falling back to this server-wide default when that's NULL. ISO 639-1 +//! codes; the seed value is `["en"]` to match the user's primary expectation +//! and the language tag MangaUpdates uses for English scanlations. +//! +//! Stored as a JSON array string; type `"Array"` so SettingsRepository parses +//! it via `serde_json::from_str` directly. 
+
+use sea_orm::{ActiveModelTrait, Set, Statement, entity::prelude::*};
+use sea_orm_migration::prelude::*;
+use uuid::Uuid;
+
+#[derive(DeriveMigrationName)]
+pub struct Migration;
+
+// Minimal ActiveModel for settings to avoid circular dependencies (matches the
+// pattern used by sibling seed migrations, e.g.
+// `m20260111_000026_seed_metrics_settings`).
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "settings")]
+pub struct Model {
+    #[sea_orm(primary_key, auto_increment = false)]
+    pub id: Uuid,
+    pub key: String,
+    pub value: String,
+    pub value_type: String,
+    pub category: String,
+    pub description: String,
+    pub is_sensitive: bool,
+    pub default_value: String,
+    pub validation_rules: Option<String>,
+    pub min_value: Option<String>,
+    pub max_value: Option<String>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+    pub updated_by: Option<Uuid>,
+    pub version: i32,
+    pub deleted_at: Option<chrono::DateTime<chrono::Utc>>,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}
+
+#[async_trait::async_trait]
+impl MigrationTrait for Migration {
+    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+        let db = manager.get_connection();
+
+        // Idempotent seed.
+        let exists = db
+            .query_one(Statement::from_string(
+                manager.get_database_backend(),
+                "SELECT COUNT(*) as count FROM settings WHERE key = 'release_tracking.default_languages'"
+                    .to_owned(),
+            ))
+            .await?;
+        if let Some(row) = exists {
+            let count: i64 = row.try_get("", "count")?;
+            if count > 0 {
+                return Ok(());
+            }
+        }
+
+        let setting = ActiveModel {
+            id: Set(Uuid::new_v4()),
+            key: Set("release_tracking.default_languages".to_string()),
+            value: Set("[\"en\"]".to_string()),
+            value_type: Set("Array".to_string()),
+            category: Set("Release Tracking".to_string()),
+            description: Set(
+                "Server-wide default language list (ISO 639-1) for release-source plugins that aggregate scanlations across multiple languages (e.g. MangaUpdates). 
Per-series overrides on `series_tracking.languages` take precedence." + .to_string(), + ), + is_sensitive: Set(false), + default_value: Set("[\"en\"]".to_string()), + validation_rules: Set(None), + min_value: Set(None), + max_value: Set(None), + updated_at: Set(chrono::Utc::now()), + updated_by: Set(None), + version: Set(1), + deleted_at: Set(None), + }; + + setting.insert(db).await?; + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let db = manager.get_connection(); + db.execute(Statement::from_string( + manager.get_database_backend(), + "DELETE FROM settings WHERE key = 'release_tracking.default_languages'".to_owned(), + )) + .await?; + Ok(()) + } +} diff --git a/migration/src/m20260504_000076_seed_release_tracking_notify_filters.rs b/migration/src/m20260504_000076_seed_release_tracking_notify_filters.rs new file mode 100644 index 00000000..105a8c73 --- /dev/null +++ b/migration/src/m20260504_000076_seed_release_tracking_notify_filters.rs @@ -0,0 +1,127 @@ +//! Seed the server-wide `release_tracking.notify_languages` and +//! `release_tracking.notify_plugins` settings (Phase 8 follow-up). +//! +//! These two arrays filter the in-app `release_announced` notification stream +//! (toasts + Releases nav badge): +//! +//! - `notify_languages` (ISO 639-1, default `[]`): when non-empty, only +//! announcements whose `language` is in this list bump the badge / surface +//! a toast. Empty = "let everything through." +//! +//! - `notify_plugins` (plugin IDs, default `[]`): when non-empty, only +//! announcements emitted by a plugin in this list bump the badge / surface +//! a toast. Empty = "all installed release-source plugins are allowed." +//! +//! These filters are server-wide because all admins of a Codex instance share +//! the same notification stream. Per-series mute lives on +//! `user_preferences.release_tracking.muted_series_ids` (per-user) — the +//! 
distinction is that muting individual series is a personal-pref override
+//! over what would otherwise be a shared global notification, while the
+//! language / plugin allowlists shape the global stream itself.
+//!
+//! Defaults are empty arrays (no filtering) so a fresh install behaves like
+//! the old in-memory store: every announcement notifies. Admins can tighten
+//! later via the `/settings/release-tracking` page.
+
+use sea_orm::{ActiveModelTrait, Set, Statement, entity::prelude::*};
+use sea_orm_migration::prelude::*;
+use uuid::Uuid;
+
+#[derive(DeriveMigrationName)]
+pub struct Migration;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "settings")]
+pub struct Model {
+    #[sea_orm(primary_key, auto_increment = false)]
+    pub id: Uuid,
+    pub key: String,
+    pub value: String,
+    pub value_type: String,
+    pub category: String,
+    pub description: String,
+    pub is_sensitive: bool,
+    pub default_value: String,
+    pub validation_rules: Option<String>,
+    pub min_value: Option<String>,
+    pub max_value: Option<String>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+    pub updated_by: Option<Uuid>,
+    pub version: i32,
+    pub deleted_at: Option<chrono::DateTime<chrono::Utc>>,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}
+
+const KEYS: &[(&str, &str)] = &[
+    (
+        "release_tracking.notify_languages",
+        "Server-wide allowlist of ISO 639-1 language codes for release-tracking notifications. When non-empty, only announcements whose language is in this list bump the Releases badge and surface a toast. Empty array = let everything through.",
+    ),
+    (
+        "release_tracking.notify_plugins",
+        "Server-wide allowlist of release-source plugin IDs whose announcements should bump the Releases badge and surface a toast. 
Empty array = all installed release-source plugins are allowed.", + ), +]; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let db = manager.get_connection(); + + for (key, description) in KEYS { + // Idempotent seed. Static string concat is safe; `key` is a + // compile-time constant from the KEYS table. + let exists = db + .query_one(Statement::from_string( + manager.get_database_backend(), + format!( + "SELECT COUNT(*) as count FROM settings WHERE key = '{}'", + key + ), + )) + .await?; + if let Some(row) = exists { + let count: i64 = row.try_get("", "count")?; + if count > 0 { + continue; + } + } + + let setting = ActiveModel { + id: Set(Uuid::new_v4()), + key: Set((*key).to_string()), + value: Set("[]".to_string()), + value_type: Set("Array".to_string()), + category: Set("Release Tracking".to_string()), + description: Set((*description).to_string()), + is_sensitive: Set(false), + default_value: Set("[]".to_string()), + validation_rules: Set(None), + min_value: Set(None), + max_value: Set(None), + updated_at: Set(chrono::Utc::now()), + updated_by: Set(None), + version: Set(1), + deleted_at: Set(None), + }; + setting.insert(db).await?; + } + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let db = manager.get_connection(); + for (key, _) in KEYS { + db.execute(Statement::from_string( + manager.get_database_backend(), + format!("DELETE FROM settings WHERE key = '{}'", key), + )) + .await?; + } + Ok(()) + } +} diff --git a/migration/src/m20260505_000077_add_release_sources_last_summary.rs b/migration/src/m20260505_000077_add_release_sources_last_summary.rs new file mode 100644 index 00000000..664b1d55 --- /dev/null +++ b/migration/src/m20260505_000077_add_release_sources_last_summary.rs @@ -0,0 +1,40 @@ +//! Add `last_summary` column to `release_sources`. +//! +//! 
Free-form text written by the poll-source task on every successful poll +//! completion (e.g. `"fetched 12 items, matched 0, recorded 0"`). The +//! Release tracking settings UI surfaces it under the per-row status badge +//! so users can see *why* a poll returned no announcements (no tracked +//! series with aliases, upstream not modified, etc.) without grepping +//! container logs. NULL until the first successful poll. + +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("release_sources")) + .add_column(ColumnDef::new(Alias::new("last_summary")).text()) + .to_owned(), + ) + .await?; + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("release_sources")) + .drop_column(Alias::new("last_summary")) + .to_owned(), + ) + .await?; + Ok(()) + } +} diff --git a/migration/src/m20260505_000078_add_release_ledger_media_url.rs b/migration/src/m20260505_000078_add_release_ledger_media_url.rs new file mode 100644 index 00000000..fe75af86 --- /dev/null +++ b/migration/src/m20260505_000078_add_release_ledger_media_url.rs @@ -0,0 +1,60 @@ +//! Add `media_url` + `media_url_kind` columns to `release_ledger`. +//! +//! Some sources (Nyaa especially) carry two URLs per release: a +//! human-readable landing page and the actual fetch URL (a `.torrent`, +//! magnet link, or direct download). The existing `payload_url` keeps the +//! landing page; this migration adds the second URL and a small enum +//! string describing what it points at so the inbox UI can render a +//! kind-specific icon (download arrow / magnet / etc.) next to the +//! standard external-link icon. +//! +//! Both columns are nullable — sources that only surface a single URL +//! 
(MangaUpdates) leave them empty. + +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("release_ledger")) + .add_column(ColumnDef::new(Alias::new("media_url")).string_len(2048)) + .to_owned(), + ) + .await?; + manager + .alter_table( + Table::alter() + .table(Alias::new("release_ledger")) + .add_column(ColumnDef::new(Alias::new("media_url_kind")).string_len(32)) + .to_owned(), + ) + .await?; + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("release_ledger")) + .drop_column(Alias::new("media_url_kind")) + .to_owned(), + ) + .await?; + manager + .alter_table( + Table::alter() + .table(Alias::new("release_ledger")) + .drop_column(Alias::new("media_url")) + .to_owned(), + ) + .await?; + Ok(()) + } +} diff --git a/migration/src/m20260505_000079_seed_release_tracking_default_cron.rs b/migration/src/m20260505_000079_seed_release_tracking_default_cron.rs new file mode 100644 index 00000000..a49abd87 --- /dev/null +++ b/migration/src/m20260505_000079_seed_release_tracking_default_cron.rs @@ -0,0 +1,100 @@ +//! Seed the server-wide `release_tracking.default_cron_schedule` setting. +//! +//! Resolution chain for a `release_sources` row's effective schedule: +//! 1. `release_sources.cron_schedule` if non-NULL +//! 2. otherwise this server-wide default +//! 3. otherwise the compile-time fallback (`"0 0 * * *"`, daily) +//! +//! Stored as a 5-field POSIX cron string. The host normalizes to the +//! 6-field format expected by `tokio-cron-scheduler` at scheduler-load time +//! via `crate::utils::cron::normalize_cron_expression`. 
+ +use sea_orm::{ActiveModelTrait, Set, Statement, entity::prelude::*}; +use sea_orm_migration::prelude::*; +use uuid::Uuid; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "settings")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + pub key: String, + pub value: String, + pub value_type: String, + pub category: String, + pub description: String, + pub is_sensitive: bool, + pub default_value: String, + pub validation_rules: Option, + pub min_value: Option, + pub max_value: Option, + pub updated_at: chrono::DateTime, + pub updated_by: Option, + pub version: i32, + pub deleted_at: Option>, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} + +const SETTING_KEY: &str = "release_tracking.default_cron_schedule"; +const DEFAULT_CRON: &str = "0 0 * * *"; // daily at midnight (5-field POSIX) + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let db = manager.get_connection(); + + let exists = db + .query_one(Statement::from_string( + manager.get_database_backend(), + format!("SELECT COUNT(*) as count FROM settings WHERE key = '{SETTING_KEY}'"), + )) + .await?; + if let Some(row) = exists { + let count: i64 = row.try_get("", "count")?; + if count > 0 { + return Ok(()); + } + } + + let setting = ActiveModel { + id: Set(Uuid::new_v4()), + key: Set(SETTING_KEY.to_string()), + value: Set(DEFAULT_CRON.to_string()), + value_type: Set("String".to_string()), + category: Set("Release Tracking".to_string()), + description: Set( + "Server-wide default cron schedule for release-source polling. Applied to any `release_sources` row whose `cron_schedule` is NULL. Standard 5-field POSIX cron (minute hour day_of_month month day_of_week)." 
+ .to_string(), + ), + is_sensitive: Set(false), + default_value: Set(DEFAULT_CRON.to_string()), + validation_rules: Set(Some(r#"{"input_type": "cron"}"#.to_string())), + min_value: Set(None), + max_value: Set(None), + updated_at: Set(chrono::Utc::now()), + updated_by: Set(None), + version: Set(1), + deleted_at: Set(None), + }; + + setting.insert(db).await?; + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let db = manager.get_connection(); + db.execute(Statement::from_string( + manager.get_database_backend(), + format!("DELETE FROM settings WHERE key = '{SETTING_KEY}'"), + )) + .await?; + Ok(()) + } +} diff --git a/plugins/release-mangaupdates/package-lock.json b/plugins/release-mangaupdates/package-lock.json new file mode 100644 index 00000000..0e96b7e0 --- /dev/null +++ b/plugins/release-mangaupdates/package-lock.json @@ -0,0 +1,1971 @@ +{ + "name": "@ashdev/codex-plugin-release-mangaupdates", + "version": "1.18.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@ashdev/codex-plugin-release-mangaupdates", + "version": "1.18.0", + "license": "MIT", + "dependencies": { + "@ashdev/codex-plugin-sdk": "file:../sdk-typescript" + }, + "bin": { + "codex-plugin-release-mangaupdates": "dist/index.js" + }, + "devDependencies": { + "@biomejs/biome": "^2.4.4", + "@types/node": "^22.0.0", + "esbuild": "^0.27.3", + "typescript": "^5.9.3", + "vitest": "^4.0.18" + }, + "engines": { + "node": ">=22.0.0" + } + }, + "../sdk-typescript": { + "name": "@ashdev/codex-plugin-sdk", + "version": "1.18.0", + "license": "MIT", + "devDependencies": { + "@biomejs/biome": "^2.4.4", + "@types/node": "^22.0.0", + "typescript": "^5.9.3", + "vitest": "^4.0.18" + }, + "engines": { + "node": ">=22.0.0" + } + }, + "node_modules/@ashdev/codex-plugin-sdk": { + "resolved": "../sdk-typescript", + "link": true + }, + "node_modules/@biomejs/biome": { + "version": "2.4.14", + "resolved": 
"https://registry.npmjs.org/@biomejs/biome/-/biome-2.4.14.tgz", + "integrity": "sha512-TmAvxOEgrpLypzVGJ8FulIZnlyA9TxrO1hyqYrCz9r+bwma9xXxuLA5IuYnj55XQneFx460KjRbx6SWGLkg3bQ==", + "dev": true, + "license": "MIT OR Apache-2.0", + "bin": { + "biome": "bin/biome" + }, + "engines": { + "node": ">=14.21.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/biome" + }, + "optionalDependencies": { + "@biomejs/cli-darwin-arm64": "2.4.14", + "@biomejs/cli-darwin-x64": "2.4.14", + "@biomejs/cli-linux-arm64": "2.4.14", + "@biomejs/cli-linux-arm64-musl": "2.4.14", + "@biomejs/cli-linux-x64": "2.4.14", + "@biomejs/cli-linux-x64-musl": "2.4.14", + "@biomejs/cli-win32-arm64": "2.4.14", + "@biomejs/cli-win32-x64": "2.4.14" + } + }, + "node_modules/@biomejs/cli-darwin-arm64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.4.14.tgz", + "integrity": "sha512-XvgoE9XOawUOQPdmvs4J7wPhi/DLwSCGks3AlPJDmh34O0awRTqCED1HRcRDdpf1Zrp4us4MGOOdIxNpbqNF5Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-darwin-x64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.4.14.tgz", + "integrity": "sha512-jE7hKBCFhOx3uUh+ZkWBfOHxAcILPfhFplNkuID/eZeSTLHzfZzoZxW8fbqY9xXRnPi7jGNAf1iPVR+0yWsM/Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.4.14.tgz", + "integrity": "sha512-2TELhZnW5RSLL063l9rc5xLpA0ZIw0Ccwy/0q384rvNAgFw3yI76bd59547yxowdQr5MNPET/xDLrLuvgSeeWQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR 
Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64-musl": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.4.14.tgz", + "integrity": "sha512-/z+6gqAqqUQTHazwStxSXKHg9b8UvqBmDFRp+c4wYbq2KXhELQDon9EoC9RpmQ8JWkqQx/lIUy/cs+MhzDZp6A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.4.14.tgz", + "integrity": "sha512-zHrlQZDBDUz4OLAraYpWKcnLS6HOewBFWYOzY91d1ZjdqZwibOyb6BEu6WuWLugyo0P3riCmsbV9UqV1cSXwQg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64-musl": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.4.14.tgz", + "integrity": "sha512-R6BWgJdQOwW9ulJatuTVrQkjnODjqHZkKNOqb1sz++3Noe5LYd0i3PchnOBUCYAPHoPWHhjJqbdZlHEu0hpjdA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-arm64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.4.14.tgz", + "integrity": "sha512-M3EH5hqOI/F/FUA2u4xcLoUgmxd218mvuj/6JL7Hv2toQvr2/AdOvKSpGkoRuWFCtQPVa+ZqkEV3Q5xBA9+XSA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-x64": { + "version": "2.4.14", + "resolved": 
"https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.4.14.tgz", + "integrity": "sha512-WL0EG5qE+EAKomGXbf2g6VnSKJhTL3tXC0QRzWRwA5VpjxNYa6H4P7ZWfymbGE4IhZZQi1KXQ2R0YjwInmz2fA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@emnapi/core": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.10.0.tgz", + "integrity": "sha512-yq6OkJ4p82CAfPl0u9mQebQHKPJkY7WrIuk205cTYnYe+k2Z8YBh11FrbRG/H6ihirqcacOgl2BIO8oyMQLeXw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.2.1", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.10.0.tgz", + "integrity": "sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.1.tgz", + "integrity": "sha512-uTII7OYF+/Mes/MrcIOYp5yOtSMLBWSIoLPpcgwipoiKbli6k322tcoFsxoIIxPDqW01SQGAgko4EzZi2BNv2w==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": 
"https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", + "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } 
+ }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": 
"sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" 
+ } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.4.tgz", + "integrity": "sha512-3NQNNgA1YSlJb/kMH1ildASP9HW7/7kYnRI2szWJaofaS1hWmbGI4H+d3+22aGzXXN9IJ+n+GiFVcGipJP18ow==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@tybys/wasm-util": "^0.10.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" + }, + "peerDependencies": { + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1" + } + }, + "node_modules/@oxc-project/types": { + "version": "0.127.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.127.0.tgz", + "integrity": "sha512-aIYXQBo4lCbO4z0R3FHeucQHpF46l2LbMdxRvqvuRuW2OxdnSkcng5B8+K12spgLDj93rtN3+J2Vac/TIO+ciQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/@rolldown/binding-android-arm64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.17.tgz", + "integrity": "sha512-s70pVGhw4zqGeFnXWvAzJDlvxhlRollagdCCKRgOsgUOH3N1l0LIxf83AtGzmb5SiVM4Hjl5HyarMRfdfj3DaQ==", + "cpu": [ + "arm64" 
+ ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-darwin-arm64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.17.tgz", + "integrity": "sha512-4ksWc9n0mhlZpZ9PMZgTGjeOPRu8MB1Z3Tz0Mo02eWfWCHMW1zN82Qz/pL/rC+yQa+8ZnutMF0JjJe7PjwasYw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-darwin-x64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.17.tgz", + "integrity": "sha512-SUSDOI6WwUVNcWxd02QEBjLdY1VPHvlEkw6T/8nYG322iYWCTxRb1vzk4E+mWWYehTp7ERibq54LSJGjmouOsw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-freebsd-x64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.17.tgz", + "integrity": "sha512-hwnz3nw9dbJ05EDO/PvcjaaewqqDy7Y1rn1UO81l8iIK1GjenME75dl16ajbvSSMfv66WXSRCYKIqfgq2KCfxw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm-gnueabihf": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.17.tgz", + "integrity": "sha512-IS+W7epTcwANmFSQFrS1SivEXHtl1JtuQA9wlxrZTcNi6mx+FDOYrakGevvvTwgj2JvWiK8B29/qD9BELZPyXQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + 
"node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm64-gnu": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.17.tgz", + "integrity": "sha512-e6usGaHKW5BMNZOymS1UcEYGowQMWcgZ71Z17Sl/h2+ZziNJ1a9n3Zvcz6LdRyIW5572wBCTH/Z+bKuZouGk9Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm64-musl": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.17.tgz", + "integrity": "sha512-b/CgbwAJpmrRLp02RPfhbudf5tZnN9nsPWK82znefso832etkem8H7FSZwxrOI9djcdTP7U6YfNhbRnh7djErg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-ppc64-gnu": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.0.0-rc.17.tgz", + "integrity": "sha512-4EII1iNGRUN5WwGbF/kOh/EIkoDN9HsupgLQoXfY+D1oyJm7/F4t5PYU5n8SWZgG0FEwakyM8pGgwcBYruGTlA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-s390x-gnu": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.0.0-rc.17.tgz", + "integrity": "sha512-AH8oq3XqQo4IibpVXvPeLDI5pzkpYn0WiZAfT05kFzoJ6tQNzwRdDYQ45M8I/gslbodRZwW8uxLhbSBbkv96rA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + 
"node_modules/@rolldown/binding-linux-x64-gnu": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.17.tgz", + "integrity": "sha512-cLnjV3xfo7KslbU41Z7z8BH/E1y5mzUYzAqih1d1MDaIGZRCMqTijqLv76/P7fyHuvUcfGsIpqCdddbxLLK9rA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-x64-musl": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.17.tgz", + "integrity": "sha512-0phclDw1spsL7dUB37sIARuis2tAgomCJXAHZlpt8PXZ4Ba0dRP1e+66lsRqrfhISeN9bEGNjQs+T/Fbd7oYGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-openharmony-arm64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.17.tgz", + "integrity": "sha512-0ag/hEgXOwgw4t8QyQvUCxvEg+V0KBcA6YuOx9g0r02MprutRF5dyljgm3EmR02O292UX7UeS6HzWHAl6KgyhA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-wasm32-wasi": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.17.tgz", + "integrity": "sha512-LEXei6vo0E5wTGwpkJ4KoT3OZJRnglwldt5ziLzOlc6qqb55z4tWNq2A+PFqCJuvWWdP53CVhG1Z9NtToDPJrA==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "1.10.0", + "@emnapi/runtime": "1.10.0", + "@napi-rs/wasm-runtime": "^1.1.4" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + 
}, + "node_modules/@rolldown/binding-win32-arm64-msvc": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.17.tgz", + "integrity": "sha512-gUmyzBl3SPMa6hrqFUth9sVfcLBlYsbMzBx5PlexMroZStgzGqlZ26pYG89rBb45Mnia+oil6YAIFeEWGWhoZA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-win32-x64-msvc": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-rc.17.tgz", + "integrity": "sha512-3hkiolcUAvPB9FLb3UZdfjVVNWherN1f/skkGWJP/fgSQhYUZpSIRr0/I8ZK9TkF3F7kxvJAk0+IcKvPHk9qQg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.17.tgz", + "integrity": "sha512-n8iosDOt6Ig1UhJ2AYqoIhHWh/isz0xpicHTzpKBeotdVsTEcxsSA/i3EVM7gQAj0rU27OLAxCjzlj15IWY7bg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.2.tgz", + "integrity": "sha512-RoBvJ2X0wuKlWFIjrwffGw1IqZHKQqzIchKaadZZfnNpsAYp2mM0h36JtPCjNDAHGgYez/15uMBpfGwchhiMgg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/chai": { + "version": "5.2.3", + 
"resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.19.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.17.tgz", + "integrity": "sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@vitest/expect": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.1.5.tgz", + "integrity": "sha512-PWBaRY5JoKuRnHlUHfpV/KohFylaDZTupcXN1H9vYryNLOnitSw60Mw9IAE2r67NbwwzBw/Cc/8q9BK3kIX8Kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.1.0", + "@types/chai": "^5.2.2", + "@vitest/spy": "4.1.5", + "@vitest/utils": "4.1.5", + "chai": "^6.2.2", + "tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.5.tgz", + "integrity": 
"sha512-/x2EmFC4mT4NNzqvC3fmesuV97w5FC903KPmey4gsnJiMQ3Be1IlDKVaDaG8iqaLFHqJ2FVEkxZk5VmeLjIItw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "4.1.5", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.21" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.5.tgz", + "integrity": "sha512-7I3q6l5qr03dVfMX2wCo9FxwSJbPdwKjy2uu/YPpU3wfHvIL4QHwVRp57OfGrDFeUJ8/8QdfBKIV12FTtLn00g==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.1.5.tgz", + "integrity": "sha512-2D+o7Pr82IEO46YPpoA/YU0neeyr6FTerQb5Ro7BUnBuv6NQtT/kmVnczngiMEBhzgqz2UZYl5gArejsyERDSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "4.1.5", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.1.5.tgz", + "integrity": "sha512-zypXEt4KH/XgKGPUz4eC2AvErYx0My5hfL8oDb1HzGFpEk1P62bxSohdyOmvz+d9UJwanI68MKwr2EquOaOgMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.1.5", + "@vitest/utils": "4.1.5", + "magic-string": "^0.30.21", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.1.5.tgz", + "integrity": 
"sha512-2lNOsh6+R2Idnf1TCZqSwYlKN2E/iDlD8sgU59kYVl+OMDmvldO1VDk39smRfpUNwYpNRVn3w4YfuC7KfbBnkQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.1.5.tgz", + "integrity": "sha512-76wdkrmfXfqGjueGgnb45ITPyUi1ycZ4IHgC2bhPDUfWHklY/q3MdLOAB+TF1e6xfl8NxNY0ZYaPCFNWSsw3Ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.1.5", + "convert-source-map": "^2.0.0", + "tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/chai": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz", + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/es-module-lexer": { + "version": 
"2.1.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.1.0.tgz", + "integrity": "sha512-n27zTYMjYu1aj4MjCWzSP7G9r75utsaoc8m61weK+W8JMBGGQybd43GstCXZ3WNmSFtGT9wi59qQTW6mhTR5LQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", + "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.7", + "@esbuild/android-arm": "0.27.7", + "@esbuild/android-arm64": "0.27.7", + "@esbuild/android-x64": "0.27.7", + "@esbuild/darwin-arm64": "0.27.7", + "@esbuild/darwin-x64": "0.27.7", + "@esbuild/freebsd-arm64": "0.27.7", + "@esbuild/freebsd-x64": "0.27.7", + "@esbuild/linux-arm": "0.27.7", + "@esbuild/linux-arm64": "0.27.7", + "@esbuild/linux-ia32": "0.27.7", + "@esbuild/linux-loong64": "0.27.7", + "@esbuild/linux-mips64el": "0.27.7", + "@esbuild/linux-ppc64": "0.27.7", + "@esbuild/linux-riscv64": "0.27.7", + "@esbuild/linux-s390x": "0.27.7", + "@esbuild/linux-x64": "0.27.7", + "@esbuild/netbsd-arm64": "0.27.7", + "@esbuild/netbsd-x64": "0.27.7", + "@esbuild/openbsd-arm64": "0.27.7", + "@esbuild/openbsd-x64": "0.27.7", + "@esbuild/openharmony-arm64": "0.27.7", + "@esbuild/sunos-x64": "0.27.7", + "@esbuild/win32-arm64": "0.27.7", + "@esbuild/win32-ia32": "0.27.7", + "@esbuild/win32-x64": "0.27.7" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/expect-type": { + 
"version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/lightningcss": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz", + "integrity": "sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.32.0", + "lightningcss-darwin-arm64": "1.32.0", + "lightningcss-darwin-x64": "1.32.0", + "lightningcss-freebsd-x64": "1.32.0", + "lightningcss-linux-arm-gnueabihf": "1.32.0", + "lightningcss-linux-arm64-gnu": "1.32.0", + "lightningcss-linux-arm64-musl": "1.32.0", + "lightningcss-linux-x64-gnu": "1.32.0", + 
"lightningcss-linux-x64-musl": "1.32.0", + "lightningcss-win32-arm64-msvc": "1.32.0", + "lightningcss-win32-x64-msvc": "1.32.0" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz", + "integrity": "sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz", + "integrity": "sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz", + "integrity": "sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz", + "integrity": 
"sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz", + "integrity": "sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz", + "integrity": "sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz", + "integrity": "sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz", + "integrity": "sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz", + "integrity": "sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz", + "integrity": "sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz", + "integrity": "sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==", + "cpu": [ + 
"x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/nanoid": { + "version": "3.3.12", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.12.tgz", + "integrity": "sha512-ZB9RH/39qpq5Vu6Y+NmUaFhQR6pp+M2Xt76XBnEwDaGcVAqhlvxrl3B2bKS5D3NH3QR76v3aSrKaF/Kiy7lEtQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + 
"node_modules/picomatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.14", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.14.tgz", + "integrity": "sha512-SoSL4+OSEtR99LHFZQiJLkT59C5B1amGO1NzTwj7TT1qCUgUO6hxOvzkOYxD+vMrXBM3XJIKzokoERdqQq/Zmg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/rolldown": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-rc.17.tgz", + "integrity": "sha512-ZrT53oAKrtA4+YtBWPQbtPOxIbVDbxT0orcYERKd63VJTF13zPcgXTvD4843L8pcsI7M6MErt8QtON6lrB9tyA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@oxc-project/types": "=0.127.0", + "@rolldown/pluginutils": "1.0.0-rc.17" + }, + "bin": { + "rolldown": "bin/cli.mjs" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "optionalDependencies": { + "@rolldown/binding-android-arm64": "1.0.0-rc.17", + "@rolldown/binding-darwin-arm64": "1.0.0-rc.17", + "@rolldown/binding-darwin-x64": "1.0.0-rc.17", + "@rolldown/binding-freebsd-x64": "1.0.0-rc.17", + "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-rc.17", + "@rolldown/binding-linux-arm64-gnu": "1.0.0-rc.17", + "@rolldown/binding-linux-arm64-musl": "1.0.0-rc.17", + 
"@rolldown/binding-linux-ppc64-gnu": "1.0.0-rc.17", + "@rolldown/binding-linux-s390x-gnu": "1.0.0-rc.17", + "@rolldown/binding-linux-x64-gnu": "1.0.0-rc.17", + "@rolldown/binding-linux-x64-musl": "1.0.0-rc.17", + "@rolldown/binding-openharmony-arm64": "1.0.0-rc.17", + "@rolldown/binding-wasm32-wasi": "1.0.0-rc.17", + "@rolldown/binding-win32-arm64-msvc": "1.0.0-rc.17", + "@rolldown/binding-win32-x64-msvc": "1.0.0-rc.17" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-4.1.0.tgz", + "integrity": "sha512-Rq7ybcX2RuC55r9oaPVEW7/xu3tj8u4GeBYHBWCychFtzMIr86A7e3PPEBPT37sHStKX3+TiX/Fr/ACmJLVlLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/tinyexec/-/tinyexec-1.1.2.tgz", + "integrity": "sha512-dAqSqE/RabpBKI8+h26GfLq6Vb3JVXs30XYQjdMjaj/c2tS8IYYMbIzP599KtRj7c57/wYApb3QjgRgXmrCukA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.16", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.16.tgz", + "integrity": "sha512-pn99VhoACYR8nFHhxqix+uvsbXineAasWm5ojXoN8xEwK5Kd3/TrhNn1wByuD52UxWRLy8pu+kRMniEi6Eq9Zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.4" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyrainbow": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.1.0.tgz", + "integrity": "sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": 
true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "8.0.10", + "resolved": "https://registry.npmjs.org/vite/-/vite-8.0.10.tgz", + "integrity": "sha512-rZuUu9j6J5uotLDs+cAA4O5H4K1SfPliUlQwqa6YEwSrWDZzP4rhm00oJR5snMewjxF5V/K3D4kctsUTsIU9Mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "lightningcss": "^1.32.0", + "picomatch": "^4.0.4", + "postcss": "^8.5.10", + "rolldown": "1.0.0-rc.17", + "tinyglobby": "^0.2.16" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "@vitejs/devtools": "^0.1.0", + "esbuild": "^0.27.0 || ^0.28.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "@vitejs/devtools": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vitest": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.1.5.tgz", + "integrity": "sha512-9Xx1v3/ih3m9hN+SbfkUyy0JAs72ap3r7joc87XL6jwF0jGg6mFBvQ1SrwaX+h8BlkX6Hz9shdd1uo6AF+ZGpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "4.1.5", + "@vitest/mocker": "4.1.5", + "@vitest/pretty-format": "4.1.5", + "@vitest/runner": "4.1.5", + "@vitest/snapshot": "4.1.5", + "@vitest/spy": "4.1.5", + 
"@vitest/utils": "4.1.5", + "es-module-lexer": "^2.0.0", + "expect-type": "^1.3.0", + "magic-string": "^0.30.21", + "obug": "^2.1.1", + "pathe": "^2.0.3", + "picomatch": "^4.0.3", + "std-env": "^4.0.0-rc.1", + "tinybench": "^2.9.0", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.1.0", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@opentelemetry/api": "^1.9.0", + "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.1.5", + "@vitest/browser-preview": "4.1.5", + "@vitest/browser-webdriverio": "4.1.5", + "@vitest/coverage-istanbul": "4.1.5", + "@vitest/coverage-v8": "4.1.5", + "@vitest/ui": "4.1.5", + "happy-dom": "*", + "jsdom": "*", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@opentelemetry/api": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { + "optional": true + }, + "@vitest/coverage-istanbul": { + "optional": true + }, + "@vitest/coverage-v8": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + }, + "vite": { + "optional": false + } + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + 
"why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + } + } +} diff --git a/plugins/release-mangaupdates/package.json b/plugins/release-mangaupdates/package.json new file mode 100644 index 00000000..ec72ac92 --- /dev/null +++ b/plugins/release-mangaupdates/package.json @@ -0,0 +1,51 @@ +{ + "name": "@ashdev/codex-plugin-release-mangaupdates", + "version": "1.18.0", + "description": "MangaUpdates RSS release-source plugin for Codex - announces new chapter releases for tracked series in user-configured languages", + "main": "dist/index.js", + "bin": "dist/index.js", + "type": "module", + "files": [ + "dist", + "README.md" + ], + "repository": { + "type": "git", + "url": "https://github.com/AshDevFr/codex.git", + "directory": "plugins/release-mangaupdates" + }, + "scripts": { + "build": "esbuild src/index.ts --bundle --platform=node --target=node22 --format=esm --outfile=dist/index.js --sourcemap --banner:js='#!/usr/bin/env node'", + "dev": "npm run build -- --watch", + "clean": "rm -rf dist", + "start": "node dist/index.js", + "lint": "biome check .", + "lint:fix": "biome check --write .", + "typecheck": "tsc --noEmit", + "test": "vitest run --passWithNoTests", + "test:watch": "vitest", + "prepublishOnly": "npm run lint && npm run build" + }, + "keywords": [ + "codex", + "plugin", + "mangaupdates", + "release-source", + "manga" + ], + "author": "Codex", + "license": "MIT", + "engines": { + "node": ">=22.0.0" + }, + "dependencies": { + "@ashdev/codex-plugin-sdk": "file:../sdk-typescript" + }, + "devDependencies": { + "@biomejs/biome": "^2.4.4", + "@types/node": "^22.0.0", + "esbuild": "^0.27.3", + "typescript": "^5.9.3", + "vitest": "^4.0.18" + } +} diff --git a/plugins/release-mangaupdates/src/fetcher.test.ts b/plugins/release-mangaupdates/src/fetcher.test.ts new file mode 100644 index 00000000..d08ffa92 --- /dev/null +++ b/plugins/release-mangaupdates/src/fetcher.test.ts @@ -0,0 +1,142 @@ +import { describe, expect, it, vi } from "vitest"; 
+import { + feedUrl, + fetchSeriesFeed, + MANGAUPDATES_RSS_BASE, + normalizeMangaUpdatesId, +} from "./fetcher.js"; + +function mockResponse(status: number, body = "", headers: Record = {}): Response { + // Some status codes (204, 304) can't be set on a constructed `Response` + // because they're "null body status" codes. We synthesize a minimal + // duck-typed object instead — only `status`, `statusText`, `headers`, and + // `text()` are read by `fetchSeriesFeed`. + const h = new Headers(headers); + return { + status, + statusText: "", + headers: h, + text: async () => body, + } as unknown as Response; +} + +describe("normalizeMangaUpdatesId", () => { + it("passes numeric IDs through unchanged", () => { + expect(normalizeMangaUpdatesId("12345")).toBe("12345"); + expect(normalizeMangaUpdatesId("15180124327")).toBe("15180124327"); + }); + + it("decodes base36 slugs to their numeric form", () => { + // Real example: Solo Leveling. parseInt("6z1uqw7", 36) === 15180124327. + expect(normalizeMangaUpdatesId("6z1uqw7")).toBe("15180124327"); + // Another real shape — slug with letters. + expect(normalizeMangaUpdatesId("abc")).toBe(String(Number.parseInt("abc", 36))); + }); + + it("does not decode all-digit input even though parseInt(_, 36) would", () => { + // An all-digit slug is ambiguous, but every legacy integer ID is + // all-digit, so we have to treat that form as authoritative. Otherwise + // a series whose slug just happens to be e.g. "12345" would round-trip + // through base36 and end up pointing at a different series. 
+ expect(normalizeMangaUpdatesId("12345")).toBe("12345"); + }); + + it("trims surrounding whitespace before decoding", () => { + expect(normalizeMangaUpdatesId(" 6z1uqw7 ")).toBe("15180124327"); + }); + + it("rejects out-of-alphabet input as a config error", () => { + expect(normalizeMangaUpdatesId("not-a-real-id")).toBeNull(); + expect(normalizeMangaUpdatesId("")).toBeNull(); + expect(normalizeMangaUpdatesId(" ")).toBeNull(); + }); +}); + +describe("feedUrl", () => { + it("builds the per-series RSS URL from a numeric ID", () => { + expect(feedUrl("12345")).toBe(`${MANGAUPDATES_RSS_BASE}/12345/rss`); + }); + + it("decodes a base36 slug before building the URL", () => { + // Metadata sources (MangaBaka et al.) often store the public-URL slug; + // the API itself only accepts the numeric form, so the fetcher + // normalizes transparently. + expect(feedUrl("6z1uqw7")).toBe(`${MANGAUPDATES_RSS_BASE}/15180124327/rss`); + }); +}); + +describe("fetchSeriesFeed", () => { + it("sends If-None-Match when a previous ETag is given", async () => { + const fetchImpl = vi.fn().mockResolvedValue(mockResponse(200, "", { etag: '"def"' })); + await fetchSeriesFeed("99", '"abc"', { fetchImpl }); + const callArgs = fetchImpl.mock.calls[0]; + expect(callArgs).toBeDefined(); + if (!callArgs) return; + const [url, init] = callArgs as [string, RequestInit]; + expect(url).toBe(feedUrl("99")); + const headers = init.headers as Record; + expect(headers["If-None-Match"]).toBe('"abc"'); + expect(headers.Accept).toContain("rss"); + }); + + it("omits If-None-Match on the first poll (no previous etag)", async () => { + const fetchImpl = vi.fn().mockResolvedValue(mockResponse(200, "")); + await fetchSeriesFeed("99", null, { fetchImpl }); + const callArgs = fetchImpl.mock.calls[0]; + if (!callArgs) return; + const headers = (callArgs[1] as RequestInit).headers as Record; + expect(headers["If-None-Match"]).toBeUndefined(); + }); + + it("returns ok with body and etag on 200", async () => { + const 
fetchImpl = vi + .fn() + .mockResolvedValue(mockResponse(200, "body", { etag: '"new-etag"' })); + const result = await fetchSeriesFeed("99", null, { fetchImpl }); + expect(result.kind).toBe("ok"); + if (result.kind !== "ok") return; + expect(result.body).toBe("body"); + expect(result.etag).toBe('"new-etag"'); + expect(result.status).toBe(200); + }); + + it("returns notModified on 304", async () => { + const fetchImpl = vi.fn().mockResolvedValue(mockResponse(304)); + const result = await fetchSeriesFeed("99", '"abc"', { fetchImpl }); + expect(result.kind).toBe("notModified"); + expect(result.status).toBe(304); + }); + + it("returns error with the upstream status on 429", async () => { + const fetchImpl = vi.fn().mockResolvedValue(mockResponse(429)); + const result = await fetchSeriesFeed("99", null, { fetchImpl }); + expect(result.kind).toBe("error"); + expect(result.status).toBe(429); + }); + + it("returns error with status 0 on transport-level failure", async () => { + const fetchImpl = vi.fn().mockRejectedValue(new Error("ECONNRESET")); + const result = await fetchSeriesFeed("99", null, { fetchImpl }); + expect(result.kind).toBe("error"); + if (result.kind !== "error") return; + expect(result.status).toBe(0); + expect(result.message).toContain("ECONNRESET"); + }); + + it("returns error with status 0 on timeout (AbortError)", async () => { + const fetchImpl = vi.fn().mockImplementation((_url, init: RequestInit) => { + // Simulate an aborted request: throw the same DOMException-like error + // that real `fetch` raises when the AbortSignal triggers. 
+ return new Promise((_, reject) => { + const signal = init.signal as AbortSignal; + signal.addEventListener("abort", () => { + reject(new DOMException("aborted", "AbortError")); + }); + }); + }); + const result = await fetchSeriesFeed("99", null, { fetchImpl, timeoutMs: 10 }); + expect(result.kind).toBe("error"); + if (result.kind !== "error") return; + expect(result.status).toBe(0); + }); +}); diff --git a/plugins/release-mangaupdates/src/fetcher.ts b/plugins/release-mangaupdates/src/fetcher.ts new file mode 100644 index 00000000..409fb285 --- /dev/null +++ b/plugins/release-mangaupdates/src/fetcher.ts @@ -0,0 +1,129 @@ +/** + * MangaUpdates per-series RSS fetcher. + * + * Wraps `fetch` with conditional GET (`If-None-Match` from a stored ETag) and + * a hard timeout. Returns a discriminated result so the caller can: + * - act on `200`: parse the body, persist the new ETag. + * - skip parse on `304`: nothing changed since last poll. + * - report `429` / `5xx` upstream-status codes back to the host so the + * per-host backoff layer can react. + * + * Network is the only side effect; nothing in here touches storage, the host, + * or process state. That keeps it trivially testable: pass a mocked `fetch` + * implementation and assert. + */ + +/** Discriminated fetch result. */ +export type FetchResult = + | { kind: "ok"; body: string; etag: string | null; status: 200 } + | { kind: "notModified"; status: 304 } + | { kind: "error"; status: number; message: string }; + +export interface FetcherOptions { + /** Custom `fetch` impl (for testing). Defaults to global `fetch`. */ + fetchImpl?: typeof fetch; + /** Per-request timeout. Defaults to 10s. */ + timeoutMs?: number; +} + +/** Public base URL for MangaUpdates' v1 RSS API. */ +export const MANGAUPDATES_RSS_BASE = "https://api.mangaupdates.com/v1/series"; + +/** + * Normalize a MangaUpdates series ID to its numeric form for API calls. 
+ * + * MangaUpdates uses two interchangeable representations of the same ID: + * + * - **Numeric** (e.g. `15180124327`) — the internal primary key. Every + * `/v1/series/...` API endpoint requires this form. + * - **Base36 slug** (e.g. `6z1uqw7`) — a base36 encoding of the numeric + * ID, used in public URLs only (`mangaupdates.com/series/6z1uqw7/...`). + * The API rejects this form with a 405. + * + * Metadata sources (MangaBaka, etc.) typically scrape the public URL and + * store the slug, so the value we receive on `entry.externalIds.mangaupdates` + * is whatever the source happened to grab. Decode here so callers don't + * have to know. + * + * Returns the input unchanged when it's already an all-digit string; + * `null` when the input contains characters outside the base36 alphabet + * (caller should surface as a configuration error). + */ +export function normalizeMangaUpdatesId(raw: string): string | null { + const trimmed = raw.trim(); + if (trimmed.length === 0) return null; + if (/^\d+$/.test(trimmed)) return trimmed; + if (!/^[0-9a-z]+$/i.test(trimmed)) return null; + // parseInt('6z1uqw7', 36) = 15180124327. JS numbers are precise for + // integers up to 2^53; MangaUpdates IDs sit well below that. + const decoded = Number.parseInt(trimmed, 36); + if (!Number.isFinite(decoded) || decoded <= 0) return null; + return String(decoded); +} + +/** + * Build the per-series RSS URL. Accepts either the numeric ID or the + * base36 slug — see `normalizeMangaUpdatesId` for the rationale. + */ +export function feedUrl(mangaUpdatesId: string): string { + const normalized = normalizeMangaUpdatesId(mangaUpdatesId) ?? mangaUpdatesId; + return `${MANGAUPDATES_RSS_BASE}/${normalized}/rss`; +} + +/** + * Conditional GET against a per-series RSS feed. + * + * @param mangaUpdatesId - The MangaUpdates series ID. + * @param previousEtag - The ETag from the previous successful poll (if any). + * @param opts - Fetcher options (custom fetch, timeout). 
+ */
+export async function fetchSeriesFeed(
+  mangaUpdatesId: string,
+  previousEtag: string | null,
+  opts: FetcherOptions = {},
+): Promise<FetchResult> {
+  const fetchImpl = opts.fetchImpl ?? globalThis.fetch;
+  const timeoutMs = opts.timeoutMs ?? 10_000;
+
+  const url = feedUrl(mangaUpdatesId);
+  const headers: Record<string, string> = {
+    Accept: "application/rss+xml, application/xml;q=0.9, */*;q=0.5",
+    "User-Agent": "Codex-ReleaseTracker/1.0 (+https://github.com/AshDevFr/codex)",
+  };
+  if (previousEtag) {
+    headers["If-None-Match"] = previousEtag;
+  }
+
+  // AbortSignal.timeout is the cleanest path. Falling back to a manual
+  // controller would add complexity without value (we already require Node
+  // 22+).
+  const signal = AbortSignal.timeout(timeoutMs);
+
+  let resp: Response;
+  try {
+    resp = await fetchImpl(url, { method: "GET", headers, signal });
+  } catch (err) {
+    const msg = err instanceof Error ? err.message : "Unknown fetch error";
+    // Treat aborts and other transport-level failures as 0/unavailable so
+    // the host's per-host backoff layer can detect "this domain is sad
+    // right now" without us having to invent a fake HTTP status.
+    return { kind: "error", status: 0, message: msg };
+  }
+
+  if (resp.status === 304) {
+    return { kind: "notModified", status: 304 };
+  }
+
+  if (resp.status === 200) {
+    const body = await resp.text();
+    const etag = resp.headers.get("etag");
+    return { kind: "ok", body, etag, status: 200 };
+  }
+
+  // Pass through 429 / 5xx so the host's backoff layer sees the real status.
+  return {
+    kind: "error",
+    status: resp.status,
+    message: `upstream returned ${resp.status} ${resp.statusText}`,
+  };
+}
diff --git a/plugins/release-mangaupdates/src/filter.test.ts b/plugins/release-mangaupdates/src/filter.test.ts
new file mode 100644
index 00000000..2d69573f
--- /dev/null
+++ b/plugins/release-mangaupdates/src/filter.test.ts
@@ -0,0 +1,115 @@
+import { describe, expect, it } from "vitest";
+import { parseCommaList, passesFilters, resolveFilters } from "./filter.js";
+import type { ParsedRssItem } from "./parser.js";
+import { UNKNOWN_LANGUAGE } from "./parser.js";
+
+function item(overrides: Partial<ParsedRssItem> = {}): ParsedRssItem {
+  return {
+    externalReleaseId: "abc",
+    title: "c.143 by G (en)",
+    chapter: 143,
+    volume: null,
+    group: "G",
+    language: "en",
+    link: "https://example.com",
+    observedAt: new Date().toISOString(),
+    ...overrides,
+  };
+}
+
+describe("resolveFilters", () => {
+  it("normalizes languages: trim, lowercase, dedup", () => {
+    const f = resolveFilters({
+      languages: ["EN", " es ", "en", ""],
+      blockedGroups: [],
+    });
+    expect(f.languages).toEqual(["en", "es"]);
+  });
+
+  it("normalizes blocked groups (case-insensitive set)", () => {
+    const f = resolveFilters({
+      languages: ["en"],
+      blockedGroups: ["LowQuality", " MTL Group "],
+    });
+    expect(f.blockedGroups.has("lowquality")).toBe(true);
+    expect(f.blockedGroups.has("mtl group")).toBe(true);
+  });
+
+  it("defaults includeUnknownLanguage to false", () => {
+    const f = resolveFilters({ languages: ["en"], blockedGroups: [] });
+    expect(f.includeUnknownLanguage).toBe(false);
+  });
+});
+
+describe("parseCommaList", () => {
+  it("splits, trims, drops empties", () => {
+    expect(parseCommaList(" a , b , , c")).toEqual(["a", "b", "c"]);
+  });
+
+  it("returns [] for non-string input", () => {
+    expect(parseCommaList(undefined)).toEqual([]);
+    expect(parseCommaList(null)).toEqual([]);
+    expect(parseCommaList(42)).toEqual([]);
+  });
+});
+
+describe("passesFilters", () => {
+ 
it("passes English item when languages = ['en']", () => { + const f = resolveFilters({ languages: ["en"], blockedGroups: [] }); + expect(passesFilters(item({ language: "en" }), f)).toBe(true); + }); + + it("rejects Spanish item when languages = ['en']", () => { + const f = resolveFilters({ languages: ["en"], blockedGroups: [] }); + expect(passesFilters(item({ language: "es" }), f)).toBe(false); + }); + + it("passes Spanish when languages = ['en', 'es']", () => { + const f = resolveFilters({ languages: ["en", "es"], blockedGroups: [] }); + expect(passesFilters(item({ language: "es" }), f)).toBe(true); + }); + + it("rejects unknown-language item by default", () => { + const f = resolveFilters({ languages: ["en"], blockedGroups: [] }); + expect(passesFilters(item({ language: UNKNOWN_LANGUAGE }), f)).toBe(false); + }); + + it("admits unknown-language item when includeUnknownLanguage = true", () => { + const f = resolveFilters({ + languages: ["en"], + blockedGroups: [], + includeUnknownLanguage: true, + }); + expect(passesFilters(item({ language: UNKNOWN_LANGUAGE }), f)).toBe(true); + }); + + it("passes everything (including unknown) when languages list is empty", () => { + const f = resolveFilters({ languages: [], blockedGroups: [] }); + expect(passesFilters(item({ language: "en" }), f)).toBe(true); + expect(passesFilters(item({ language: "es" }), f)).toBe(true); + // Unknown language is *still* gated by includeUnknownLanguage + // (defaults to false); an empty `languages` list means "no language + // restriction on known codes," not "include unknown." 
+ expect(passesFilters(item({ language: UNKNOWN_LANGUAGE }), f)).toBe(false); + }); + + it("rejects items from a blocked group", () => { + const f = resolveFilters({ + languages: ["en"], + blockedGroups: ["MTL Group"], + }); + expect(passesFilters(item({ group: "MTL Group" }), f)).toBe(false); + }); + + it("group blocklist is case-insensitive", () => { + const f = resolveFilters({ languages: ["en"], blockedGroups: ["mtl group"] }); + expect(passesFilters(item({ group: "MTL Group" }), f)).toBe(false); + expect(passesFilters(item({ group: "MTL GROUP" }), f)).toBe(false); + expect(passesFilters(item({ group: "Other Group" }), f)).toBe(true); + }); + + it("admits items with no group regardless of blocklist", () => { + const f = resolveFilters({ languages: ["en"], blockedGroups: ["MTL"] }); + expect(passesFilters(item({ group: null }), f)).toBe(true); + }); +}); diff --git a/plugins/release-mangaupdates/src/filter.ts b/plugins/release-mangaupdates/src/filter.ts new file mode 100644 index 00000000..6506e870 --- /dev/null +++ b/plugins/release-mangaupdates/src/filter.ts @@ -0,0 +1,106 @@ +/** + * Filtering: language allowlist + group blocklist. + * + * Filters are applied client-side in the plugin (before recording) for two + * reasons: + * 1. Keeps the ledger small. Out-of-language items would be dropped by the + * host anyway via the latest_known_* gate, but writing them to the + * ledger pollutes the inbox and wastes write IO. + * 2. Keeps the inbox clean. Users who configure `["en"]` don't want to see + * Spanish entries hidden behind a state flag — they want them gone. + */ + +import { type ParsedRssItem, UNKNOWN_LANGUAGE } from "./parser.js"; + +/** + * Resolved, normalized filter inputs for a single series. Both lists are + * lowercased + trimmed. Empty `languages` is interpreted as "no filter" + * (everything passes), but the caller is expected to pass at least the + * server-wide default to avoid that footgun. 
+ */
+export interface ResolvedFilters {
+  /** Lowercased ISO 639-1 codes; empty = no filter. */
+  languages: string[];
+  /** Lowercased group names; case-insensitive exact match against `group`. */
+  blockedGroups: Set<string>;
+  /**
+   * Whether to include items whose language couldn't be detected
+   * (`UNKNOWN_LANGUAGE` sentinel). Default false — be conservative.
+   */
+  includeUnknownLanguage: boolean;
+}
+
+/**
+ * Build resolved filters from raw config strings + lists. Centralizes the
+ * normalization so the poll handler doesn't have to care about casing or
+ * whitespace.
+ */
+export function resolveFilters(input: {
+  languages: string[];
+  blockedGroups: string[];
+  includeUnknownLanguage?: boolean;
+}): ResolvedFilters {
+  const languages = dedupePreserveOrder(
+    input.languages.map((s) => s.trim().toLowerCase()).filter((s) => s.length > 0),
+  );
+  const blockedGroups = new Set(
+    input.blockedGroups.map((s) => s.trim().toLowerCase()).filter((s) => s.length > 0),
+  );
+  return {
+    languages,
+    blockedGroups,
+    includeUnknownLanguage: input.includeUnknownLanguage ?? false,
+  };
+}
+
+/**
+ * Parse a comma-separated string into a clean list (trim, drop empties).
+ * Helper for `blockedGroups` which is admin-config typed as a single string.
+ */
+export function parseCommaList(raw: unknown): string[] {
+  if (typeof raw !== "string") return [];
+  return raw
+    .split(",")
+    .map((s) => s.trim())
+    .filter((s) => s.length > 0);
+}
+
+/**
+ * Returns true if the item should be kept.
+ *
+ * Language filter:
+ *   - If `languages` is empty → pass.
+ *   - Otherwise, item.language must be in the list (case-insensitive).
+ *   - `unknown` language is rejected unless `includeUnknownLanguage` is true.
+ *
+ * Group filter:
+ *   - If `group` is null → pass (we have nothing to match against).
+ *   - Otherwise, group must NOT be in `blockedGroups`.
+ */
+export function passesFilters(item: ParsedRssItem, filters: ResolvedFilters): boolean {
+  // Language gate.
+ if (item.language === UNKNOWN_LANGUAGE) { + if (!filters.includeUnknownLanguage) return false; + } else if (filters.languages.length > 0) { + if (!filters.languages.includes(item.language.toLowerCase())) return false; + } + + // Group blocklist. + if (item.group !== null && filters.blockedGroups.size > 0) { + if (filters.blockedGroups.has(item.group.trim().toLowerCase())) return false; + } + + return true; +} + +function dedupePreserveOrder(xs: string[]): string[] { + const seen = new Set(); + const out: string[] = []; + for (const x of xs) { + if (!seen.has(x)) { + seen.add(x); + out.push(x); + } + } + return out; +} diff --git a/plugins/release-mangaupdates/src/index.test.ts b/plugins/release-mangaupdates/src/index.test.ts new file mode 100644 index 00000000..0a066ec5 --- /dev/null +++ b/plugins/release-mangaupdates/src/index.test.ts @@ -0,0 +1,221 @@ +import { HostRpcClient, type TrackedSeriesEntry } from "@ashdev/codex-plugin-sdk"; +import { describe, expect, it, vi } from "vitest"; +import { pollSeries } from "./index.js"; +import { EXTERNAL_ID_SOURCE_MANGAUPDATES } from "./manifest.js"; + +// ----------------------------------------------------------------------------- +// Helpers +// ----------------------------------------------------------------------------- + +function trackedEntry(seriesId: string, muId: string | null = "12345"): TrackedSeriesEntry { + return { + seriesId, + ...(muId + ? 
{ externalIds: { [EXTERNAL_ID_SOURCE_MANGAUPDATES]: muId } as Record } + : {}), + }; +} + +const multilingualFeedXml = ` + + + + <![CDATA[c.142 by EnglishGroup (en)]]> + https://www.mangaupdates.com/release.html?id=1001 + 1001 + Mon, 04 May 2026 01:00:00 GMT + + + <![CDATA[c.144 by SpanishGroup (es)]]> + https://www.mangaupdates.com/release.html?id=1002 + 1002 + Sun, 03 May 2026 12:00:00 GMT + + + <![CDATA[c.145 by BlockedGroup (en)]]> + https://www.mangaupdates.com/release.html?id=1003 + 1003 + Sat, 02 May 2026 22:00:00 GMT + + +`; + +interface CapturedCall { + method: string; + params: unknown; +} + +/** + * Build an `HostRpcClient` whose calls are intercepted in-memory. Each call + * is recorded; the response is provided by `respond`. + */ +function makeMockRpc(respond: (method: string, params: unknown) => unknown): { + rpc: HostRpcClient; + calls: CapturedCall[]; +} { + const calls: CapturedCall[] = []; + // We bypass the wire format entirely: provide a custom `writeFn` that + // captures the request, then synthesize a matching response and feed it + // back via `handleResponse`. This exercises the real id-correlation path. + // eslint-disable-next-line prefer-const + let rpc: HostRpcClient; + const writeFn = (line: string) => { + const req = JSON.parse(line.trim()) as { + id: number; + method: string; + params: unknown; + }; + calls.push({ method: req.method, params: req.params }); + let result: unknown; + let error: { code: number; message: string } | null = null; + try { + result = respond(req.method, req.params); + } catch (err) { + error = { + code: -32_000, + message: err instanceof Error ? err.message : "synthetic error", + }; + } + setImmediate(() => { + const payload = error + ? 
{ jsonrpc: "2.0", id: req.id, error } + : { jsonrpc: "2.0", id: req.id, result }; + rpc.handleResponse(JSON.stringify(payload)); + }); + }; + rpc = new HostRpcClient(writeFn); + return { rpc, calls }; +} + +function mockFetchOk(body: string, etag?: string): typeof fetch { + return vi.fn().mockResolvedValue( + new Response(body, { + status: 200, + headers: etag ? { etag } : {}, + }), + ) as unknown as typeof fetch; +} + +/** + * Build a minimal Response-like stub. `Response`'s constructor refuses some + * status codes (304, 204) since they're "null body status" codes. We only + * need a handful of fields to drive `fetcher.ts`. + */ +function stubResponse(status: number, body = "", headers: Record = {}): Response { + const h = new Headers(headers); + return { + status, + statusText: "", + headers: h, + text: async () => body, + } as unknown as Response; +} + +// ----------------------------------------------------------------------------- +// pollSeries +// ----------------------------------------------------------------------------- + +describe("pollSeries", () => { + it("skips a series that has no MangaUpdates external ID", async () => { + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "x", deduped: false })); + const out = await pollSeries(rpc, "src-1", trackedEntry("series-1", null), { + blockedGroups: [], + timeoutMs: 1000, + fetchImpl: vi.fn() as unknown as typeof fetch, // never called + }); + expect(out.fetched).toBe(false); + expect(out.error).toContain("missing mangaupdates external ID"); + expect(calls).toHaveLength(0); + }); + + it("records candidates for in-language items and skips blocked groups", async () => { + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "ld", deduped: false })); + const out = await pollSeries(rpc, "src-1", trackedEntry("series-1", "999"), { + // Effective languages from the host gate is empty here (the plugin's + // client-side language filter is a host-handoff stub today). The + // group blocklist still applies. 
+ blockedGroups: ["BlockedGroup"], + timeoutMs: 1000, + fetchImpl: mockFetchOk(multilingualFeedXml, '"new-etag"'), + }); + expect(out.fetched).toBe(true); + expect(out.notModified).toBe(false); + expect(out.parsed).toBe(3); // 3 items in the fixture + // Blocked group dropped, the other two are recorded. + expect(out.recorded).toBe(2); + expect(out.etag).toBe('"new-etag"'); + + const recordCalls = calls.filter((c) => c.method === "releases/record"); + expect(recordCalls).toHaveLength(2); + const groups = recordCalls.map((c) => { + const params = c.params as { candidate: { groupOrUploader: string | null } }; + return params.candidate.groupOrUploader; + }); + expect(groups).toEqual(["EnglishGroup", "SpanishGroup"]); + }); + + it("returns notModified when upstream replies 304", async () => { + const { rpc } = makeMockRpc(() => ({ ledgerId: "x", deduped: false })); + const fetchImpl = vi.fn().mockResolvedValue(stubResponse(304)); + const out = await pollSeries(rpc, "src-1", trackedEntry("series-1"), { + blockedGroups: [], + timeoutMs: 1000, + fetchImpl: fetchImpl as unknown as typeof fetch, + }); + expect(out.notModified).toBe(true); + expect(out.parsed).toBe(0); + expect(out.recorded).toBe(0); + expect(out.upstreamStatus).toBe(304); + }); + + it("propagates upstream 429 status without recording", async () => { + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "x", deduped: false })); + const fetchImpl = vi.fn().mockResolvedValue(stubResponse(429)); + const out = await pollSeries(rpc, "src-1", trackedEntry("series-1"), { + blockedGroups: [], + timeoutMs: 1000, + fetchImpl: fetchImpl as unknown as typeof fetch, + }); + expect(out.fetched).toBe(false); + expect(out.upstreamStatus).toBe(429); + expect(calls.filter((c) => c.method === "releases/record")).toHaveLength(0); + }); + + it("survives a record() error and continues to next item", async () => { + let recordCalls = 0; + const { rpc } = makeMockRpc((method) => { + if (method === "releases/record") { + 
recordCalls++; + if (recordCalls === 1) { + // Synthesize a JSON-RPC error for the first record() call. The + // mock writeFn catches the throw and turns it into an `error` + // response, mimicking what the host would emit on rejection. + throw new Error("simulated host error"); + } + } + return { ledgerId: "ld", deduped: false }; + }); + const fetchImpl = mockFetchOk(multilingualFeedXml, '"e"'); + const out = await pollSeries(rpc, "src-1", trackedEntry("series-1"), { + blockedGroups: [], + timeoutMs: 1000, + fetchImpl, + }); + // 3 items parsed; first record() failed so recorded reflects only the + // remaining two successful inserts. + expect(out.parsed).toBe(3); + expect(out.recorded).toBe(2); + }); + + it("counts deduped records as not-newly-recorded", async () => { + const { rpc } = makeMockRpc(() => ({ ledgerId: "ld", deduped: true })); + const fetchImpl = mockFetchOk(multilingualFeedXml, '"e"'); + const out = await pollSeries(rpc, "src-1", trackedEntry("series-1"), { + blockedGroups: [], + timeoutMs: 1000, + fetchImpl, + }); + expect(out.parsed).toBe(3); + expect(out.recorded).toBe(0); // every record returned deduped:true + }); +}); diff --git a/plugins/release-mangaupdates/src/index.ts b/plugins/release-mangaupdates/src/index.ts new file mode 100644 index 00000000..90676f19 --- /dev/null +++ b/plugins/release-mangaupdates/src/index.ts @@ -0,0 +1,478 @@ +/** + * MangaUpdates RSS Release-Source Plugin for Codex. + * + * Polls per-series RSS feeds at MangaUpdates and announces new chapter / + * volume releases for tracked series. The plugin is the first writer of + * `release_ledger` rows in production — earlier phases build the + * infrastructure, this one delivers the first real notification feed. + * + * Flow per `releases/poll`: + * 1. Pull tracked-series scope from the host (`releases/list_tracked`). + * Filtered server-side to series with a `mangaupdates` external ID. + * 2. For each series, conditional GET the RSS feed. + * 3. 
Parse the response into items, then filter by: + * - per-series language list (admin / per-series config) + * - admin-configured group blocklist + * 4. Build `ReleaseCandidate` rows and stream them via + * `releases/record`. The host's matcher applies the threshold and + * ledger dedup. + * 5. Pass the new ETag back via the poll response so the host updates + * the source row. + * + * **Concurrency note:** The plugin host already serializes RPCs per plugin + * process, so we don't need to throttle internally beyond an in-poll loop + * that walks tracked series sequentially. + */ + +import { + createLogger, + createReleaseSourcePlugin, + type HostRpcClient, + HostRpcError, + type InitializeParams, + RELEASES_METHODS, + type ReleaseCandidate, + type ReleasePollRequest, + type ReleasePollResponse, + type TrackedSeriesEntry, +} from "@ashdev/codex-plugin-sdk"; +import { fetchSeriesFeed } from "./fetcher.js"; +import { parseCommaList, passesFilters, resolveFilters } from "./filter.js"; +import { EXTERNAL_ID_SOURCE_MANGAUPDATES, manifest } from "./manifest.js"; +import { type ParsedRssItem, parseFeed } from "./parser.js"; + +const logger = createLogger({ name: manifest.name, level: "info" }); + +// ============================================================================= +// Plugin-level state (set during initialize) +// ============================================================================= + +interface PluginState { + hostRpc: HostRpcClient | null; + /** Admin-configured group blocklist (lowercased exact match). */ + blockedGroupsCsv: string; + /** Hard timeout for upstream fetches. */ + requestTimeoutMs: number; +} + +const state: PluginState = { + hostRpc: null, + blockedGroupsCsv: "", + requestTimeoutMs: 10_000, +}; + +/** Reset state. Exported for tests; not part of the plugin contract. 
*/
+export function _resetState(): void {
+  state.hostRpc = null;
+  state.blockedGroupsCsv = "";
+  state.requestTimeoutMs = 10_000;
+}
+
+// =============================================================================
+// Reverse-RPC wrappers (typed shorthands so the poll code reads cleanly)
+// =============================================================================
+
+interface ListTrackedResponse {
+  tracked: TrackedSeriesEntry[];
+  nextOffset?: number;
+}
+
+interface RecordResponse {
+  ledgerId: string;
+  deduped: boolean;
+}
+
+async function listTracked(
+  rpc: HostRpcClient,
+  sourceId: string,
+  offset: number,
+  limit: number,
+): Promise<ListTrackedResponse> {
+  return rpc.call<ListTrackedResponse>(RELEASES_METHODS.LIST_TRACKED, {
+    sourceId,
+    offset,
+    limit,
+  });
+}
+
+async function recordCandidate(
+  rpc: HostRpcClient,
+  sourceId: string,
+  candidate: ReleaseCandidate,
+): Promise<RecordResponse | null> {
+  try {
+    return await rpc.call<RecordResponse>(RELEASES_METHODS.RECORD, {
+      sourceId,
+      candidate,
+    });
+  } catch (err) {
+    if (err instanceof HostRpcError) {
+      // Threshold rejection / validation error / unknown source. Log and
+      // skip; the next poll will retry the still-eligible candidates.
+      logger.warn(
+        `record failed for ${candidate.externalReleaseId}: ${err.message} (code ${err.code})`,
+      );
+    } else {
+      const msg = err instanceof Error ? err.message : "unknown error";
+      logger.warn(`record failed for ${candidate.externalReleaseId}: ${msg}`);
+    }
+    return null;
+  }
+}
+
+// =============================================================================
+// Iteration helpers
+// =============================================================================
+
+/**
+ * Lazily walk all tracked-series pages from the host. Yields entries one
+ * series at a time so the caller can interleave per-series fetches without
+ * buffering the whole list (relevant for users tracking hundreds of series).
+ */ +async function* iterateTrackedSeries( + rpc: HostRpcClient, + sourceId: string, +): AsyncGenerator { + const pageSize = 200; + let offset = 0; + while (true) { + const page = await listTracked(rpc, sourceId, offset, pageSize); + for (const entry of page.tracked) { + yield entry; + } + if (page.nextOffset === undefined || page.tracked.length === 0) return; + offset = page.nextOffset; + } +} + +/** + * Per-series effective language list. We use the host's `latestKnown*` + * exposure plus the `externalIds` map to scope the fetch, but the + * languages config is owned by the host (set on `series_tracking.languages` + * with fallback to the server-wide default). + * + * However, the current `releases/list_tracked` response shape doesn't + * expose per-series `languages` — see plan doc for this design choice. + * For Phase 6 the plugin reads its admin-level group blocklist and emits + * candidates with the language tag from the parsed entry; the host's + * `latest_known_*` advance gate enforces the per-series language list + * authoritatively (see `services/release/languages.rs`). + * + * We *also* want to drop out-of-language candidates client-side to keep the + * ledger small and the inbox clean. Without per-series languages on the + * tracked-series payload, the client-side filter degrades to a no-op + * pass-everything for known languages — leaving it to the host's gate. The + * group blocklist still applies. + * + * If a future protocol revision exposes `effectiveLanguages` on the + * tracked-series entry, swap this stub for the real list and the existing + * `passesFilters` will do the right thing. + */ +function effectiveLanguagesForSeries(_entry: TrackedSeriesEntry): string[] { + return []; // empty = no client-side language gate; host gate is authoritative +} + +/** + * Map a `ParsedRssItem` to a `ReleaseCandidate`. Confidence is 1.0 because + * the match is keyed by external ID — there's no fuzzy matching. 
+ */ +function toCandidate(entry: TrackedSeriesEntry, item: ParsedRssItem): ReleaseCandidate { + const candidate: ReleaseCandidate = { + seriesMatch: { + codexSeriesId: entry.seriesId, + confidence: 1.0, + reason: `mangaupdates_id:${entry.externalIds?.[EXTERNAL_ID_SOURCE_MANGAUPDATES] ?? ""}`, + }, + externalReleaseId: item.externalReleaseId, + chapter: item.chapter, + volume: item.volume, + language: item.language, + groupOrUploader: item.group, + payloadUrl: item.link.length > 0 ? item.link : `urn:mu:${item.externalReleaseId}`, + observedAt: item.observedAt, + }; + return candidate; +} + +// ============================================================================= +// Per-series poll +// ============================================================================= + +/** Outcome of a single per-series fetch+record cycle. */ +export interface SeriesPollOutcome { + seriesId: string; + fetched: boolean; + notModified: boolean; + parsed: number; + /** Of those parsed, how many passed client-side filters and were sent to record. */ + matched: number; + recorded: number; + /** Of those sent to record, how many the host deduped onto an existing row. */ + deduped: number; + upstreamStatus: number; + /** New ETag returned by upstream (only set when fetched=true). */ + etag: string | null; + /** Error string if the per-series fetch failed; empty otherwise. */ + error: string; +} + +/** + * Poll a single series. Internal — exposed for testing. + * + * Aggregates the worst (highest) upstream status across the per-series + * fetches at the call site so the host's per-host backoff layer sees real + * 429/5xx signals. 
+ */ +export async function pollSeries( + rpc: HostRpcClient, + sourceId: string, + entry: TrackedSeriesEntry, + options: { + blockedGroups: string[]; + timeoutMs: number; + fetchImpl?: typeof fetch; + }, +): Promise { + const muId = entry.externalIds?.[EXTERNAL_ID_SOURCE_MANGAUPDATES]; + if (!muId) { + return { + seriesId: entry.seriesId, + fetched: false, + notModified: false, + parsed: 0, + matched: 0, + recorded: 0, + deduped: 0, + upstreamStatus: 0, + etag: null, + error: "missing mangaupdates external ID", + }; + } + + // We don't have per-series ETag here — that lives on the source row, not + // the series. For a per-source feed (rss-uploader) ETags align cleanly; + // for per-series feeds (this plugin) we'd need per-(source, series) state + // to do conditional GETs per series. That's a future optimization; for + // now we always do an unconditional GET. Daily polls + small per-series + // bodies keep the bandwidth cost negligible. + const result = await fetchSeriesFeed(muId, null, { + fetchImpl: options.fetchImpl, + timeoutMs: options.timeoutMs, + }); + + if (result.kind === "notModified") { + return { + seriesId: entry.seriesId, + fetched: true, + notModified: true, + parsed: 0, + matched: 0, + recorded: 0, + deduped: 0, + upstreamStatus: 304, + etag: null, + error: "", + }; + } + + if (result.kind === "error") { + return { + seriesId: entry.seriesId, + fetched: false, + notModified: false, + parsed: 0, + matched: 0, + recorded: 0, + deduped: 0, + upstreamStatus: result.status, + etag: null, + error: result.message, + }; + } + + // result.kind === "ok" + const items = parseFeed(result.body); + const filters = resolveFilters({ + languages: effectiveLanguagesForSeries(entry), + blockedGroups: options.blockedGroups, + }); + let matched = 0; + let recorded = 0; + let deduped = 0; + for (const item of items) { + if (!passesFilters(item, filters)) continue; + matched++; + const candidate = toCandidate(entry, item); + const outcome = await recordCandidate(rpc, 
sourceId, candidate); + if (!outcome) continue; + if (outcome.deduped) { + deduped++; + } else { + recorded++; + } + } + return { + seriesId: entry.seriesId, + fetched: true, + notModified: false, + parsed: items.length, + matched, + recorded, + deduped, + upstreamStatus: 200, + etag: result.etag, + error: "", + }; +} + +// ============================================================================= +// Top-level poll handler +// ============================================================================= + +async function poll(params: ReleasePollRequest, rpc: HostRpcClient): Promise { + const sourceId = params.sourceId; + const blockedGroups = parseCommaList(state.blockedGroupsCsv); + + let parsed = 0; + let matched = 0; + let recorded = 0; + let deduped = 0; + let worstStatus = 200; + let lastEtag: string | null = null; + let seenSeries = 0; + // Series the host returned that lack a MangaUpdates external ID. A high + // count here is the most common cause of an "empty" poll: the plugin + // can't fetch a feed without an MU ID, so the user needs to populate + // those (manual paste or metadata refresh from MangaBaka). + let skippedNoMuId = 0; + + for await (const entry of iterateTrackedSeries(rpc, sourceId)) { + seenSeries++; + const outcome = await pollSeries(rpc, sourceId, entry, { + blockedGroups, + timeoutMs: state.requestTimeoutMs, + }); + parsed += outcome.parsed; + matched += outcome.matched; + recorded += outcome.recorded; + deduped += outcome.deduped; + if (outcome.upstreamStatus > worstStatus) { + worstStatus = outcome.upstreamStatus; + } + if (outcome.etag) lastEtag = outcome.etag; + + if (outcome.error === "missing mangaupdates external ID") { + skippedNoMuId++; + } else if (outcome.error) { + logger.warn(`series ${entry.seriesId}: ${outcome.error} (status ${outcome.upstreamStatus})`); + } + } + + if (skippedNoMuId > 0) { + logger.info( + `skipped ${skippedNoMuId} of ${seenSeries} tracked series for source=${sourceId}: no mangaupdates external ID. 
Add one in the Tracking panel or run a metadata refresh.`, + ); + } + + logger.info( + `poll complete: source=${sourceId} series=${seenSeries} skipped=${skippedNoMuId} parsed=${parsed} matched=${matched} recorded=${recorded} deduped=${deduped} worst_status=${worstStatus}`, + ); + + // Report counters back to the host so the source's `last_summary` is + // accurate. Without these the host only sees the (empty) `candidates` + // payload — we record via reverse-RPC mid-poll — and the badge reads + // "Fetched 0 items" no matter what actually happened. + // Per-series ETags don't align with the per-source state slot, so we + // intentionally leave `etag` undefined unless we actually saw one + // (which today we won't, since we don't pass If-None-Match per series). + return { + notModified: false, + upstreamStatus: worstStatus, + parsed, + matched, + recorded, + deduped, + ...(lastEtag !== null ? { etag: lastEtag } : {}), + }; +} + +// ============================================================================= +// Plugin Initialization +// ============================================================================= + +/** + * Register a single static source row representing the MangaUpdates batch + * feed. Unlike Nyaa (one row per uploader), MangaUpdates polls all tracked + * series under one logical feed, so we always declare exactly one row keyed + * `default`. Retries on `METHOD_NOT_FOUND` to handle the brief race where + * the host has not yet installed the releases reverse-RPC handler. 
+ */ +export async function registerSources( + rpc: HostRpcClient, +): Promise<{ registered: number; pruned: number } | null> { + const sources = [ + { + sourceKey: "default", + displayName: "MangaUpdates Releases", + kind: "rss-series" as const, + config: null, + }, + ]; + const maxAttempts = 5; + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + try { + return await rpc.call<{ registered: number; pruned: number }>( + RELEASES_METHODS.REGISTER_SOURCES, + { sources }, + ); + } catch (err) { + const isMethodNotFound = err instanceof HostRpcError && err.code === -32601; + if (isMethodNotFound && attempt < maxAttempts) { + await new Promise((r) => setTimeout(r, 50 * attempt)); + continue; + } + const reason = err instanceof Error ? err.message : String(err); + logger.error(`register_sources failed: ${reason}`); + return null; + } + } + return null; +} + +createReleaseSourcePlugin({ + manifest, + provider: { + async poll(params: ReleasePollRequest): Promise { + if (!state.hostRpc) { + throw new Error("Plugin not initialized: hostRpc client missing"); + } + return poll(params, state.hostRpc); + }, + }, + logLevel: "info", + async onInitialize(params: InitializeParams) { + state.hostRpc = params.hostRpc; + const ac = params.adminConfig ?? {}; + if (typeof ac.blockedGroups === "string") { + state.blockedGroupsCsv = ac.blockedGroups; + } + if (typeof ac.requestTimeoutMs === "number" && Number.isFinite(ac.requestTimeoutMs)) { + state.requestTimeoutMs = Math.max(1_000, Math.min(ac.requestTimeoutMs, 60_000)); + } + logger.info( + `initialized: blockedGroups=${state.blockedGroupsCsv ? "set" : "empty"} timeoutMs=${state.requestTimeoutMs}`, + ); + + // Materialize the single static source row. Deferred to a microtask so + // we run *after* the host installs the releases reverse-RPC handler. 
+ queueMicrotask(() => { + void registerSources(params.hostRpc).then((result) => { + if (result) { + logger.info(`register_sources: registered=${result.registered} pruned=${result.pruned}`); + } + }); + }); + }, +}); + +logger.info("MangaUpdates release-source plugin started"); diff --git a/plugins/release-mangaupdates/src/manifest.ts b/plugins/release-mangaupdates/src/manifest.ts new file mode 100644 index 00000000..3719786d --- /dev/null +++ b/plugins/release-mangaupdates/src/manifest.ts @@ -0,0 +1,63 @@ +import type { PluginManifest } from "@ashdev/codex-plugin-sdk"; +import packageJson from "../package.json" with { type: "json" }; + +/** + * External-ID source name for MangaUpdates. + * + * MangaUpdates IDs are populated by metadata-provider plugins (e.g. + * MangaBaka cross-references) or pasted manually by the user via the series + * tracking panel. The release plugin needs the bare source name (no + * `plugin:` prefix) here to match the host's external-ID filter. + */ +export const EXTERNAL_ID_SOURCE_MANGAUPDATES = "mangaupdates" as const; + +export const manifest = { + name: "release-mangaupdates", + displayName: "MangaUpdates Releases", + version: packageJson.version, + description: + "Announces new chapter releases for tracked series via MangaUpdates per-series RSS feeds. Filters by user-configured languages.", + author: "Codex", + homepage: "https://github.com/AshDevFr/codex", + protocolVersion: "1.1", + capabilities: { + releaseSource: { + kinds: ["rss-series"], + requiresAliases: false, + requiresExternalIds: [EXTERNAL_ID_SOURCE_MANGAUPDATES], + canAnnounceChapters: true, + canAnnounceVolumes: true, + }, + }, + configSchema: { + description: + "MangaUpdates plugin configuration. 
Per-series language preferences live on each series' tracking config; the values here are server-wide defaults applied when a series doesn't override them.", + fields: [ + { + key: "blockedGroups", + label: "Blocked Scanlation Groups", + description: + "Comma-separated list of scanlation group names to exclude from announcements (case-insensitive, exact match). Per-series overrides may further extend this list.", + type: "string" as const, + required: false, + default: "", + example: "LowQualityScans,MTL Group", + }, + { + key: "requestTimeoutMs", + label: "Request Timeout (ms)", + description: + "How long to wait for a single RSS fetch before giving up. Defaults to 10000 (10 seconds).", + type: "number" as const, + required: false, + default: 10_000, + }, + ], + }, + userDescription: + "Announces new chapters for series you've tracked, using their MangaUpdates IDs. Filters releases to languages you can read. Notification-only — Codex does not download anything.", + adminSetupInstructions: + "1. No config is required to get started — saving the plugin is enough. The plugin auto-registers a single source row (`MangaUpdates Releases`) in **Settings → Release tracking** on first start, where you can disable it, change the poll interval, or hit *Poll now*. 2. To get announcements for a series, edit its tracking panel and either paste a `mangaupdates` external ID or let the metadata-refresh path populate it from MangaBaka cross-references. 3. Optional: set `blockedGroups` (CSV, case-insensitive) to filter noisy scanlators server-wide; per-series language preferences live on each series' tracking config and override the server default (`release_tracking.default_languages`). 
No credentials are needed; MangaUpdates RSS feeds are public.", +} as const satisfies PluginManifest & { + capabilities: { releaseSource: { kinds: ["rss-series"] } }; +}; diff --git a/plugins/release-mangaupdates/src/parser.test.ts b/plugins/release-mangaupdates/src/parser.test.ts new file mode 100644 index 00000000..4023be76 --- /dev/null +++ b/plugins/release-mangaupdates/src/parser.test.ts @@ -0,0 +1,227 @@ +import { describe, expect, it } from "vitest"; +import { parseFeed, parseItem, parseTitle } from "./parser.js"; + +// ----------------------------------------------------------------------------- +// parseTitle +// ----------------------------------------------------------------------------- + +describe("parseTitle", () => { + it("extracts chapter, group, and language from canonical English entry", () => { + const t = parseTitle("c.143 by Best Group (en)"); + expect(t.chapter).toBe(143); + expect(t.volume).toBeNull(); + expect(t.group).toBe("Best Group"); + expect(t.language).toBe("en"); + }); + + it("extracts chapter and volume when both present", () => { + const t = parseTitle("Vol.2 c.14 by GroupName (en)"); + expect(t.chapter).toBe(14); + expect(t.volume).toBe(2); + expect(t.group).toBe("GroupName"); + expect(t.language).toBe("en"); + }); + + it("handles decimal chapter numbers", () => { + const t = parseTitle("c.47.5 by SubScans (en)"); + expect(t.chapter).toBe(47.5); + }); + + it("handles long-form vol./ch. 
prefixes", () => { + const t = parseTitle("vol.5 ch.30 by Group (es)"); + expect(t.volume).toBe(5); + expect(t.chapter).toBe(30); + expect(t.language).toBe("es"); + }); + + it("extracts Spanish entry", () => { + const t = parseTitle("c.144 by Hablada Scans (es)"); + expect(t.chapter).toBe(144); + expect(t.language).toBe("es"); + expect(t.group).toBe("Hablada Scans"); + }); + + it("extracts Indonesian entry", () => { + const t = parseTitle("c.145 by ID Translators (id)"); + expect(t.chapter).toBe(145); + expect(t.language).toBe("id"); + }); + + it("defaults language to 'en' when no language tag is present", () => { + // The MangaUpdates v1 RSS endpoint serves the English-localized release + // stream and titles ship without a language tag. Defaulting to "en" + // (rather than the legacy `UNKNOWN_LANGUAGE` sentinel) keeps the + // client-side language gate from dropping every item. + const t = parseTitle("c.143 by Best Group"); + expect(t.chapter).toBe(143); + expect(t.group).toBe("Best Group"); + expect(t.language).toBe("en"); + }); + + it("handles volume-only bundle (no chapter)", () => { + const t = parseTitle("Vol.15 by VolBundlerScans (en)"); + expect(t.volume).toBe(15); + expect(t.chapter).toBeNull(); + expect(t.group).toBe("VolBundlerScans"); + expect(t.language).toBe("en"); + }); + + it("handles entry with no group", () => { + const t = parseTitle("c.143 (en)"); + expect(t.chapter).toBe(143); + expect(t.language).toBe("en"); + expect(t.group).toBeNull(); + }); + + it("lowercases language codes regardless of source casing", () => { + const t = parseTitle("c.143 by Group (EN)"); + expect(t.language).toBe("en"); + }); + + it("ignores trailing whitespace before language code", () => { + const t = parseTitle("c.143 by Group (en) "); + expect(t.language).toBe("en"); + }); +}); + +// ----------------------------------------------------------------------------- +// parseItem +// ----------------------------------------------------------------------------- + +const 
englishItem = ` + <![CDATA[c.143 by Best Group (en)]]> + https://www.mangaupdates.com/release.html?id=12345 + 12345 + Mon, 04 May 2026 02:31:00 GMT +`; + +describe("parseItem", () => { + it("parses a canonical English item", () => { + const item = parseItem(englishItem); + expect(item).not.toBeNull(); + if (!item) return; + expect(item.externalReleaseId).toBe("12345"); + expect(item.title).toBe("c.143 by Best Group (en)"); + expect(item.chapter).toBe(143); + expect(item.volume).toBeNull(); + expect(item.group).toBe("Best Group"); + expect(item.language).toBe("en"); + expect(item.link).toBe("https://www.mangaupdates.com/release.html?id=12345"); + expect(item.observedAt).toBe("2026-05-04T02:31:00.000Z"); + }); + + it("falls back to link as externalReleaseId when guid is missing", () => { + const xml = ` + c.144 by Group (en) + https://www.mangaupdates.com/release.html?id=99 + Mon, 04 May 2026 02:31:00 GMT + `; + const item = parseItem(xml); + expect(item?.externalReleaseId).toBe("https://www.mangaupdates.com/release.html?id=99"); + }); + + it("derives a deterministic id when both guid and link are missing", () => { + const xml = ` + c.144 by Group (en) + Mon, 04 May 2026 02:31:00 GMT + `; + const a = parseItem(xml); + const b = parseItem(xml); + expect(a?.externalReleaseId).toBeTruthy(); + expect(a?.externalReleaseId).toBe(b?.externalReleaseId); + expect(a?.externalReleaseId.startsWith("t:")).toBe(true); + }); + + it("returns null for a malformed item missing title", () => { + const xml = `https://example.com`; + expect(parseItem(xml)).toBeNull(); + }); + + it("falls back to current time when pubDate is invalid", () => { + const xml = ` + c.1 by G (en) + not a real date + `; + const item = parseItem(xml); + expect(item).not.toBeNull(); + if (!item) return; + expect(Number.isNaN(new Date(item.observedAt).getTime())).toBe(false); + }); + + it("decodes XML entities in title", () => { + const xml = ` + c.1 by G & B (en) + https://example.com/x + Mon, 04 May 2026 02:31:00 
GMT + `; + const item = parseItem(xml); + expect(item?.title).toBe("c.1 by G & B (en)"); + expect(item?.group).toBe("G & B"); + }); +}); + +// ----------------------------------------------------------------------------- +// parseFeed +// ----------------------------------------------------------------------------- + +const multilingualFeed = ` + + + Test Series Releases + + <![CDATA[c.142 by EnglishGroup (en)]]> + https://www.mangaupdates.com/release.html?id=1001 + 1001 + Mon, 04 May 2026 01:00:00 GMT + + + <![CDATA[c.144 by SpanishGroup (es)]]> + https://www.mangaupdates.com/release.html?id=1002 + 1002 + Sun, 03 May 2026 12:00:00 GMT + + + <![CDATA[c.145 by IndonesianGroup (id)]]> + https://www.mangaupdates.com/release.html?id=1003 + 1003 + Sat, 02 May 2026 22:00:00 GMT + + + <![CDATA[Vol.15 by VolBundler (en)]]> + https://www.mangaupdates.com/release.html?id=1004 + 1004 + Fri, 01 May 2026 10:00:00 GMT + + + <![CDATA[c.146 by NoLanguageTagGroup]]> + https://www.mangaupdates.com/release.html?id=1005 + 1005 + Thu, 30 Apr 2026 09:00:00 GMT + + +`; + +describe("parseFeed", () => { + it("parses all items in a multi-language fixture", () => { + const items = parseFeed(multilingualFeed); + expect(items).toHaveLength(5); + expect(items[0]?.language).toBe("en"); + expect(items[1]?.language).toBe("es"); + expect(items[2]?.language).toBe("id"); + expect(items[3]?.language).toBe("en"); + expect(items[3]?.volume).toBe(15); + expect(items[3]?.chapter).toBeNull(); + // Item 4's title carries no language tag; parser defaults to "en" + // because the MU v1 RSS feed is the English release stream. + expect(items[4]?.language).toBe("en"); + }); + + it("returns an empty array for an empty channel", () => { + expect(parseFeed("")).toEqual([]); + }); + + it("returns an empty array for malformed XML", () => { + // Non-fatal: parseFeed should never throw, just return whatever it can. 
+ expect(parseFeed("<<>>")).toEqual([]); + }); +}); diff --git a/plugins/release-mangaupdates/src/parser.ts b/plugins/release-mangaupdates/src/parser.ts new file mode 100644 index 00000000..321cc2c1 --- /dev/null +++ b/plugins/release-mangaupdates/src/parser.ts @@ -0,0 +1,236 @@ +/** + * RSS parser for MangaUpdates per-series feeds. + * + * Per-series feed: `https://api.mangaupdates.com/v1/series/{series_id}/rss` + * + * Each `` is one scanlation release. The plugin extracts: + * - chapter / volume from the title + * - scanlation group from the title + * - language tag (parenthesized two-letter code) from the title + * - link (the MangaUpdates release page) used as `payloadUrl` + * - pubDate as `observedAt` + * + * Implementation note: we do NOT pull in a heavy XML parser. The MangaUpdates + * RSS format is simple, well-formed, and stable. A small targeted regex + * pipeline avoids a 100kb dependency and CVE surface for marginal benefit. + */ + +/** Parsed item, pre-`ReleaseCandidate`. */ +export interface ParsedRssItem { + /** Stable per-source ID. Derived from the release URL or guid. */ + externalReleaseId: string; + /** Original title string. Useful for debugging / fallback. */ + title: string; + /** Chapter number (decimals supported, e.g. "47.5"). */ + chapter: number | null; + /** Volume number. */ + volume: number | null; + /** + * Language tag (lowercased ISO 639-1). Defaults to `"en"` when the title + * doesn't carry an explicit `(xx)` code, since the MangaUpdates v1 RSS + * endpoint serves the English release stream. The legacy + * `UNKNOWN_LANGUAGE` sentinel is still exported for callers that want + * to surface "no tag detected" explicitly, but the parser no longer + * produces it on its own. + */ + language: string; + /** Scanlation group name (best-effort; nullable). */ + group: string | null; + /** Release page URL on MangaUpdates. Used as `payloadUrl`. */ + link: string; + /** ISO-8601 string. Falls back to "now" when pubDate is missing/invalid. 
*/ + observedAt: string; +} + +/** Sentinel returned when the language tag can't be detected. */ +export const UNKNOWN_LANGUAGE = "unknown" as const; + +// ----------------------------------------------------------------------------- +// XML helpers +// ----------------------------------------------------------------------------- + +/** Strip CDATA wrapper if present, unescape `&` `<` `>` `"`. */ +function decodeXmlText(raw: string): string { + let s = raw.trim(); + const cdataMatch = s.match(/^$/); + if (cdataMatch?.[1] !== undefined) { + s = cdataMatch[1]; + } + return s + .replace(/&/g, "&") + .replace(/</g, "<") + .replace(/>/g, ">") + .replace(/"/g, '"') + .replace(/'/g, "'") + .replace(/'/g, "'"); +} + +/** Pull the first `` text content from an XML fragment, or null. */ +function extractTagText(xml: string, tag: string): string | null { + const re = new RegExp(`<${tag}[^>]*>([\\s\\S]*?)`, "i"); + const m = xml.match(re); + if (!m?.[1]) return null; + return decodeXmlText(m[1]); +} + +/** Pull all `...` blocks from a feed. */ +function splitItems(xml: string): string[] { + const out: string[] = []; + const re = /]*>([\s\S]*?)<\/item>/gi; + for (;;) { + const match = re.exec(xml); + if (match === null) break; + if (match[1] !== undefined) out.push(match[1]); + } + return out; +} + +// ----------------------------------------------------------------------------- +// Title parsing +// ----------------------------------------------------------------------------- + +/** + * Extract chapter/volume/group/language from a MangaUpdates RSS title. + * + * Observed shapes: + * "Vol.2 c.14 by GroupName (en)" + * "v.2 c.14.5 by GroupName (es)" + * "c.143 by GroupName" (language missing) + * "Vol.15 by GroupName (en)" (volume-only bundle) + * "c.143 (en)" (no group) + * + * Volume tokens: `v.N`, `vol.N`, `Vol.N` (case-insensitive). + * Chapter tokens: `c.N`, `ch.N`, `Ch.N` (decimals allowed). + * Group: text between `by ` and the next `(` or end-of-string. 
+ * Language: trailing `(xx)` two-letter code, lowercased. + */ +export function parseTitle(title: string): { + chapter: number | null; + volume: number | null; + group: string | null; + language: string; +} { + const trimmed = title.trim(); + + // Chapter: c.N or ch.N (allow decimals). + let chapter: number | null = null; + const chMatch = trimmed.match(/\bc(?:h)?\.?\s*([0-9]+(?:\.[0-9]+)?)\b/i); + if (chMatch?.[1]) { + const n = Number.parseFloat(chMatch[1]); + if (Number.isFinite(n)) chapter = n; + } + + // Volume: v.N or vol.N. + let volume: number | null = null; + const volMatch = trimmed.match(/\bv(?:ol)?\.?\s*([0-9]+)\b/i); + if (volMatch?.[1]) { + const n = Number.parseInt(volMatch[1], 10); + if (Number.isFinite(n)) volume = n; + } + + // Group: "by " up to "(" or end. + let group: string | null = null; + const groupMatch = trimmed.match(/\bby\s+(.+?)(?:\s*\([a-z]{2,3}\)\s*)?$/i); + if (groupMatch?.[1]) { + const candidate = groupMatch[1].trim(); + if (candidate.length > 0) group = candidate; + } + + // Language: trailing parenthesized 2-3 letter code (e.g. (en), (es), (id), (por)). + // + // The current MangaUpdates v1 RSS endpoint (`/v1/series/{id}/rss`) ships + // titles without a language tag — it's the English-localized release + // stream by design. Default to `"en"` so items aren't dropped by the + // client-side language gate; an explicit `(es)` / `(id)` / etc. still + // wins when present, and the host's per-series language list remains + // the authoritative gate downstream. The legacy `UNKNOWN_LANGUAGE` + // sentinel is kept exported for backwards compatibility but no longer + // produced by this parser. 
+ let language = "en"; + const langMatch = trimmed.match(/\(([a-z]{2,3})\)\s*$/i); + if (langMatch?.[1]) { + language = langMatch[1].toLowerCase(); + } + + return { chapter, volume, group, language }; +} + +// ----------------------------------------------------------------------------- +// Item parsing +// ----------------------------------------------------------------------------- + +/** + * Best-effort `pubDate` -> ISO-8601 conversion. MangaUpdates uses RFC-2822 + * style dates (`Mon, 04 May 2026 02:31:00 GMT`). Falls back to "now" on + * invalid input — never throws, since one bad pubDate shouldn't drop the + * whole feed. + */ +function pubDateToIso(raw: string | null): string { + if (raw) { + const d = new Date(raw); + if (!Number.isNaN(d.getTime())) return d.toISOString(); + } + return new Date().toISOString(); +} + +/** + * Derive a stable external_release_id. Prefer ``, then the link URL, + * otherwise fall back to a deterministic hash of `(title + pubDate)`. + * + * Stability is what matters: re-polling the same item must produce the same + * ID so the host's `(source_id, external_release_id)` dedup catches it. + */ +function deriveExternalReleaseId( + guid: string | null, + link: string | null, + title: string, + pubDate: string | null, +): string { + if (guid && guid.trim().length > 0) return guid.trim(); + if (link && link.trim().length > 0) return link.trim(); + // Deterministic fallback for feeds that omit both. djb2-ish hash keeps the + // ID short while staying stable across polls. + const fallback = `${title}|${pubDate ?? ""}`; + let h = 5381; + for (let i = 0; i < fallback.length; i++) { + h = ((h << 5) + h + fallback.charCodeAt(i)) | 0; + } + return `t:${(h >>> 0).toString(36)}`; +} + +/** + * Parse a single MangaUpdates `` block into a `ParsedRssItem`. Returns + * null if the title is missing entirely (truly malformed item). 
+ */ +export function parseItem(itemXml: string): ParsedRssItem | null { + const title = extractTagText(itemXml, "title"); + if (!title) return null; + + const link = extractTagText(itemXml, "link"); + const guid = extractTagText(itemXml, "guid"); + const pubDate = extractTagText(itemXml, "pubDate"); + + const { chapter, volume, group, language } = parseTitle(title); + + return { + externalReleaseId: deriveExternalReleaseId(guid, link, title, pubDate), + title, + chapter, + volume, + group, + language, + link: link ?? "", + observedAt: pubDateToIso(pubDate), + }; +} + +/** + * Parse a full MangaUpdates per-series RSS feed body into items. Bad items + * (missing title) are dropped silently — the feed should be best-effort + * tolerant. + */ +export function parseFeed(xml: string): ParsedRssItem[] { + return splitItems(xml) + .map(parseItem) + .filter((i): i is ParsedRssItem => i !== null); +} diff --git a/plugins/release-mangaupdates/tsconfig.json b/plugins/release-mangaupdates/tsconfig.json new file mode 100644 index 00000000..ef1ca5f9 --- /dev/null +++ b/plugins/release-mangaupdates/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "lib": ["ES2022"], + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "sourceMap": true, + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts"] +} diff --git a/plugins/release-mangaupdates/vitest.config.ts b/plugins/release-mangaupdates/vitest.config.ts new file mode 100644 index 00000000..ae847ff6 --- /dev/null +++ b/plugins/release-mangaupdates/vitest.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from 
"vitest/config"; + +export default defineConfig({ + test: { + include: ["src/**/*.test.ts"], + }, +}); diff --git a/plugins/release-nyaa/package-lock.json b/plugins/release-nyaa/package-lock.json new file mode 100644 index 00000000..22f5f4b6 --- /dev/null +++ b/plugins/release-nyaa/package-lock.json @@ -0,0 +1,1971 @@ +{ + "name": "@ashdev/codex-plugin-release-nyaa", + "version": "1.18.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@ashdev/codex-plugin-release-nyaa", + "version": "1.18.0", + "license": "MIT", + "dependencies": { + "@ashdev/codex-plugin-sdk": "file:../sdk-typescript" + }, + "bin": { + "codex-plugin-release-nyaa": "dist/index.js" + }, + "devDependencies": { + "@biomejs/biome": "^2.4.4", + "@types/node": "^22.0.0", + "esbuild": "^0.27.3", + "typescript": "^5.9.3", + "vitest": "^4.0.18" + }, + "engines": { + "node": ">=22.0.0" + } + }, + "../sdk-typescript": { + "name": "@ashdev/codex-plugin-sdk", + "version": "1.18.0", + "license": "MIT", + "devDependencies": { + "@biomejs/biome": "^2.4.4", + "@types/node": "^22.0.0", + "typescript": "^5.9.3", + "vitest": "^4.0.18" + }, + "engines": { + "node": ">=22.0.0" + } + }, + "node_modules/@ashdev/codex-plugin-sdk": { + "resolved": "../sdk-typescript", + "link": true + }, + "node_modules/@biomejs/biome": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.4.14.tgz", + "integrity": "sha512-TmAvxOEgrpLypzVGJ8FulIZnlyA9TxrO1hyqYrCz9r+bwma9xXxuLA5IuYnj55XQneFx460KjRbx6SWGLkg3bQ==", + "dev": true, + "license": "MIT OR Apache-2.0", + "bin": { + "biome": "bin/biome" + }, + "engines": { + "node": ">=14.21.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/biome" + }, + "optionalDependencies": { + "@biomejs/cli-darwin-arm64": "2.4.14", + "@biomejs/cli-darwin-x64": "2.4.14", + "@biomejs/cli-linux-arm64": "2.4.14", + "@biomejs/cli-linux-arm64-musl": "2.4.14", + "@biomejs/cli-linux-x64": "2.4.14", + 
"@biomejs/cli-linux-x64-musl": "2.4.14", + "@biomejs/cli-win32-arm64": "2.4.14", + "@biomejs/cli-win32-x64": "2.4.14" + } + }, + "node_modules/@biomejs/cli-darwin-arm64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.4.14.tgz", + "integrity": "sha512-XvgoE9XOawUOQPdmvs4J7wPhi/DLwSCGks3AlPJDmh34O0awRTqCED1HRcRDdpf1Zrp4us4MGOOdIxNpbqNF5Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-darwin-x64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.4.14.tgz", + "integrity": "sha512-jE7hKBCFhOx3uUh+ZkWBfOHxAcILPfhFplNkuID/eZeSTLHzfZzoZxW8fbqY9xXRnPi7jGNAf1iPVR+0yWsM/Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.4.14.tgz", + "integrity": "sha512-2TELhZnW5RSLL063l9rc5xLpA0ZIw0Ccwy/0q384rvNAgFw3yI76bd59547yxowdQr5MNPET/xDLrLuvgSeeWQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64-musl": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.4.14.tgz", + "integrity": "sha512-/z+6gqAqqUQTHazwStxSXKHg9b8UvqBmDFRp+c4wYbq2KXhELQDon9EoC9RpmQ8JWkqQx/lIUy/cs+MhzDZp6A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64": { + "version": "2.4.14", + 
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.4.14.tgz", + "integrity": "sha512-zHrlQZDBDUz4OLAraYpWKcnLS6HOewBFWYOzY91d1ZjdqZwibOyb6BEu6WuWLugyo0P3riCmsbV9UqV1cSXwQg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64-musl": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.4.14.tgz", + "integrity": "sha512-R6BWgJdQOwW9ulJatuTVrQkjnODjqHZkKNOqb1sz++3Noe5LYd0i3PchnOBUCYAPHoPWHhjJqbdZlHEu0hpjdA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-arm64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.4.14.tgz", + "integrity": "sha512-M3EH5hqOI/F/FUA2u4xcLoUgmxd218mvuj/6JL7Hv2toQvr2/AdOvKSpGkoRuWFCtQPVa+ZqkEV3Q5xBA9+XSA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-x64": { + "version": "2.4.14", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.4.14.tgz", + "integrity": "sha512-WL0EG5qE+EAKomGXbf2g6VnSKJhTL3tXC0QRzWRwA5VpjxNYa6H4P7ZWfymbGE4IhZZQi1KXQ2R0YjwInmz2fA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@emnapi/core": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.10.0.tgz", + "integrity": "sha512-yq6OkJ4p82CAfPl0u9mQebQHKPJkY7WrIuk205cTYnYe+k2Z8YBh11FrbRG/H6ihirqcacOgl2BIO8oyMQLeXw==", + "dev": true, + "license": "MIT", + 
"optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.2.1", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.10.0.tgz", + "integrity": "sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.1.tgz", + "integrity": "sha512-uTII7OYF+/Mes/MrcIOYp5yOtSMLBWSIoLPpcgwipoiKbli6k322tcoFsxoIIxPDqW01SQGAgko4EzZi2BNv2w==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", + "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": 
"sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + 
} + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": 
"sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.4.tgz", + "integrity": "sha512-3NQNNgA1YSlJb/kMH1ildASP9HW7/7kYnRI2szWJaofaS1hWmbGI4H+d3+22aGzXXN9IJ+n+GiFVcGipJP18ow==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@tybys/wasm-util": "^0.10.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" + }, + "peerDependencies": { + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1" + } + }, + "node_modules/@oxc-project/types": { + "version": "0.127.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.127.0.tgz", + "integrity": "sha512-aIYXQBo4lCbO4z0R3FHeucQHpF46l2LbMdxRvqvuRuW2OxdnSkcng5B8+K12spgLDj93rtN3+J2Vac/TIO+ciQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/@rolldown/binding-android-arm64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.17.tgz", + "integrity": "sha512-s70pVGhw4zqGeFnXWvAzJDlvxhlRollagdCCKRgOsgUOH3N1l0LIxf83AtGzmb5SiVM4Hjl5HyarMRfdfj3DaQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-darwin-arm64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.17.tgz", + "integrity": "sha512-4ksWc9n0mhlZpZ9PMZgTGjeOPRu8MB1Z3Tz0Mo02eWfWCHMW1zN82Qz/pL/rC+yQa+8ZnutMF0JjJe7PjwasYw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || 
>=22.12.0" + } + }, + "node_modules/@rolldown/binding-darwin-x64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.17.tgz", + "integrity": "sha512-SUSDOI6WwUVNcWxd02QEBjLdY1VPHvlEkw6T/8nYG322iYWCTxRb1vzk4E+mWWYehTp7ERibq54LSJGjmouOsw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-freebsd-x64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.17.tgz", + "integrity": "sha512-hwnz3nw9dbJ05EDO/PvcjaaewqqDy7Y1rn1UO81l8iIK1GjenME75dl16ajbvSSMfv66WXSRCYKIqfgq2KCfxw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm-gnueabihf": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.17.tgz", + "integrity": "sha512-IS+W7epTcwANmFSQFrS1SivEXHtl1JtuQA9wlxrZTcNi6mx+FDOYrakGevvvTwgj2JvWiK8B29/qD9BELZPyXQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm64-gnu": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.17.tgz", + "integrity": "sha512-e6usGaHKW5BMNZOymS1UcEYGowQMWcgZ71Z17Sl/h2+ZziNJ1a9n3Zvcz6LdRyIW5572wBCTH/Z+bKuZouGk9Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm64-musl": { + "version": 
"1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.17.tgz", + "integrity": "sha512-b/CgbwAJpmrRLp02RPfhbudf5tZnN9nsPWK82znefso832etkem8H7FSZwxrOI9djcdTP7U6YfNhbRnh7djErg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-ppc64-gnu": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.0.0-rc.17.tgz", + "integrity": "sha512-4EII1iNGRUN5WwGbF/kOh/EIkoDN9HsupgLQoXfY+D1oyJm7/F4t5PYU5n8SWZgG0FEwakyM8pGgwcBYruGTlA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-s390x-gnu": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.0.0-rc.17.tgz", + "integrity": "sha512-AH8oq3XqQo4IibpVXvPeLDI5pzkpYn0WiZAfT05kFzoJ6tQNzwRdDYQ45M8I/gslbodRZwW8uxLhbSBbkv96rA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-x64-gnu": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.17.tgz", + "integrity": "sha512-cLnjV3xfo7KslbU41Z7z8BH/E1y5mzUYzAqih1d1MDaIGZRCMqTijqLv76/P7fyHuvUcfGsIpqCdddbxLLK9rA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-x64-musl": { + "version": "1.0.0-rc.17", + "resolved": 
"https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.17.tgz", + "integrity": "sha512-0phclDw1spsL7dUB37sIARuis2tAgomCJXAHZlpt8PXZ4Ba0dRP1e+66lsRqrfhISeN9bEGNjQs+T/Fbd7oYGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-openharmony-arm64": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.17.tgz", + "integrity": "sha512-0ag/hEgXOwgw4t8QyQvUCxvEg+V0KBcA6YuOx9g0r02MprutRF5dyljgm3EmR02O292UX7UeS6HzWHAl6KgyhA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-wasm32-wasi": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.17.tgz", + "integrity": "sha512-LEXei6vo0E5wTGwpkJ4KoT3OZJRnglwldt5ziLzOlc6qqb55z4tWNq2A+PFqCJuvWWdP53CVhG1Z9NtToDPJrA==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "1.10.0", + "@emnapi/runtime": "1.10.0", + "@napi-rs/wasm-runtime": "^1.1.4" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-win32-arm64-msvc": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.17.tgz", + "integrity": "sha512-gUmyzBl3SPMa6hrqFUth9sVfcLBlYsbMzBx5PlexMroZStgzGqlZ26pYG89rBb45Mnia+oil6YAIFeEWGWhoZA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-win32-x64-msvc": { + "version": "1.0.0-rc.17", 
+ "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-rc.17.tgz", + "integrity": "sha512-3hkiolcUAvPB9FLb3UZdfjVVNWherN1f/skkGWJP/fgSQhYUZpSIRr0/I8ZK9TkF3F7kxvJAk0+IcKvPHk9qQg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.17.tgz", + "integrity": "sha512-n8iosDOt6Ig1UhJ2AYqoIhHWh/isz0xpicHTzpKBeotdVsTEcxsSA/i3EVM7gQAj0rU27OLAxCjzlj15IWY7bg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.2.tgz", + "integrity": "sha512-RoBvJ2X0wuKlWFIjrwffGw1IqZHKQqzIchKaadZZfnNpsAYp2mM0h36JtPCjNDAHGgYez/15uMBpfGwchhiMgg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/chai": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": 
true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.19.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.17.tgz", + "integrity": "sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@vitest/expect": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.1.5.tgz", + "integrity": "sha512-PWBaRY5JoKuRnHlUHfpV/KohFylaDZTupcXN1H9vYryNLOnitSw60Mw9IAE2r67NbwwzBw/Cc/8q9BK3kIX8Kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.1.0", + "@types/chai": "^5.2.2", + "@vitest/spy": "4.1.5", + "@vitest/utils": "4.1.5", + "chai": "^6.2.2", + "tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.5.tgz", + "integrity": "sha512-/x2EmFC4mT4NNzqvC3fmesuV97w5FC903KPmey4gsnJiMQ3Be1IlDKVaDaG8iqaLFHqJ2FVEkxZk5VmeLjIItw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "4.1.5", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.21" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "4.1.5", + "resolved": 
"https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.5.tgz", + "integrity": "sha512-7I3q6l5qr03dVfMX2wCo9FxwSJbPdwKjy2uu/YPpU3wfHvIL4QHwVRp57OfGrDFeUJ8/8QdfBKIV12FTtLn00g==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.1.5.tgz", + "integrity": "sha512-2D+o7Pr82IEO46YPpoA/YU0neeyr6FTerQb5Ro7BUnBuv6NQtT/kmVnczngiMEBhzgqz2UZYl5gArejsyERDSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "4.1.5", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.1.5.tgz", + "integrity": "sha512-zypXEt4KH/XgKGPUz4eC2AvErYx0My5hfL8oDb1HzGFpEk1P62bxSohdyOmvz+d9UJwanI68MKwr2EquOaOgMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.1.5", + "@vitest/utils": "4.1.5", + "magic-string": "^0.30.21", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.1.5.tgz", + "integrity": "sha512-2lNOsh6+R2Idnf1TCZqSwYlKN2E/iDlD8sgU59kYVl+OMDmvldO1VDk39smRfpUNwYpNRVn3w4YfuC7KfbBnkQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.1.5.tgz", + "integrity": "sha512-76wdkrmfXfqGjueGgnb45ITPyUi1ycZ4IHgC2bhPDUfWHklY/q3MdLOAB+TF1e6xfl8NxNY0ZYaPCFNWSsw3Ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.1.5", + "convert-source-map": "^2.0.0", + 
"tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/chai": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz", + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/es-module-lexer": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.1.0.tgz", + "integrity": "sha512-n27zTYMjYu1aj4MjCWzSP7G9r75utsaoc8m61weK+W8JMBGGQybd43GstCXZ3WNmSFtGT9wi59qQTW6mhTR5LQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", + "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": 
"bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.7", + "@esbuild/android-arm": "0.27.7", + "@esbuild/android-arm64": "0.27.7", + "@esbuild/android-x64": "0.27.7", + "@esbuild/darwin-arm64": "0.27.7", + "@esbuild/darwin-x64": "0.27.7", + "@esbuild/freebsd-arm64": "0.27.7", + "@esbuild/freebsd-x64": "0.27.7", + "@esbuild/linux-arm": "0.27.7", + "@esbuild/linux-arm64": "0.27.7", + "@esbuild/linux-ia32": "0.27.7", + "@esbuild/linux-loong64": "0.27.7", + "@esbuild/linux-mips64el": "0.27.7", + "@esbuild/linux-ppc64": "0.27.7", + "@esbuild/linux-riscv64": "0.27.7", + "@esbuild/linux-s390x": "0.27.7", + "@esbuild/linux-x64": "0.27.7", + "@esbuild/netbsd-arm64": "0.27.7", + "@esbuild/netbsd-x64": "0.27.7", + "@esbuild/openbsd-arm64": "0.27.7", + "@esbuild/openbsd-x64": "0.27.7", + "@esbuild/openharmony-arm64": "0.27.7", + "@esbuild/sunos-x64": "0.27.7", + "@esbuild/win32-arm64": "0.27.7", + "@esbuild/win32-ia32": "0.27.7", + "@esbuild/win32-x64": "0.27.7" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/expect-type": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/lightningcss": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz", + "integrity": "sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.32.0", + "lightningcss-darwin-arm64": "1.32.0", + "lightningcss-darwin-x64": "1.32.0", + "lightningcss-freebsd-x64": "1.32.0", + "lightningcss-linux-arm-gnueabihf": "1.32.0", + "lightningcss-linux-arm64-gnu": "1.32.0", + "lightningcss-linux-arm64-musl": "1.32.0", + "lightningcss-linux-x64-gnu": "1.32.0", + "lightningcss-linux-x64-musl": "1.32.0", + "lightningcss-win32-arm64-msvc": "1.32.0", + "lightningcss-win32-x64-msvc": "1.32.0" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz", + "integrity": "sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 
12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz", + "integrity": "sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz", + "integrity": "sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz", + "integrity": "sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz", + "integrity": 
"sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz", + "integrity": "sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz", + "integrity": "sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz", + "integrity": "sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + 
"node_modules/lightningcss-linux-x64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz", + "integrity": "sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz", + "integrity": "sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz", + "integrity": "sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + 
}, + "node_modules/nanoid": { + "version": "3.3.12", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.12.tgz", + "integrity": "sha512-ZB9RH/39qpq5Vu6Y+NmUaFhQR6pp+M2Xt76XBnEwDaGcVAqhlvxrl3B2bKS5D3NH3QR76v3aSrKaF/Kiy7lEtQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.14", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.14.tgz", + "integrity": 
"sha512-SoSL4+OSEtR99LHFZQiJLkT59C5B1amGO1NzTwj7TT1qCUgUO6hxOvzkOYxD+vMrXBM3XJIKzokoERdqQq/Zmg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/rolldown": { + "version": "1.0.0-rc.17", + "resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-rc.17.tgz", + "integrity": "sha512-ZrT53oAKrtA4+YtBWPQbtPOxIbVDbxT0orcYERKd63VJTF13zPcgXTvD4843L8pcsI7M6MErt8QtON6lrB9tyA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@oxc-project/types": "=0.127.0", + "@rolldown/pluginutils": "1.0.0-rc.17" + }, + "bin": { + "rolldown": "bin/cli.mjs" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "optionalDependencies": { + "@rolldown/binding-android-arm64": "1.0.0-rc.17", + "@rolldown/binding-darwin-arm64": "1.0.0-rc.17", + "@rolldown/binding-darwin-x64": "1.0.0-rc.17", + "@rolldown/binding-freebsd-x64": "1.0.0-rc.17", + "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-rc.17", + "@rolldown/binding-linux-arm64-gnu": "1.0.0-rc.17", + "@rolldown/binding-linux-arm64-musl": "1.0.0-rc.17", + "@rolldown/binding-linux-ppc64-gnu": "1.0.0-rc.17", + "@rolldown/binding-linux-s390x-gnu": "1.0.0-rc.17", + "@rolldown/binding-linux-x64-gnu": "1.0.0-rc.17", + "@rolldown/binding-linux-x64-musl": "1.0.0-rc.17", + "@rolldown/binding-openharmony-arm64": "1.0.0-rc.17", + "@rolldown/binding-wasm32-wasi": "1.0.0-rc.17", + "@rolldown/binding-win32-arm64-msvc": "1.0.0-rc.17", + "@rolldown/binding-win32-x64-msvc": "1.0.0-rc.17" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + 
"integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-4.1.0.tgz", + "integrity": "sha512-Rq7ybcX2RuC55r9oaPVEW7/xu3tj8u4GeBYHBWCychFtzMIr86A7e3PPEBPT37sHStKX3+TiX/Fr/ACmJLVlLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.1.2.tgz", + "integrity": "sha512-dAqSqE/RabpBKI8+h26GfLq6Vb3JVXs30XYQjdMjaj/c2tS8IYYMbIzP599KtRj7c57/wYApb3QjgRgXmrCukA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.16", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.16.tgz", + "integrity": "sha512-pn99VhoACYR8nFHhxqix+uvsbXineAasWm5ojXoN8xEwK5Kd3/TrhNn1wByuD52UxWRLy8pu+kRMniEi6Eq9Zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.4" + }, + 
"engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyrainbow": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.1.0.tgz", + "integrity": "sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "8.0.10", + "resolved": "https://registry.npmjs.org/vite/-/vite-8.0.10.tgz", + "integrity": "sha512-rZuUu9j6J5uotLDs+cAA4O5H4K1SfPliUlQwqa6YEwSrWDZzP4rhm00oJR5snMewjxF5V/K3D4kctsUTsIU9Mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "lightningcss": "^1.32.0", + "picomatch": "^4.0.4", + "postcss": "^8.5.10", + "rolldown": "1.0.0-rc.17", + "tinyglobby": "^0.2.16" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": 
"https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "@vitejs/devtools": "^0.1.0", + "esbuild": "^0.27.0 || ^0.28.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "@vitejs/devtools": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vitest": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.1.5.tgz", + "integrity": "sha512-9Xx1v3/ih3m9hN+SbfkUyy0JAs72ap3r7joc87XL6jwF0jGg6mFBvQ1SrwaX+h8BlkX6Hz9shdd1uo6AF+ZGpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "4.1.5", + "@vitest/mocker": "4.1.5", + "@vitest/pretty-format": "4.1.5", + "@vitest/runner": "4.1.5", + "@vitest/snapshot": "4.1.5", + "@vitest/spy": "4.1.5", + "@vitest/utils": "4.1.5", + "es-module-lexer": "^2.0.0", + "expect-type": "^1.3.0", + "magic-string": "^0.30.21", + "obug": "^2.1.1", + "pathe": "^2.0.3", + "picomatch": "^4.0.3", + "std-env": "^4.0.0-rc.1", + "tinybench": "^2.9.0", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.1.0", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": 
{ + "@edge-runtime/vm": "*", + "@opentelemetry/api": "^1.9.0", + "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.1.5", + "@vitest/browser-preview": "4.1.5", + "@vitest/browser-webdriverio": "4.1.5", + "@vitest/coverage-istanbul": "4.1.5", + "@vitest/coverage-v8": "4.1.5", + "@vitest/ui": "4.1.5", + "happy-dom": "*", + "jsdom": "*", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@opentelemetry/api": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { + "optional": true + }, + "@vitest/coverage-istanbul": { + "optional": true + }, + "@vitest/coverage-v8": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + }, + "vite": { + "optional": false + } + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + } + } +} diff --git a/plugins/release-nyaa/package.json b/plugins/release-nyaa/package.json new file mode 100644 index 00000000..b5c98456 --- /dev/null +++ b/plugins/release-nyaa/package.json @@ -0,0 +1,52 @@ +{ + "name": "@ashdev/codex-plugin-release-nyaa", + "version": "1.18.0", + "description": "Nyaa.si uploader-feed release-source plugin for Codex - announces torrent releases for tracked series, filtered by an admin allowlist of trusted uploaders", + "main": "dist/index.js", + "bin": 
"dist/index.js", + "type": "module", + "files": [ + "dist", + "README.md" + ], + "repository": { + "type": "git", + "url": "https://github.com/AshDevFr/codex.git", + "directory": "plugins/release-nyaa" + }, + "scripts": { + "build": "esbuild src/index.ts --bundle --platform=node --target=node22 --format=esm --outfile=dist/index.js --sourcemap --banner:js='#!/usr/bin/env node'", + "dev": "npm run build -- --watch", + "clean": "rm -rf dist", + "start": "node dist/index.js", + "lint": "biome check .", + "lint:fix": "biome check --write .", + "typecheck": "tsc --noEmit", + "test": "vitest run --passWithNoTests", + "test:watch": "vitest", + "prepublishOnly": "npm run lint && npm run build" + }, + "keywords": [ + "codex", + "plugin", + "nyaa", + "release-source", + "manga", + "torrent" + ], + "author": "Codex", + "license": "MIT", + "engines": { + "node": ">=22.0.0" + }, + "dependencies": { + "@ashdev/codex-plugin-sdk": "file:../sdk-typescript" + }, + "devDependencies": { + "@biomejs/biome": "^2.4.4", + "@types/node": "^22.0.0", + "esbuild": "^0.27.3", + "typescript": "^5.9.3", + "vitest": "^4.0.18" + } +} diff --git a/plugins/release-nyaa/src/fetcher.test.ts b/plugins/release-nyaa/src/fetcher.test.ts new file mode 100644 index 00000000..b1fcef07 --- /dev/null +++ b/plugins/release-nyaa/src/fetcher.test.ts @@ -0,0 +1,261 @@ +import { describe, expect, it, vi } from "vitest"; +import { + feedUrl, + fetchSubscriptionFeed, + parseSubscriptionList, + parseSubscriptionToken, +} from "./fetcher.js"; + +// ----------------------------------------------------------------------------- +// parseSubscriptionToken / parseSubscriptionList +// ----------------------------------------------------------------------------- + +describe("parseSubscriptionToken", () => { + it("returns null for empty / whitespace-only input", () => { + expect(parseSubscriptionToken("")).toBeNull(); + expect(parseSubscriptionToken(" ")).toBeNull(); + }); + + it("treats a bare identifier as a user feed", () => 
{ + expect(parseSubscriptionToken("1r0n")).toEqual({ kind: "user", identifier: "1r0n" }); + }); + + it("treats `q:` as a search query", () => { + expect(parseSubscriptionToken("q:LuminousScans")).toEqual({ + kind: "query", + identifier: "LuminousScans", + }); + }); + + it("treats `query:` (long form) as a search query", () => { + expect(parseSubscriptionToken("query:Manga Group")).toEqual({ + kind: "query", + identifier: "Manga Group", + }); + }); + + it("rejects an empty query body", () => { + expect(parseSubscriptionToken("q:")).toBeNull(); + expect(parseSubscriptionToken("query: ")).toBeNull(); + }); + + it("parses `q:?key=value&…` as URL-style allowlisted params", () => { + expect(parseSubscriptionToken("q:?c=3_1&q=Berserk")).toEqual({ + kind: "params", + identifier: "c=3_1&q=Berserk", + }); + }); + + it("normalizes URL-style param order so reorderings dedupe", () => { + const a = parseSubscriptionToken("q:?q=Berserk&c=3_1"); + const b = parseSubscriptionToken("q:?c=3_1&q=Berserk"); + expect(a).toEqual(b); + }); + + it("URL-encodes special characters in URL-style params", () => { + expect(parseSubscriptionToken("q:?q=Berserk Volume")).toEqual({ + kind: "params", + identifier: "q=Berserk+Volume", + }); + }); + + it("drops keys that aren't on the allowlist", () => { + expect(parseSubscriptionToken("q:?q=Berserk&s=size&o=desc")).toEqual({ + kind: "params", + identifier: "q=Berserk", + }); + }); + + it("returns null when no allowlisted keys remain", () => { + expect(parseSubscriptionToken("q:?s=size&o=desc")).toBeNull(); + expect(parseSubscriptionToken("q:?")).toBeNull(); + }); + + it("collapses `q:?u=` (only u) to a bare user token for dedup", () => { + expect(parseSubscriptionToken("q:?u=1r0n")).toEqual({ + kind: "user", + identifier: "1r0n", + }); + }); + + it("keeps `q:?u=…&c=…` as params so the category survives", () => { + expect(parseSubscriptionToken("q:?u=1r0n&c=3_1")).toEqual({ + kind: "params", + identifier: "c=3_1&u=1r0n", + }); + }); + + it("ignores 
empty values in URL-style params", () => { + expect(parseSubscriptionToken("q:?c=&q=Berserk")).toEqual({ + kind: "params", + identifier: "q=Berserk", + }); + }); +}); + +describe("parseSubscriptionList", () => { + it("parses a comma-separated list and dedupes (case-insensitive)", () => { + const list = parseSubscriptionList("1r0n, TankobonBlur ,1r0n,q:LuminousScans"); + expect(list).toEqual([ + { kind: "user", identifier: "1r0n" }, + { kind: "user", identifier: "TankobonBlur" }, + { kind: "query", identifier: "LuminousScans" }, + ]); + }); + + it("returns an empty list for non-string / non-array input", () => { + expect(parseSubscriptionList(undefined)).toEqual([]); + expect(parseSubscriptionList(null)).toEqual([]); + expect(parseSubscriptionList(42)).toEqual([]); + expect(parseSubscriptionList({ uploaders: "1r0n" })).toEqual([]); + }); + + it("drops empty tokens (trailing comma, double commas)", () => { + expect(parseSubscriptionList(",,,foo,,,bar,,")).toEqual([ + { kind: "user", identifier: "foo" }, + { kind: "user", identifier: "bar" }, + ]); + }); + + it("parses a JSON array of entries (preferred manifest shape)", () => { + const list = parseSubscriptionList([ + "1r0n", + " TankobonBlur ", + "1r0n", + "q:LuminousScans", + "q:?c=3_1&q=Berserk", + ]); + expect(list).toEqual([ + { kind: "user", identifier: "1r0n" }, + { kind: "user", identifier: "TankobonBlur" }, + { kind: "query", identifier: "LuminousScans" }, + { kind: "params", identifier: "c=3_1&q=Berserk" }, + ]); + }); + + it("returns an empty list for an empty array", () => { + expect(parseSubscriptionList([])).toEqual([]); + }); + + it("ignores non-string entries inside an array", () => { + const list = parseSubscriptionList(["1r0n", 42, null, undefined, "q:Foo"]); + expect(list).toEqual([ + { kind: "user", identifier: "1r0n" }, + { kind: "query", identifier: "Foo" }, + ]); + }); + + it("array entries are NOT comma-split — pre-tokenization is the caller's job", () => { + // Contract: in the array path, 
each element is one token. CSV-style + // splitting only happens on the legacy string path. So `"a,b"` becomes + // a literal user identifier — which Nyaa won't match against, but the + // parser doesn't reject it. + const list = parseSubscriptionList(["a,b"]); + expect(list).toEqual([{ kind: "user", identifier: "a,b" }]); + }); +}); + +// ----------------------------------------------------------------------------- +// feedUrl +// ----------------------------------------------------------------------------- + +describe("feedUrl", () => { + it("builds a user-feed URL", () => { + const url = feedUrl({ kind: "user", identifier: "1r0n" }); + expect(url).toBe("https://nyaa.si/?page=rss&u=1r0n"); + }); + + it("builds a search-feed URL with URL-encoded query", () => { + const url = feedUrl({ kind: "query", identifier: "Luminous Scans" }); + expect(url).toBe("https://nyaa.si/?page=rss&q=Luminous%20Scans"); + }); + + it("respects a custom base URL with trailing slash trimming", () => { + const url = feedUrl({ kind: "user", identifier: "x" }, "https://mirror.example/"); + expect(url).toBe("https://mirror.example/?page=rss&u=x"); + }); + + it("builds a URL from a params-kind subscription verbatim", () => { + const url = feedUrl({ kind: "params", identifier: "c=3_1&q=Berserk" }); + expect(url).toBe("https://nyaa.si/?page=rss&c=3_1&q=Berserk"); + }); +}); + +// ----------------------------------------------------------------------------- +// fetchSubscriptionFeed +// ----------------------------------------------------------------------------- + +function stubResponse(status: number, body = "", headers: Record = {}): Response { + const h = new Headers(headers); + return { + status, + statusText: "", + headers: h, + text: async () => body, + } as unknown as Response; +} + +describe("fetchSubscriptionFeed", () => { + it("returns ok with body, etag, and last-modified on 200", async () => { + const fetchImpl = vi.fn().mockResolvedValue( + stubResponse(200, "", { + etag: '"v1"', + 
"last-modified": "Mon, 04 May 2026 02:31:00 GMT", + }), + ); + const r = await fetchSubscriptionFeed({ kind: "user", identifier: "1r0n" }, null, null, { + fetchImpl: fetchImpl as unknown as typeof fetch, + }); + expect(r.kind).toBe("ok"); + if (r.kind !== "ok") return; + expect(r.body).toBe(""); + expect(r.etag).toBe('"v1"'); + expect(r.lastModified).toBe("Mon, 04 May 2026 02:31:00 GMT"); + }); + + it("returns notModified on 304", async () => { + const fetchImpl = vi.fn().mockResolvedValue(stubResponse(304)); + const r = await fetchSubscriptionFeed({ kind: "user", identifier: "1r0n" }, '"v1"', null, { + fetchImpl: fetchImpl as unknown as typeof fetch, + }); + expect(r.kind).toBe("notModified"); + }); + + it("forwards 429 / 5xx as an error result with the upstream status", async () => { + const fetchImpl = vi.fn().mockResolvedValue(stubResponse(429)); + const r = await fetchSubscriptionFeed({ kind: "user", identifier: "1r0n" }, null, null, { + fetchImpl: fetchImpl as unknown as typeof fetch, + }); + expect(r.kind).toBe("error"); + if (r.kind !== "error") return; + expect(r.status).toBe(429); + }); + + it("returns status=0 on transport error / abort", async () => { + const fetchImpl = vi.fn().mockRejectedValue(new Error("network down")); + const r = await fetchSubscriptionFeed({ kind: "user", identifier: "1r0n" }, null, null, { + fetchImpl: fetchImpl as unknown as typeof fetch, + }); + expect(r.kind).toBe("error"); + if (r.kind !== "error") return; + expect(r.status).toBe(0); + expect(r.message).toContain("network down"); + }); + + it("attaches If-None-Match and If-Modified-Since headers when previous values are passed", async () => { + const fetchImpl = vi.fn().mockResolvedValue(stubResponse(200, "")); + await fetchSubscriptionFeed( + { kind: "user", identifier: "1r0n" }, + '"v1"', + "Sat, 01 May 2026 00:00:00 GMT", + { fetchImpl: fetchImpl as unknown as typeof fetch }, + ); + const callArgs = fetchImpl.mock.calls[0]; + expect(callArgs).toBeDefined(); + if 
(!callArgs) return; + const [, init] = callArgs as [string, RequestInit]; + const headers = init.headers as Record; + expect(headers["If-None-Match"]).toBe('"v1"'); + expect(headers["If-Modified-Since"]).toBe("Sat, 01 May 2026 00:00:00 GMT"); + }); +}); diff --git a/plugins/release-nyaa/src/fetcher.ts b/plugins/release-nyaa/src/fetcher.ts new file mode 100644 index 00000000..e7965c68 --- /dev/null +++ b/plugins/release-nyaa/src/fetcher.ts @@ -0,0 +1,273 @@ +/** + * Nyaa.si RSS fetcher. + * + * Wraps `fetch` with conditional GET (`If-None-Match` from a stored ETag, plus + * `If-Modified-Since` from a stored Last-Modified header) and a hard timeout. + * + * Nyaa exposes two feed shapes we care about: + * - User feed: `https://nyaa.si/?page=rss&u=` + * - Search feed: `https://nyaa.si/?page=rss&q=` (with optional + * filters; the plugin keeps it simple and lets aliases + * do the matching) + * + * Returns a discriminated result so the caller can: + * - act on `200`: parse the body, persist the new ETag. + * - skip parse on `304`: nothing changed since last poll. + * - report `429` / `5xx` upstream-status codes back to the host so the + * per-host backoff layer can react. + * + * Network is the only side effect; nothing in here touches storage, the host, + * or process state. That keeps it trivially testable: pass a mocked `fetch` + * implementation and assert. + */ + +/** Discriminated fetch result. */ +export type FetchResult = + | { kind: "ok"; body: string; etag: string | null; lastModified: string | null; status: 200 } + | { kind: "notModified"; status: 304 } + | { kind: "error"; status: number; message: string }; + +export interface FetcherOptions { + /** Custom `fetch` impl (for testing). Defaults to global `fetch`. */ + fetchImpl?: typeof fetch; + /** Per-request timeout. Defaults to 10s. */ + timeoutMs?: number; + /** Override base URL (for tests / mirrors). Defaults to `https://nyaa.si`. */ + baseUrl?: string; +} + +/** Default Nyaa base URL. 
*/ +export const NYAA_BASE_URL = "https://nyaa.si"; + +/** + * One uploader subscription entry. + * + * Three shapes: + * - `user` — pulls `?page=rss&u=` (a Nyaa user feed). + * - `query` — pulls `?page=rss&q=` (a plain text search). + * - `params` — pulls `?page=rss&` where `` is an + * allowlisted set of Nyaa query keys (`q`, `c`, `f`). Used to express + * category / filter combinations like the Literature → English-translated + * view (`c=3_1`). + */ +export type UploaderSubscription = + | { kind: "user"; identifier: string } + | { kind: "query"; identifier: string } + | { kind: "params"; identifier: string }; + +/** + * Keys allowed through from a `q:?…` URL-style token. `page` is always + * injected by the plugin and can't be overridden; anything not in this set + * is silently dropped to keep the surface tight. + */ +const PARAMS_ALLOWLIST = new Set(["q", "c", "f", "u"]); + +/** + * Parse a `q:?key=value&…` body into a normalized, allowlisted query string. + * Returns null when no allowlisted keys remain (caller drops the token). + * + * Normalization sorts params alphabetically so two tokens that differ only + * in key order dedupe to the same identifier. + */ +function parseUrlParams(body: string): { kind: "user" | "params"; identifier: string } | null { + const params = new URLSearchParams(body); + const kept: [string, string][] = []; + for (const [rawKey, rawValue] of params.entries()) { + const key = rawKey.toLowerCase(); + if (!PARAMS_ALLOWLIST.has(key)) continue; + const value = rawValue.trim(); + if (value.length === 0) continue; + kept.push([key, value]); + } + if (kept.length === 0) return null; + + // If the *only* allowlisted key is `u`, collapse to a plain user token so + // `q:?u=1r0n` dedupes against the bare `1r0n` form and reuses the same + // URL-building branch. + if (kept.length === 1 && kept[0]?.[0] === "u") { + return { kind: "user", identifier: kept[0][1] }; + } + + kept.sort(([a], [b]) => (a < b ? -1 : a > b ? 
1 : 0)); + const normalized = new URLSearchParams(kept).toString(); + return { kind: "params", identifier: normalized }; +} + +/** + * Parse a single uploader subscription token. + * + * Tokens look like: + * - `1r0n` → user feed + * - `q:LuminousScans` → plain search query + * - `query:Manga Group` → plain search query (long form) + * - `q:?c=3_1&q=Berserk` → URL-style params (allowlisted: q, c, f, u) + * - `query:?u=1r0n&c=3_1` → URL-style params, treated as user feed + * + * The leading `?` after `q:` / `query:` is the opt-in switch into URL mode, + * which keeps `q:c=3_1&q=Berserk` (no `?`) parsing as a literal search term + * for backwards compatibility. + * + * Empty / whitespace-only tokens return null (caller should drop them). + */ +export function parseSubscriptionToken(raw: string): UploaderSubscription | null { + const trimmed = raw.trim(); + if (trimmed.length === 0) return null; + + // `q:` / `query:` prefix → search query, in either plain or URL-params form. + const prefixMatch = trimmed.match(/^(q|query):(.*)$/i); + if (prefixMatch) { + const body = (prefixMatch[2] ?? "").trim(); + if (body.length === 0) return null; + + if (body.startsWith("?")) { + return parseUrlParams(body.slice(1)); + } + return { kind: "query", identifier: body }; + } + + // Plain identifier → username feed. + return { kind: "user", identifier: trimmed }; +} + +/** + * Build a stable per-plugin source key for a subscription. Mirrors the + * dedup key used in `parseSubscriptionList` so two ways of writing the + * same subscription collapse to the same source row. + * + * Used by `releases/register_sources` (to declare the plugin-owned key for + * each row) and as a fallback when reconstructing a subscription from a + * source key whose `config` is missing. Lower-cased identifier preserves + * the existing case-insensitive dedup behaviour. 
+ */ +export function subscriptionToSourceKey(sub: UploaderSubscription): string { + return `${sub.kind}:${sub.identifier.toLowerCase()}`; +} + +/** + * Inverse of `subscriptionToSourceKey`: parse a `kind:identifier` source key + * back into a subscription. Returns null for unrecognized keys (older rows + * from a previous plugin version, manual edits, etc.) so the caller can log + * and skip without crashing the whole poll. + * + * Note: the identifier coming back is lower-cased (per the source key + * convention). Nyaa is case-insensitive on usernames and search terms, so + * the round-trip is lossless for our purposes. + */ +export function sourceKeyToSubscription(key: string): UploaderSubscription | null { + const idx = key.indexOf(":"); + if (idx <= 0 || idx === key.length - 1) return null; + const kind = key.slice(0, idx); + const identifier = key.slice(idx + 1); + if (kind === "user" || kind === "query" || kind === "params") { + return { kind, identifier }; + } + return null; +} + +/** + * Parse the admin `uploaders` config into a clean list of subscriptions. + * + * Accepts either a JSON array (preferred — what the manifest now declares) or + * a legacy comma-separated string. The string path is retained so existing + * stored configs and CLI/env-driven setups keep working without a migration. + * + * Skips empty tokens; preserves order; deduplicates case-insensitively. 
+ */ +export function parseSubscriptionList(raw: unknown): UploaderSubscription[] { + let tokens: string[]; + if (Array.isArray(raw)) { + tokens = raw.filter((t): t is string => typeof t === "string"); + } else if (typeof raw === "string") { + tokens = raw.split(","); + } else { + return []; + } + + const seen = new Set(); + const out: UploaderSubscription[] = []; + for (const token of tokens) { + const sub = parseSubscriptionToken(token); + if (sub === null) continue; + const key = subscriptionToSourceKey(sub); + if (seen.has(key)) continue; + seen.add(key); + out.push(sub); + } + return out; +} + +/** Build the per-subscription RSS URL. */ +export function feedUrl( + subscription: UploaderSubscription, + baseUrl: string = NYAA_BASE_URL, +): string { + const base = baseUrl.replace(/\/+$/, ""); + if (subscription.kind === "user") { + return `${base}/?page=rss&u=${encodeURIComponent(subscription.identifier)}`; + } + if (subscription.kind === "query") { + return `${base}/?page=rss&q=${encodeURIComponent(subscription.identifier)}`; + } + // params: identifier is already a URL-encoded, allowlisted query string. + return `${base}/?page=rss&${subscription.identifier}`; +} + +/** + * Conditional GET against an uploader-subscription RSS feed. + * + * @param subscription - The uploader subscription to fetch. + * @param previousEtag - The ETag from the previous successful poll (if any). + * @param previousLastModified - Optional Last-Modified header from the previous + * poll. Nyaa often returns one but doesn't always honor `If-None-Match`; + * sending both maximizes 304 hit rate. + * @param opts - Fetcher options (custom fetch, timeout, base URL override). + */ +export async function fetchSubscriptionFeed( + subscription: UploaderSubscription, + previousEtag: string | null, + previousLastModified: string | null, + opts: FetcherOptions = {}, +): Promise { + const fetchImpl = opts.fetchImpl ?? globalThis.fetch; + const timeoutMs = opts.timeoutMs ?? 
10_000; + const baseUrl = opts.baseUrl ?? NYAA_BASE_URL; + + const url = feedUrl(subscription, baseUrl); + const headers: Record = { + Accept: "application/rss+xml, application/xml;q=0.9, */*;q=0.5", + "User-Agent": "Codex-ReleaseTracker/1.0 (+https://github.com/AshDevFr/codex)", + }; + if (previousEtag) { + headers["If-None-Match"] = previousEtag; + } + if (previousLastModified) { + headers["If-Modified-Since"] = previousLastModified; + } + + const signal = AbortSignal.timeout(timeoutMs); + + let resp: Response; + try { + resp = await fetchImpl(url, { method: "GET", headers, signal }); + } catch (err) { + const msg = err instanceof Error ? err.message : "Unknown fetch error"; + return { kind: "error", status: 0, message: msg }; + } + + if (resp.status === 304) { + return { kind: "notModified", status: 304 }; + } + + if (resp.status === 200) { + const body = await resp.text(); + const etag = resp.headers.get("etag"); + const lastModified = resp.headers.get("last-modified"); + return { kind: "ok", body, etag, lastModified, status: 200 }; + } + + return { + kind: "error", + status: resp.status, + message: `upstream returned ${resp.status} ${resp.statusText}`, + }; +} diff --git a/plugins/release-nyaa/src/index.test.ts b/plugins/release-nyaa/src/index.test.ts new file mode 100644 index 00000000..9678e4a4 --- /dev/null +++ b/plugins/release-nyaa/src/index.test.ts @@ -0,0 +1,355 @@ +import { HostRpcClient, HostRpcError } from "@ashdev/codex-plugin-sdk"; +import { describe, expect, it, vi } from "vitest"; +import { pollSubscription, registerSources } from "./index.js"; +import type { AliasCandidate } from "./matcher.js"; + +// ----------------------------------------------------------------------------- +// Helpers — mirrors the makeMockRpc shape used by release-mangaupdates so the +// two suites stay readable side-by-side. 
+// ----------------------------------------------------------------------------- + +interface CapturedCall { + method: string; + params: unknown; +} + +function makeMockRpc(respond: (method: string, params: unknown) => unknown): { + rpc: HostRpcClient; + calls: CapturedCall[]; +} { + const calls: CapturedCall[] = []; + // eslint-disable-next-line prefer-const + let rpc: HostRpcClient; + const writeFn = (line: string) => { + const req = JSON.parse(line.trim()) as { + id: number; + method: string; + params: unknown; + }; + calls.push({ method: req.method, params: req.params }); + let result: unknown; + let error: { code: number; message: string } | null = null; + try { + result = respond(req.method, req.params); + } catch (err) { + // Preserve HostRpcError.code so tests can simulate METHOD_NOT_FOUND etc. + const code = err instanceof HostRpcError ? err.code : -32_000; + error = { + code, + message: err instanceof Error ? err.message : "synthetic error", + }; + } + setImmediate(() => { + const payload = error + ? { jsonrpc: "2.0", id: req.id, error } + : { jsonrpc: "2.0", id: req.id, result }; + rpc.handleResponse(JSON.stringify(payload)); + }); + }; + rpc = new HostRpcClient(writeFn); + return { rpc, calls }; +} + +function mockFetchOk(body: string, etag?: string): typeof fetch { + return vi.fn().mockResolvedValue( + new Response(body, { + status: 200, + headers: etag ? { etag } : {}, + }), + ) as unknown as typeof fetch; +} + +function stubResponse(status: number, body = "", headers: Record = {}): Response { + const h = new Headers(headers); + return { + status, + statusText: "", + headers: h, + text: async () => body, + } as unknown as Response; +} + +// ----------------------------------------------------------------------------- +// Fixtures — uses the user's 1r0n example shapes. 
+// ----------------------------------------------------------------------------- + +const uploaderFeedXml = ` + + + + <![CDATA[[1r0n] Boruto - Two Blue Vortex - Volume 02 (Digital) (1r0n)]]> + https://nyaa.si/download/1.torrent + https://nyaa.si/view/1 + Mon, 04 May 2026 02:31:00 GMT + aaa + + + <![CDATA[[1r0n] Dandadan c126-142 (Digital)]]> + https://nyaa.si/download/2.torrent + https://nyaa.si/view/2 + Sun, 03 May 2026 12:00:00 GMT + bbb + + + <![CDATA[[1r0n] Some Untracked Series v1 (Digital)]]> + https://nyaa.si/download/3.torrent + https://nyaa.si/view/3 + Sat, 02 May 2026 22:00:00 GMT + ccc + + +`; + +const trackedCandidates: AliasCandidate[] = [ + { seriesId: "s-boruto", aliases: ["Boruto: Two Blue Vortex", "Boruto Two Blue Vortex"] }, + { seriesId: "s-dandadan", aliases: ["Dandadan", "ダンダダン"] }, +]; + +// ----------------------------------------------------------------------------- +// pollSubscription +// ----------------------------------------------------------------------------- + +describe("pollSubscription", () => { + it("matches and records candidates for tracked series, skipping untracked", async () => { + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "ld", deduped: false })); + const out = await pollSubscription( + rpc, + "src-1", + { kind: "user", identifier: "1r0n" }, + trackedCandidates, + { + previousEtag: null, + timeoutMs: 1000, + minConfidence: 0.7, + fetchImpl: mockFetchOk(uploaderFeedXml, '"new-etag"'), + }, + ); + expect(out.fetched).toBe(true); + expect(out.notModified).toBe(false); + expect(out.parsed).toBe(3); + // Boruto + Dandadan match; "Some Untracked Series" doesn't. 
+ expect(out.matched).toBe(2); + expect(out.recorded).toBe(2); + expect(out.etag).toBe('"new-etag"'); + + const recordCalls = calls.filter((c) => c.method === "releases/record"); + expect(recordCalls).toHaveLength(2); + const matched = recordCalls.map((c) => { + const p = c.params as { candidate: { seriesMatch: { codexSeriesId: string } } }; + return p.candidate.seriesMatch.codexSeriesId; + }); + expect(matched.sort()).toEqual(["s-boruto", "s-dandadan"]); + }); + + it("returns notModified when upstream replies 304", async () => { + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "x", deduped: false })); + const fetchImpl = vi.fn().mockResolvedValue(stubResponse(304)); + const out = await pollSubscription( + rpc, + "src-1", + { kind: "user", identifier: "1r0n" }, + trackedCandidates, + { + previousEtag: '"v1"', + timeoutMs: 1000, + minConfidence: 0.7, + fetchImpl: fetchImpl as unknown as typeof fetch, + }, + ); + expect(out.notModified).toBe(true); + expect(out.parsed).toBe(0); + expect(out.matched).toBe(0); + expect(out.upstreamStatus).toBe(304); + expect(calls.filter((c) => c.method === "releases/record")).toHaveLength(0); + }); + + it("propagates upstream 429 status without recording", async () => { + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "x", deduped: false })); + const fetchImpl = vi.fn().mockResolvedValue(stubResponse(429)); + const out = await pollSubscription( + rpc, + "src-1", + { kind: "user", identifier: "1r0n" }, + trackedCandidates, + { + previousEtag: null, + timeoutMs: 1000, + minConfidence: 0.7, + fetchImpl: fetchImpl as unknown as typeof fetch, + }, + ); + expect(out.fetched).toBe(false); + expect(out.upstreamStatus).toBe(429); + expect(calls.filter((c) => c.method === "releases/record")).toHaveLength(0); + }); + + it("attaches infoHash and format hints to the candidate payload", async () => { + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "ld", deduped: false })); + const fetchImpl = mockFetchOk(uploaderFeedXml); + 
await pollSubscription(rpc, "src-1", { kind: "user", identifier: "1r0n" }, trackedCandidates, { + previousEtag: null, + timeoutMs: 1000, + minConfidence: 0.7, + fetchImpl, + }); + const recordCalls = calls.filter((c) => c.method === "releases/record"); + const boruto = recordCalls.find((c) => { + const p = c.params as { candidate: { seriesMatch: { codexSeriesId: string } } }; + return p.candidate.seriesMatch.codexSeriesId === "s-boruto"; + }); + expect(boruto).toBeDefined(); + if (!boruto) return; + const params = boruto.params as { + candidate: { + infoHash: string | null; + formatHints: Record; + volume: number | null; + payloadUrl: string; + mediaUrl?: string | null; + mediaUrlKind?: string | null; + }; + }; + expect(params.candidate.infoHash).toBe("aaa"); + expect(params.candidate.formatHints.digital).toBe(true); + expect(params.candidate.formatHints.subscription).toBe("user:1r0n"); + expect(params.candidate.volume).toBe(2); + // Page url -> payloadUrl, .torrent -> mediaUrl with kind=torrent. 
+ expect(params.candidate.payloadUrl).toBe("https://nyaa.si/view/1"); + expect(params.candidate.mediaUrl).toBe("https://nyaa.si/download/1.torrent"); + expect(params.candidate.mediaUrlKind).toBe("torrent"); + }); + + it("falls back to torrent link as payloadUrl when guid permalink is missing", async () => { + const noPermalinkXml = ` + + + + <![CDATA[[1r0n] Dandadan c126-142 (Digital)]]> + https://nyaa.si/download/99.torrent + nyaa-99 + Sun, 03 May 2026 12:00:00 GMT + zzz + + +`; + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "ld", deduped: false })); + await pollSubscription(rpc, "src-1", { kind: "user", identifier: "1r0n" }, trackedCandidates, { + previousEtag: null, + timeoutMs: 1000, + minConfidence: 0.7, + fetchImpl: mockFetchOk(noPermalinkXml), + }); + const record = calls.find((c) => c.method === "releases/record"); + expect(record).toBeDefined(); + const cand = (record?.params as { candidate: Record }).candidate; + expect(cand.payloadUrl).toBe("https://nyaa.si/download/99.torrent"); + // Both fields would point at the same URL — skip the duplicate. 
+ expect(cand.mediaUrl).toBeUndefined(); + expect(cand.mediaUrlKind).toBeUndefined(); + }); + + it("counts deduped records as not-newly-recorded", async () => { + const { rpc } = makeMockRpc(() => ({ ledgerId: "ld", deduped: true })); + const fetchImpl = mockFetchOk(uploaderFeedXml); + const out = await pollSubscription( + rpc, + "src-1", + { kind: "user", identifier: "1r0n" }, + trackedCandidates, + { + previousEtag: null, + timeoutMs: 1000, + minConfidence: 0.7, + fetchImpl, + }, + ); + expect(out.matched).toBe(2); + expect(out.recorded).toBe(0); + }); + + it("skips items with no alias match without recording", async () => { + const { rpc, calls } = makeMockRpc(() => ({ ledgerId: "x", deduped: false })); + const fetchImpl = mockFetchOk(uploaderFeedXml); + const out = await pollSubscription( + rpc, + "src-1", + { kind: "user", identifier: "1r0n" }, + [{ seriesId: "s-other", aliases: ["Completely Unrelated Manga"] }], + { + previousEtag: null, + timeoutMs: 1000, + minConfidence: 0.7, + fetchImpl, + }, + ); + expect(out.parsed).toBe(3); + expect(out.matched).toBe(0); + expect(out.recorded).toBe(0); + expect(calls.filter((c) => c.method === "releases/record")).toHaveLength(0); + }); +}); + +// ----------------------------------------------------------------------------- +// registerSources +// ----------------------------------------------------------------------------- + +describe("registerSources", () => { + it("emits one source per subscription with stable kind:identifier keys", async () => { + const { rpc, calls } = makeMockRpc(() => ({ registered: 3, pruned: 0 })); + const result = await registerSources(rpc, [ + { kind: "user", identifier: "tsuna69" }, + { kind: "query", identifier: "LuminousScans" }, + { kind: "params", identifier: "c=3_1&q=Berserk" }, + ]); + expect(result).toEqual({ registered: 3, pruned: 0 }); + + const reg = calls.find((c) => c.method === "releases/register_sources"); + expect(reg).toBeDefined(); + if (!reg) return; + const payload = 
reg.params as { + sources: { sourceKey: string; displayName: string; kind: string; config: unknown }[]; + }; + const keys = payload.sources.map((s) => s.sourceKey); + expect(keys).toEqual(["user:tsuna69", "query:luminousscans", "params:c=3_1&q=berserk"]); + expect(payload.sources.every((s) => s.kind === "rss-uploader")).toBe(true); + // Round-trip data: config carries the original (case-preserving) subscription. + const userSrc = payload.sources[0]; + expect( + (userSrc?.config as { subscription: { identifier: string } }).subscription.identifier, + ).toBe("tsuna69"); + }); + + it("retries on METHOD_NOT_FOUND while the host installs the handler", async () => { + let calls = 0; + const { rpc } = makeMockRpc(() => { + calls++; + if (calls < 3) { + throw new HostRpcError("Method not found", -32601); + } + return { registered: 1, pruned: 0 }; + }); + const result = await registerSources(rpc, [{ kind: "user", identifier: "a" }]); + expect(result).toEqual({ registered: 1, pruned: 0 }); + expect(calls).toBe(3); + }); + + it("does not retry on non-method-not-found errors", async () => { + let calls = 0; + const { rpc } = makeMockRpc(() => { + calls++; + throw new HostRpcError("server boom", -32000); + }); + const result = await registerSources(rpc, [{ kind: "user", identifier: "a" }]); + expect(result).toBeNull(); + expect(calls).toBe(1); + }); + + it("sends an empty list when no subscriptions are configured (host wipes plugin's rows)", async () => { + const { rpc, calls } = makeMockRpc(() => ({ registered: 0, pruned: 2 })); + const result = await registerSources(rpc, []); + expect(result).toEqual({ registered: 0, pruned: 2 }); + const reg = calls.find((c) => c.method === "releases/register_sources"); + expect(reg).toBeDefined(); + expect((reg?.params as { sources: unknown[] }).sources).toEqual([]); + }); +}); diff --git a/plugins/release-nyaa/src/index.ts b/plugins/release-nyaa/src/index.ts new file mode 100644 index 00000000..ebbde59b --- /dev/null +++ 
b/plugins/release-nyaa/src/index.ts @@ -0,0 +1,511 @@ +/** + * Nyaa.si Release-Source Plugin for Codex. + * + * Polls Nyaa user / search RSS feeds for an admin-configured uploader + * allowlist and announces new releases for tracked series. Matching is + * alias-based: each parsed Nyaa title is normalized and compared to every + * tracked series' alias list. Confidence is 0.95 on exact normalized match, + * dropping to a fuzzy floor of 0.7 for near-matches; below that, the + * candidate is silently dropped (the host's threshold would reject it + * anyway). + * + * Source-row model: + * - On `onInitialize` (which the host re-runs after every config save), + * the plugin parses the admin's `uploaders` CSV and calls + * `releases/register_sources` with one entry per subscription. The host + * materializes one `release_sources` row per uploader, keyed on + * `(plugin_id, sourceKey)` where `sourceKey` is `kind:identifier` + * (e.g. `user:tsuna69`, `query:luminousscans`, `params:c=3_1&q=berserk`). + * - The host scheduler fires one `releases/poll` task per source row, so + * each uploader has its own poll cadence, ETag, and last-error status. + * + * Flow per `releases/poll`: + * 1. Recover the subscription from `params.config.subscription` (or fall + * back to parsing `params.sourceKey`). + * 2. Pull tracked-series + aliases from the host + * (`releases/list_tracked`). + * 3. Conditional GET the RSS feed using `params.etag`. + * 4. Parse each item; match against tracked aliases; emit a candidate via + * `releases/record`. + * 5. Return the new ETag and upstream status for the host's per-host + * backoff layer. 
+ */ + +import { + createLogger, + createReleaseSourcePlugin, + type HostRpcClient, + HostRpcError, + type InitializeParams, + RELEASES_METHODS, + type ReleaseCandidate, + type ReleasePollRequest, + type ReleasePollResponse, + type TrackedSeriesEntry, +} from "@ashdev/codex-plugin-sdk"; +import { + fetchSubscriptionFeed, + parseSubscriptionList, + sourceKeyToSubscription, + subscriptionToSourceKey, + type UploaderSubscription, +} from "./fetcher.js"; +import { DEFAULT_MIN_CONFIDENCE, DEFAULT_REQUEST_TIMEOUT_MS, manifest } from "./manifest.js"; +import { type AliasCandidate, type AliasMatch, matchSeriesAny } from "./matcher.js"; +import { type ParsedRssItem, parseFeed } from "./parser.js"; + +const logger = createLogger({ name: manifest.name, level: "info" }); + +// ============================================================================= +// Plugin-level state (set during initialize) +// ============================================================================= + +interface PluginState { + hostRpc: HostRpcClient | null; + /** Parsed admin uploader subscription list. */ + subscriptions: UploaderSubscription[]; + /** Hard timeout for upstream fetches. */ + requestTimeoutMs: number; + /** Minimum confidence floor — passed to the matcher's `fuzzyFloor`. */ + minConfidence: number; + /** Override base URL (for tests / mirrors). */ + baseUrl: string | null; +} + +const state: PluginState = { + hostRpc: null, + subscriptions: [], + requestTimeoutMs: DEFAULT_REQUEST_TIMEOUT_MS, + minConfidence: DEFAULT_MIN_CONFIDENCE, + baseUrl: null, +}; + +/** Reset state. Exported for tests; not part of the plugin contract. 
*/ +export function _resetState(): void { + state.hostRpc = null; + state.subscriptions = []; + state.requestTimeoutMs = DEFAULT_REQUEST_TIMEOUT_MS; + state.minConfidence = DEFAULT_MIN_CONFIDENCE; + state.baseUrl = null; +} + +// ============================================================================= +// Reverse-RPC wrappers +// ============================================================================= + +interface ListTrackedResponse { + tracked: TrackedSeriesEntry[]; + nextOffset?: number; +} + +interface RecordResponse { + ledgerId: string; + deduped: boolean; +} + +async function listTracked( + rpc: HostRpcClient, + sourceId: string, + offset: number, + limit: number, +): Promise { + return rpc.call(RELEASES_METHODS.LIST_TRACKED, { + sourceId, + offset, + limit, + }); +} + +async function recordCandidate( + rpc: HostRpcClient, + sourceId: string, + candidate: ReleaseCandidate, +): Promise { + try { + return await rpc.call(RELEASES_METHODS.RECORD, { + sourceId, + candidate, + }); + } catch (err) { + if (err instanceof HostRpcError) { + logger.warn( + `record failed for ${candidate.externalReleaseId}: ${err.message} (code ${err.code})`, + ); + } else { + const msg = err instanceof Error ? err.message : "unknown error"; + logger.warn(`record failed for ${candidate.externalReleaseId}: ${msg}`); + } + return null; + } +} + +// ============================================================================= +// Iteration helpers +// ============================================================================= + +/** + * Pull every tracked-series page from the host. We can't stream + * subscription-by-subscription because each Nyaa item has to be matched + * against the *full* alias set; partial pages would leak misses. 
+ */ +export async function fetchAllTracked( + rpc: HostRpcClient, + sourceId: string, +): Promise { + const out: AliasCandidate[] = []; + const pageSize = 200; + let offset = 0; + while (true) { + const page = await listTracked(rpc, sourceId, offset, pageSize); + for (const entry of page.tracked) { + const aliases = entry.aliases ?? []; + // Drop entries with no aliases — Nyaa matching is alias-only. + if (aliases.length === 0) continue; + out.push({ seriesId: entry.seriesId, aliases }); + } + if (page.nextOffset === undefined || page.tracked.length === 0) return out; + offset = page.nextOffset; + } +} + +// ============================================================================= +// Per-subscription poll +// ============================================================================= + +/** Outcome of a single per-subscription fetch+parse cycle. */ +export interface SubscriptionPollOutcome { + subscription: UploaderSubscription; + fetched: boolean; + notModified: boolean; + parsed: number; + matched: number; + recorded: number; + /** Of those sent to record, how many the host deduped onto an existing row. */ + deduped: number; + upstreamStatus: number; + /** New ETag returned by upstream (only set when fetched=true). */ + etag: string | null; + error: string; +} + +/** + * Build a `ReleaseCandidate` from a parsed RSS item + the matcher's verdict. + * + * Language is hardcoded to `"en"` — Nyaa releases don't carry a language tag + * in the title or RSS metadata. English-only is the right default for the + * uploader allowlist this plugin is designed around (`1r0n`, etc.); admins + * who add non-English uploaders should configure tracked series' languages + * accordingly. The host's `latest_known_*` advance gate enforces the + * per-series language list. 
+ */ +function toCandidate( + match: AliasMatch, + item: ParsedRssItem, + subscription: UploaderSubscription, +): ReleaseCandidate { + const formatHints: Record = { ...item.formatHints }; + if (item.chapterRangeEnd !== null) { + formatHints.chapterRangeEnd = item.chapterRangeEnd; + } + if (item.volumeRangeEnd !== null) { + formatHints.volumeRangeEnd = item.volumeRangeEnd; + } + formatHints.subscription = `${subscription.kind}:${subscription.identifier}`; + + // Nyaa RSS carries two URLs per item: + // : the human-readable post page (`/view/`) + // : the actual `.torrent` download + // We surface the page as `payloadUrl` (the inbox's external-link icon) + // and the torrent as `mediaUrl` with kind=torrent so the UI can render a + // second, kind-specific icon for one-click acquisition. When the page URL + // is missing we fall back to the torrent for `payloadUrl` and skip the + // separate media link to avoid pointing both icons at the same URL. + const torrentLink = item.link.length > 0 ? item.link : null; + const payloadUrl = item.pageUrl ?? torrentLink ?? `urn:nyaa:${item.externalReleaseId}`; + const hasDistinctMedia = item.pageUrl !== null && torrentLink !== null; + + return { + seriesMatch: { + codexSeriesId: match.seriesId, + confidence: match.confidence, + reason: match.reason, + }, + externalReleaseId: item.externalReleaseId, + chapter: item.chapter, + volume: item.volume, + language: "en", + groupOrUploader: item.group ?? (subscription.kind === "user" ? subscription.identifier : null), + payloadUrl, + ...(hasDistinctMedia ? { mediaUrl: torrentLink, mediaUrlKind: "torrent" as const } : {}), + infoHash: item.infoHash, + formatHints, + observedAt: item.observedAt, + }; +} + +/** + * Poll a single uploader subscription. Internal — exposed for testing. 
+ */ +export async function pollSubscription( + rpc: HostRpcClient, + sourceId: string, + subscription: UploaderSubscription, + candidates: AliasCandidate[], + options: { + previousEtag: string | null; + timeoutMs: number; + minConfidence: number; + baseUrl?: string | null; + fetchImpl?: typeof fetch; + }, +): Promise { + const result = await fetchSubscriptionFeed(subscription, options.previousEtag, null, { + fetchImpl: options.fetchImpl, + timeoutMs: options.timeoutMs, + ...(options.baseUrl ? { baseUrl: options.baseUrl } : {}), + }); + + if (result.kind === "notModified") { + return { + subscription, + fetched: true, + notModified: true, + parsed: 0, + matched: 0, + recorded: 0, + deduped: 0, + upstreamStatus: 304, + etag: null, + error: "", + }; + } + + if (result.kind === "error") { + return { + subscription, + fetched: false, + notModified: false, + parsed: 0, + matched: 0, + recorded: 0, + deduped: 0, + upstreamStatus: result.status, + etag: null, + error: result.message, + }; + } + + // result.kind === "ok" + const items = parseFeed(result.body); + let matched = 0; + let recorded = 0; + let deduped = 0; + for (const item of items) { + // Prefer the alias-list form: a `Title A / Title B` Nyaa title surfaces + // both halves in `seriesGuessAliases`, so the matcher can hit on either + // the JP or EN side of the alias separator. Falls back to the single + // guess for titles without a slash. + const guesses = + item.seriesGuessAliases.length > 0 ? 
item.seriesGuessAliases : [item.seriesGuess]; + const m = matchSeriesAny(guesses, candidates, { + fuzzyFloor: options.minConfidence, + }); + if (m === null) continue; + matched++; + const candidate = toCandidate(m, item, subscription); + const outcome = await recordCandidate(rpc, sourceId, candidate); + if (!outcome) continue; + if (outcome.deduped) { + deduped++; + } else { + recorded++; + } + } + return { + subscription, + fetched: true, + notModified: false, + parsed: items.length, + matched, + recorded, + deduped, + upstreamStatus: 200, + etag: result.etag, + error: "", + }; +} + +// ============================================================================= +// Top-level poll handler +// ============================================================================= + +/** + * Resolve the subscription this poll request is for. The host stamps every + * `release_sources` row with its plugin-defined `config` (set at register + * time), so the preferred path is `params.config.subscription`. If a row + * pre-dates the config field (e.g. created in a previous plugin version), + * fall back to parsing `params.sourceKey`. 
+ */ +function resolveSubscription(params: ReleasePollRequest): UploaderSubscription | null { + const cfg = params.config as { subscription?: unknown } | undefined | null; + const fromConfig = cfg?.subscription; + if (fromConfig && typeof fromConfig === "object") { + const obj = fromConfig as Record; + const kind = obj.kind; + const identifier = obj.identifier; + if ( + typeof identifier === "string" && + identifier.length > 0 && + (kind === "user" || kind === "query" || kind === "params") + ) { + return { kind, identifier }; + } + } + if (typeof params.sourceKey === "string" && params.sourceKey.length > 0) { + return sourceKeyToSubscription(params.sourceKey); + } + return null; +} + +async function poll(params: ReleasePollRequest, rpc: HostRpcClient): Promise { + const sourceId = params.sourceId; + const subscription = resolveSubscription(params); + if (subscription === null) { + logger.warn(`source=${sourceId} no resolvable subscription on poll request; skipping`); + return { notModified: false, upstreamStatus: 200 }; + } + + // 1. Pull tracked-series + aliases. + const tracked = await fetchAllTracked(rpc, sourceId); + if (tracked.length === 0) { + logger.info(`no tracked series with aliases for source=${sourceId}`); + return { notModified: false, upstreamStatus: 200 }; + } + + // 2. Conditional GET against this subscription's feed. + const outcome = await pollSubscription(rpc, sourceId, subscription, tracked, { + previousEtag: params.etag ?? null, + timeoutMs: state.requestTimeoutMs, + minConfidence: state.minConfidence, + ...(state.baseUrl ? 
{ baseUrl: state.baseUrl } : {}), + }); + if (outcome.error) { + logger.warn( + `source=${sourceId} ${subscription.kind}:${subscription.identifier}: ${outcome.error} (status ${outcome.upstreamStatus})`, + ); + } + + logger.info( + `poll complete: source=${sourceId} subscription=${subscription.kind}:${subscription.identifier} tracked=${tracked.length} parsed=${outcome.parsed} matched=${outcome.matched} recorded=${outcome.recorded} deduped=${outcome.deduped} status=${outcome.upstreamStatus}${outcome.notModified ? " (304)" : ""}`, + ); + + // Report counters back to the host so it can build a meaningful + // `last_summary` for the source. Without these, the host only sees the + // (empty) `candidates` payload — we record via reverse-RPC mid-poll — + // and the status badge reads "Fetched 0 items" even on a busy poll. + return { + notModified: outcome.notModified, + upstreamStatus: outcome.upstreamStatus, + parsed: outcome.parsed, + matched: outcome.matched, + recorded: outcome.recorded, + deduped: outcome.deduped, + ...(outcome.etag !== null ? { etag: outcome.etag } : {}), + }; +} + +// ============================================================================= +// Plugin Initialization +// ============================================================================= + +/** + * Send the desired-state list of source rows to the host. Called from + * `onInitialize` (after the host has installed the releases reverse-RPC + * handler) so the plugin's source rows are materialized whenever the + * config changes. + * + * Retries on `METHOD_NOT_FOUND` with linear backoff: the host installs the + * releases handler shortly after `initialize` returns, and there is a small + * race window where the plugin's first reverse-RPC call may land before the + * handler is in place. 
+ */ +export async function registerSources( + rpc: HostRpcClient, + subscriptions: UploaderSubscription[], +): Promise<{ registered: number; pruned: number } | null> { + const sources = subscriptions.map((sub) => ({ + sourceKey: subscriptionToSourceKey(sub), + displayName: displayNameFor(sub), + kind: "rss-uploader" as const, + config: { subscription: { kind: sub.kind, identifier: sub.identifier } }, + })); + + const maxAttempts = 5; + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + try { + return await rpc.call<{ registered: number; pruned: number }>( + RELEASES_METHODS.REGISTER_SOURCES, + { sources }, + ); + } catch (err) { + const isMethodNotFound = err instanceof HostRpcError && err.code === -32601; + if (isMethodNotFound && attempt < maxAttempts) { + // Wait for the host to finish installing the releases reverse-RPC + // handler. Linear backoff: 50ms, 100ms, 150ms, 200ms. + await new Promise((r) => setTimeout(r, 50 * attempt)); + continue; + } + const reason = err instanceof Error ? err.message : String(err); + logger.error(`register_sources failed: ${reason}`); + return null; + } + } + return null; +} + +/** Human-readable label shown in the Release tracking settings table. */ +function displayNameFor(sub: UploaderSubscription): string { + if (sub.kind === "user") return `Nyaa: ${sub.identifier}`; + if (sub.kind === "query") return `Nyaa search: ${sub.identifier}`; + return `Nyaa params: ${sub.identifier}`; +} + +createReleaseSourcePlugin({ + manifest, + provider: { + async poll(params: ReleasePollRequest): Promise { + if (!state.hostRpc) { + throw new Error("Plugin not initialized: hostRpc client missing"); + } + return poll(params, state.hostRpc); + }, + }, + logLevel: "info", + async onInitialize(params: InitializeParams) { + state.hostRpc = params.hostRpc; + const ac = params.adminConfig ?? 
{}; + state.subscriptions = parseSubscriptionList(ac.uploaders); + if (typeof ac.requestTimeoutMs === "number" && Number.isFinite(ac.requestTimeoutMs)) { + state.requestTimeoutMs = Math.max(1_000, Math.min(ac.requestTimeoutMs, 60_000)); + } + if (typeof ac.baseUrl === "string" && ac.baseUrl.trim().length > 0) { + state.baseUrl = ac.baseUrl.trim(); + } + logger.info( + `initialized: subscriptions=${state.subscriptions.length} timeoutMs=${state.requestTimeoutMs} minConfidence=${state.minConfidence}`, + ); + + // Materialize source rows. Deferred to a microtask + retry on + // METHOD_NOT_FOUND so we run *after* the host installs the releases + // reverse-RPC handler (it does so right after `initialize` returns). + queueMicrotask(() => { + void registerSources(params.hostRpc, state.subscriptions).then((result) => { + if (result) { + logger.info(`register_sources: registered=${result.registered} pruned=${result.pruned}`); + } + }); + }); + }, +}); + +logger.info("Nyaa release-source plugin started"); diff --git a/plugins/release-nyaa/src/manifest.ts b/plugins/release-nyaa/src/manifest.ts new file mode 100644 index 00000000..9e9c31e6 --- /dev/null +++ b/plugins/release-nyaa/src/manifest.ts @@ -0,0 +1,74 @@ +import type { PluginManifest } from "@ashdev/codex-plugin-sdk"; +import packageJson from "../package.json" with { type: "json" }; + +/** Default per-fetch HTTP timeout. Nyaa is usually fast; 10s is generous. */ +export const DEFAULT_REQUEST_TIMEOUT_MS = 10_000; + +/** + * Default minimum confidence threshold for emitted candidates. Nyaa matches + * series via title parsing + alias comparison, which is fuzzier than the + * external-ID match used by MangaUpdates. The host's threshold (default 0.7) + * still filters at record time; this is the plugin-side floor below which we + * don't even bother calling `releases/record`. 
+ */ +export const DEFAULT_MIN_CONFIDENCE = 0.7; + +export const manifest = { + name: "release-nyaa", + displayName: "Nyaa Releases", + version: packageJson.version, + description: + "Announces new chapter / volume torrents for tracked series via Nyaa.si uploader RSS feeds. Limited to an admin-configured uploader allowlist; matches via title aliases.", + author: "Codex", + homepage: "https://github.com/AshDevFr/codex", + protocolVersion: "1.1", + capabilities: { + releaseSource: { + kinds: ["rss-uploader"], + requiresAliases: true, + canAnnounceChapters: true, + canAnnounceVolumes: true, + }, + }, + configSchema: { + description: + "Nyaa plugin configuration. The plugin polls the listed uploaders' RSS feeds (or, for groups without a Nyaa account, a fallback search query) and emits release candidates only for tracked series whose aliases match the parsed title. Notification-only: Codex never downloads torrents.", + fields: [ + { + key: "uploaders", + label: "Uploader Subscriptions", + description: + "List of trusted uploader handles or queries. Each entry is one of: `username` (a Nyaa user feed); `q:` (a plain site-wide search); or `q:?` (URL-style allowlisted params: `q`, `c`, `f`, `u` — e.g. `q:?c=3_1&q=Berserk` to search the Literature → English-translated category). Accepts a JSON array (preferred) or a legacy comma-separated string. Confidence stays above the rejection threshold only for entries that match a tracked series alias.", + type: "string-array" as const, + required: false, + default: [], + example: ["1r0n", "TankobonBlur", "q:LuminousScans", "q:?c=3_1&q=Berserk"], + }, + { + key: "requestTimeoutMs", + label: "Request Timeout (ms)", + description: + "How long to wait for a single Nyaa RSS fetch before giving up. Defaults to 10000 (10 seconds).", + type: "number" as const, + required: false, + default: DEFAULT_REQUEST_TIMEOUT_MS, + }, + { + key: "baseUrl", + label: "Nyaa Base URL", + description: + "Override the Nyaa base URL. 
Useful for mirrors or for tests. Defaults to https://nyaa.si.", + type: "string" as const, + required: false, + default: "https://nyaa.si", + example: "https://nyaa.si", + }, + ], + }, + userDescription: + "Watches Nyaa.si uploader feeds for new releases of tracked series. Matches by title alias — make sure your series' aliases (auto-populated from metadata or added manually in the Tracking panel) cover the way the uploader names them. Notification-only — Codex never downloads anything.", + adminSetupInstructions: + "1. Set the **Uploaders** config field to a JSON array of entries (a comma-separated string is still accepted for backwards compatibility). Each entry is one of: `username` (a Nyaa user feed, e.g. `tsuna69`), `q:` (a plain site-wide search, e.g. `q:LuminousScans`), or `q:?` (URL-style search with allowlisted keys `q`, `c`, `f`, `u`, e.g. `q:?c=3_1&q=Berserk` for the English-translated Literature category). 2. Save. The plugin restarts and the host materializes one row per entry in **Settings → Release tracking** — that's where you flip rows on/off, override the poll interval, or hit *Poll now*. 3. Make sure tracked series have aliases that match how the uploader names releases (alternate spellings, romanizations, volume-range tags). 
The plugin auto-prunes rows when you remove an entry from the list and re-save, so the Release tracking table stays in sync with this list.", +} as const satisfies PluginManifest & { + capabilities: { releaseSource: { kinds: ["rss-uploader"] } }; +}; diff --git a/plugins/release-nyaa/src/matcher.test.ts b/plugins/release-nyaa/src/matcher.test.ts new file mode 100644 index 00000000..1e57b0c6 --- /dev/null +++ b/plugins/release-nyaa/src/matcher.test.ts @@ -0,0 +1,188 @@ +import { describe, expect, it } from "vitest"; +import { + CONFIDENCE_EXACT, + DEFAULT_FUZZY_FLOOR, + diceRatio, + matchSeries, + matchSeriesAny, + normalizeAlias, +} from "./matcher.js"; + +// ----------------------------------------------------------------------------- +// normalizeAlias — must match the Rust `normalize_alias` impl +// ----------------------------------------------------------------------------- + +describe("normalizeAlias", () => { + it("lowercases and strips punctuation", () => { + expect(normalizeAlias("My Hero Academia!")).toBe("my hero academia"); + }); + + it("collapses multiple spaces, drops leading/trailing space", () => { + expect(normalizeAlias(" Berserk - Vol ")).toBe("berserk vol"); + }); + + it("strips colons and other ASCII punctuation (matches Rust impl)", () => { + expect(normalizeAlias("Re:Zero - Starting Life in Another World")).toBe( + "rezero starting life in another world", + ); + }); + + it("returns empty string for input with only punctuation", () => { + expect(normalizeAlias("!!! 
- ?!")).toBe(""); + }); + + it("preserves Unicode alphanumerics", () => { + expect(normalizeAlias("僕のヒーロー")).toBe("僕のヒーロー"); + }); +}); + +// ----------------------------------------------------------------------------- +// diceRatio — sanity checks +// ----------------------------------------------------------------------------- + +describe("diceRatio", () => { + it("returns 1.0 for identical strings", () => { + expect(diceRatio("boruto two blue vortex", "boruto two blue vortex")).toBe(1); + }); + + it("returns 0 for empty inputs", () => { + expect(diceRatio("", "x")).toBe(0); + expect(diceRatio("x", "")).toBe(0); + }); + + it("scores high for word-rearranged near-matches", () => { + const r = diceRatio("boruto two blue vortex", "boruto - two blue vortex"); + expect(r).toBeGreaterThan(0.85); + }); + + it("scores low for unrelated series", () => { + const r = diceRatio("naruto", "boruto two blue vortex"); + expect(r).toBeLessThan(0.5); + }); +}); + +// ----------------------------------------------------------------------------- +// matchSeries +// ----------------------------------------------------------------------------- + +describe("matchSeries", () => { + const candidates = [ + { seriesId: "s-boruto", aliases: ["Boruto: Two Blue Vortex", "Boruto - Two Blue Vortex"] }, + { seriesId: "s-onepiece", aliases: ["One Piece"] }, + { seriesId: "s-dandadan", aliases: ["Dandadan", "ダンダダン"] }, + ]; + + it("returns null for empty seriesGuess", () => { + expect(matchSeries("", candidates)).toBeNull(); + expect(matchSeries(" ", candidates)).toBeNull(); + }); + + it("returns null when there are no candidates", () => { + expect(matchSeries("Boruto", [])).toBeNull(); + }); + + it("emits an alias-exact match at CONFIDENCE_EXACT", () => { + const m = matchSeries("Boruto Two Blue Vortex", candidates); + expect(m).not.toBeNull(); + if (m === null) return; + expect(m.seriesId).toBe("s-boruto"); + expect(m.confidence).toBe(CONFIDENCE_EXACT); + expect(m.reason).toBe("alias-exact"); + 
expect(m.matchedAlias).toBe("Boruto: Two Blue Vortex"); + }); + + it("emits an alias-fuzzy match for a near-miss above the floor", () => { + // Add a slightly different aliasing form. + const c = [{ seriesId: "s-frieren", aliases: ["Sousou no Frieren"] }]; + const m = matchSeries("Sousou Frieren", c, { fuzzyFloor: DEFAULT_FUZZY_FLOOR }); + if (m === null) { + // Below floor is also fine for this test — exercise the explicit + // match-or-skip semantics rather than asserting a confidence value. + expect(m).toBeNull(); + return; + } + expect(m.seriesId).toBe("s-frieren"); + expect(m.reason).toBe("alias-fuzzy"); + expect(m.confidence).toBeGreaterThanOrEqual(DEFAULT_FUZZY_FLOOR); + expect(m.confidence).toBeLessThan(CONFIDENCE_EXACT); + }); + + it("rejects unrelated names below the dice floor", () => { + const m = matchSeries("Berserk", candidates); + expect(m).toBeNull(); + }); + + it("rejects matches whose Dice ratio is below MIN_DICE_RATIO even with a low floor", () => { + const c = [{ seriesId: "s-x", aliases: ["Berserk"] }]; + // Even with a permissive floor, the matcher still requires Dice ≥ 0.85. + const m = matchSeries("Naruto", c, { fuzzyFloor: 0.5 }); + expect(m).toBeNull(); + }); + + it("picks the best candidate when multiple are above the floor", () => { + const c = [ + { seriesId: "s-bad", aliases: ["Boruto Two Vortex"] }, // worse Dice + { seriesId: "s-good", aliases: ["Boruto Two Blue Vortex"] }, // exact match + ]; + const m = matchSeries("Boruto Two Blue Vortex", c); + expect(m?.seriesId).toBe("s-good"); + expect(m?.reason).toBe("alias-exact"); + }); +}); + +// ----------------------------------------------------------------------------- +// matchSeriesAny — multi-alias matcher used for `Title A / Title B` Nyaa +// titles (1r0n / LuCaZ alias convention). 
+// ----------------------------------------------------------------------------- + +describe("matchSeriesAny", () => { + const candidates = [ + { seriesId: "s-slime", aliases: ["That Time I Got Reincarnated as a Slime"] }, + { seriesId: "s-bluebox", aliases: ["Blue Box"] }, + { seriesId: "s-onepiece", aliases: ["One Piece"] }, + ]; + + it("returns null on an empty guess list", () => { + expect(matchSeriesAny([], candidates)).toBeNull(); + }); + + it("matches when only the second alias hits a tracked series", () => { + const m = matchSeriesAny( + ["Tensei Shitara Slime Datta Ken", "That Time I Got Reincarnated as a Slime"], + candidates, + ); + expect(m).not.toBeNull(); + if (m === null) return; + expect(m.seriesId).toBe("s-slime"); + expect(m.reason).toBe("alias-exact"); + expect(m.confidence).toBe(CONFIDENCE_EXACT); + }); + + it("matches when only the first alias hits a tracked series (alias-split with EN-first)", () => { + const m = matchSeriesAny(["Blue Box", "Ao no Hako"], candidates); + expect(m?.seriesId).toBe("s-bluebox"); + expect(m?.reason).toBe("alias-exact"); + }); + + it("picks the higher-confidence match when both aliases score", () => { + const c = [ + { seriesId: "s-fuzzy", aliases: ["Boruto Two Vortex"] }, // fuzzy on guess A + { seriesId: "s-exact", aliases: ["Blue Box"] }, // exact on guess B + ]; + const m = matchSeriesAny(["Boruto Two Blue Vortex", "Blue Box"], c, { + fuzzyFloor: DEFAULT_FUZZY_FLOOR, + }); + expect(m?.seriesId).toBe("s-exact"); + expect(m?.reason).toBe("alias-exact"); + }); + + it("returns null when no alias hits the floor", () => { + const m = matchSeriesAny(["Berserk", "Holyland"], candidates); + expect(m).toBeNull(); + }); + + it("falls back to single-string semantics when given one alias", () => { + expect(matchSeriesAny(["One Piece"], candidates)?.seriesId).toBe("s-onepiece"); + expect(matchSeriesAny(["Berserk"], candidates)).toBeNull(); + }); +}); diff --git a/plugins/release-nyaa/src/matcher.ts 
b/plugins/release-nyaa/src/matcher.ts new file mode 100644 index 00000000..1644f469 --- /dev/null +++ b/plugins/release-nyaa/src/matcher.ts @@ -0,0 +1,244 @@ +/** + * Alias matcher for Nyaa releases. + * + * Nyaa identifies series only by name in the torrent title — there's no + * `nyaa_id` or other stable external ID that ties a release to a specific + * series in our DB. So matching is a two-step pipeline: + * + * 1. Normalize the parsed `seriesGuess` and every alias the host returned + * to a common shape (lowercase, alphanumeric + spaces only). This + * mirrors the `normalize_alias` function on the host + * ([src/db/entities/series_aliases.rs](src/db/entities/series_aliases.rs)) + * so a release whose normalized title exactly matches one of a series' + * stored aliases lands at confidence 0.95. + * 2. If no exact match, compute a token-level Sørensen-Dice similarity + * against every candidate alias. The highest ratio wins, scaled into a + * 0.7..0.85 confidence band; below the configured threshold we skip. + * + * The Dice ratio is more forgiving than edit distance for word-rearranged + * titles (`"Boruto Two Blue Vortex"` vs. `"Boruto - Two Blue Vortex"`) while + * still rejecting unrelated series at the threshold. We deliberately don't + * wire a heavy fuzzy-match library; the surface area is small. + */ + +/** A tracked-series candidate with its raw aliases. */ +export interface AliasCandidate { + /** Codex series UUID. */ + seriesId: string; + /** Raw aliases from `releases/list_tracked`. */ + aliases: string[]; +} + +/** A successful match. */ +export interface AliasMatch { + seriesId: string; + confidence: number; + /** Reason string surfaced in the SeriesMatch — "alias-exact" or "alias-fuzzy". */ + reason: string; + /** The matched alias (raw form, for logging). */ + matchedAlias: string; +} + +/** + * Confidence assigned on an exact normalized match. 
+ * + * Below 1.0 because we still don't have an external ID — a release titled + * `"X"` could legitimately match multiple series with that alias. The host's + * threshold treats this as a strong-but-not-certain signal. + */ +export const CONFIDENCE_EXACT = 0.95; + +/** + * Floor below which fuzzy matches don't get emitted. The host's default + * threshold is 0.7; we share that floor so plugin-side filtering doesn't + * silently second-guess host config. + */ +export const DEFAULT_FUZZY_FLOOR = 0.7; + +/** + * Anything below this Dice-coefficient is rejected outright (even before the + * confidence floor kicks in). 0.85 lets through "two-blue-vortex" vs. "two + * blue vortex" but kills "naruto" vs. "boruto two blue vortex". + */ +export const MIN_DICE_RATIO = 0.85; + +// --------------------------------------------------------------------------- +// Normalization +// --------------------------------------------------------------------------- + +/** + * Normalize an alias to the same shape the host stores in + * `series_aliases.normalized`. Mirrors the Rust `normalize_alias` impl — keep + * these in lockstep. + */ +export function normalizeAlias(input: string): string { + let out = ""; + let lastWasSpace = false; + for (const ch of input) { + // Match Rust's `is_alphanumeric()` (Unicode-aware). + if (/[\p{L}\p{N}]/u.test(ch)) { + out += ch.toLowerCase(); + lastWasSpace = false; + } else if (/\s/.test(ch) && out.length > 0 && !lastWasSpace) { + out += " "; + lastWasSpace = true; + } + // Anything else (punctuation, control, symbols) is dropped. + } + return out.endsWith(" ") ? out.slice(0, -1) : out; +} + +// --------------------------------------------------------------------------- +// Dice coefficient (token-level, character-bigram fallback) +// --------------------------------------------------------------------------- + +/** + * Sørensen-Dice coefficient on word-bigrams of the input strings (with a + * character-bigram fallback for short / single-word strings). 
+ * + * Range: 0..1, where 1.0 means identical bigram sets. + */ +export function diceRatio(a: string, b: string): number { + if (a.length === 0 || b.length === 0) return 0; + if (a === b) return 1; + + const bigramsA = bigrams(a); + const bigramsB = bigrams(b); + if (bigramsA.size === 0 || bigramsB.size === 0) return 0; + + let intersection = 0; + for (const bg of bigramsA) { + if (bigramsB.has(bg)) intersection++; + } + return (2 * intersection) / (bigramsA.size + bigramsB.size); +} + +function bigrams(s: string): Set { + const out = new Set(); + // Word bigrams first. + const words = s.split(/\s+/).filter((w) => w.length > 0); + if (words.length >= 2) { + for (let i = 0; i < words.length - 1; i++) { + out.add(`${words[i]} ${words[i + 1]}`); + } + } + // Plus character bigrams to handle word-rearrangement and short strings. + const flat = s.replace(/\s+/g, ""); + if (flat.length >= 2) { + for (let i = 0; i < flat.length - 1; i++) { + out.add(`#${flat.slice(i, i + 2)}`); + } + } else if (flat.length === 1) { + out.add(`#${flat}`); + } + return out; +} + +// --------------------------------------------------------------------------- +// Public matching entry point +// --------------------------------------------------------------------------- + +export interface MatchOptions { + /** + * Minimum confidence for a fuzzy match to be returned. Defaults to + * `DEFAULT_FUZZY_FLOOR` (0.7). Below this, the matcher returns null. + */ + fuzzyFloor?: number; +} + +/** + * Match a parsed series-guess against a list of tracked-series candidates and + * their aliases. Returns the best match or null if nothing clears the floor. + * + * On an exact normalized match against any alias of a candidate, confidence + * is `CONFIDENCE_EXACT` (0.95). If multiple candidates have aliases that + * normalize to the same form, the first one wins — that's a data-quality + * issue the host surfaces via the `latest_known_*` advance gate, not + * something the matcher can untangle alone. 
+ * + * On no exact match, the matcher computes Dice ratios across the cartesian + * product (candidates × aliases), finds the maximum, scales it from + * `[MIN_DICE_RATIO, 1.0]` into `[fuzzyFloor, 0.85]`, and returns a fuzzy + * match if the result is at or above the floor. + */ +export function matchSeries( + seriesGuess: string, + candidates: AliasCandidate[], + opts: MatchOptions = {}, +): AliasMatch | null { + const floor = opts.fuzzyFloor ?? DEFAULT_FUZZY_FLOOR; + const target = normalizeAlias(seriesGuess); + if (target.length === 0 || candidates.length === 0) return null; + + // Pass 1 — exact normalized match. + for (const c of candidates) { + for (const alias of c.aliases) { + if (normalizeAlias(alias) === target) { + return { + seriesId: c.seriesId, + confidence: CONFIDENCE_EXACT, + reason: "alias-exact", + matchedAlias: alias, + }; + } + } + } + + // Pass 2 — best fuzzy match. + let best: AliasMatch | null = null; + let bestRatio = 0; + for (const c of candidates) { + for (const alias of c.aliases) { + const ratio = diceRatio(target, normalizeAlias(alias)); + if (ratio > bestRatio) { + bestRatio = ratio; + best = { + seriesId: c.seriesId, + confidence: 0, + reason: "alias-fuzzy", + matchedAlias: alias, + }; + } + } + } + if (best === null || bestRatio < MIN_DICE_RATIO) return null; + + // Linearly scale [MIN_DICE_RATIO..1.0] → [fuzzyFloor..0.85]. + // (We cap the fuzzy ceiling below CONFIDENCE_EXACT so an alias-exact match + // is always strictly stronger than the best alias-fuzzy match.) + const ceiling = 0.85; + const span = 1 - MIN_DICE_RATIO; + const t = (bestRatio - MIN_DICE_RATIO) / span; // 0..1 inside the band + const confidence = floor + t * (ceiling - floor); + if (confidence < floor) return null; + best.confidence = Number(confidence.toFixed(4)); + return best; +} + +/** + * Match a list of alias guesses (e.g. from a `Title A / Title B` Nyaa title) + * and return the best result across them. 
+ * + * Picks the highest-confidence match across all guesses, preferring + * `alias-exact` over `alias-fuzzy` when ties exist (because exact carries a + * fixed `CONFIDENCE_EXACT` and fuzzy is bounded below it). When two guesses + * both produce alias-exact matches against different series, the first guess + * wins — that's the same precedence rule `matchSeries` applies internally + * across candidates. + */ +export function matchSeriesAny( + seriesGuesses: string[], + candidates: AliasCandidate[], + opts: MatchOptions = {}, +): AliasMatch | null { + if (seriesGuesses.length === 0) return null; + let best: AliasMatch | null = null; + for (const guess of seriesGuesses) { + const m = matchSeries(guess, candidates, opts); + if (m === null) continue; + if (best === null || m.confidence > best.confidence) { + best = m; + } + } + return best; +} diff --git a/plugins/release-nyaa/src/parser.test.ts b/plugins/release-nyaa/src/parser.test.ts new file mode 100644 index 00000000..b8dbf56b --- /dev/null +++ b/plugins/release-nyaa/src/parser.test.ts @@ -0,0 +1,481 @@ +import { describe, expect, it } from "vitest"; +import { parseFeed, parseItem, parseTitle } from "./parser.js"; + +// ----------------------------------------------------------------------------- +// parseTitle — corpus mirroring real-world Nyaa titles, including the user's +// 1r0n / mixed-format examples that motivated this phase. +// ----------------------------------------------------------------------------- + +describe("parseTitle", () => { + it("parses a 1r0n volume release with leading group token and trailing tags", () => { + const t = parseTitle("[1r0n] Boruto - Two Blue Vortex - Volume 02 (Digital) (1r0n)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.group).toBe("1r0n"); + expect(t.volume).toBe(2); + expect(t.chapter).toBeNull(); + expect(t.formatHints.digital).toBe(true); + // Series guess strips group, volume token, and parenthesized tags. 
+ expect(t.seriesGuess).toBe("Boruto Two Blue Vortex"); + }); + + it("parses a v107 short-form volume release", () => { + const t = parseTitle("[1r0n] One Piece v107 (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(107); + expect(t.chapter).toBeNull(); + expect(t.formatHints.digital).toBe(true); + expect(t.seriesGuess).toBe("One Piece"); + }); + + it("parses a single chapter release with `Chapter NNN` long form", () => { + const t = parseTitle("[1r0n] Chainsaw Man - Chapter 142 (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.chapter).toBe(142); + expect(t.volume).toBeNull(); + expect(t.seriesGuess).toBe("Chainsaw Man"); + }); + + it("parses a chapter range (the screenshot's loose-chapter shape)", () => { + const t = parseTitle("[Group] Dandadan c126-142 (2024) (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.chapter).toBe(126); + expect(t.chapterRangeEnd).toBe(142); + expect(t.volume).toBeNull(); + expect(t.formatHints.digital).toBe(true); + expect(t.seriesGuess).toBe("Dandadan"); + }); + + it("parses a volume range (`v01-14` from the user's mixed-format screenshot)", () => { + const t = parseTitle("[1r0n] Boruto v01-14 (Digital) (1r0n)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(14); + expect(t.seriesGuess).toBe("Boruto"); + }); + + it("parses a Tankobon-Blur Vol. NN release", () => { + const t = parseTitle("[Tankobon Blur] Solo Leveling Vol. 
13 (2024) (Digital) (Tankobon Blur)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.group).toBe("Tankobon Blur"); + expect(t.volume).toBe(13); + expect(t.formatHints.digital).toBe(true); + expect(t.seriesGuess).toBe("Solo Leveling"); + }); + + it("parses a plain release without leading group token", () => { + const t = parseTitle("Berserk Volume 42 (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.group).toBeNull(); + expect(t.volume).toBe(42); + expect(t.formatHints.digital).toBe(true); + expect(t.seriesGuess).toBe("Berserk"); + }); + + it("preserves decimal chapters", () => { + const t = parseTitle("[Group] Some Series c47.5 (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.chapter).toBe(47.5); + expect(t.seriesGuess).toBe("Some Series"); + }); + + it("captures JXL format hint", () => { + const t = parseTitle("[1r0n] One Piece v107 (Digital) (JXL)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.formatHints.digital).toBe(true); + expect(t.formatHints.jxl).toBe(true); + }); + + it("returns null for an empty title", () => { + expect(parseTitle("")).toBeNull(); + expect(parseTitle(" ")).toBeNull(); + }); + + it("falls back to the raw title (no axis info) when no chapter/volume tokens are present", () => { + const t = parseTitle("Just Some Manga Tanks (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.chapter).toBeNull(); + expect(t.volume).toBeNull(); + expect(t.seriesGuess).toBe("Just Some Manga Tanks"); + expect(t.formatHints.digital).toBe(true); + }); + + it("handles the 'ch.' 
prefix variant alongside the c.NNN form", () => { + const t = parseTitle("[Group] My Series ch.143 (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.chapter).toBe(143); + expect(t.seriesGuess).toBe("My Series"); + }); + + it("ignores leading bracketed token when not followed by content", () => { + const t = parseTitle("[Group]"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.group).toBe("Group"); + expect(t.seriesGuess).toBe(""); + }); +}); + +// ----------------------------------------------------------------------------- +// parseTitle — multi-uploader/aggregated bundles (1r0n, danke-Empire, LuCaZ). +// +// Real-world bundle titles mix volumes, bare-numeric chapter ranges, and +// numeric "extras" (chapters not yet collected into a tankobon). Patterns we +// see in the wild: +// +// v01-09 → volume range only +// v01-111 + 1134-1176 → vol range + bare chapter range, "+" joined +// v01-28,125-137 → vol range + bare chapter range, "," joined +// v01-31, 276-293 → same, with whitespace after comma +// v01,009-090 → single volume + bare chapter range +// v01-16 + 70 → vol range + single bare chapter +// 001-069 as v01-16 + 70 → bare chapter range followed by vol info +// 031-037 → bare chapter range as primary identifier +// +// Bare numeric ranges are zero-padded to 3 digits in the corpus, which we use +// to distinguish chapter tokens from incidental numbers in series names. +// Year ranges always live inside `(...)` so they stay clear of the chapter +// tokenizer. 
+// ----------------------------------------------------------------------------- + +describe("parseTitle — aggregated bundle releases", () => { + it("After God v01-09 — volume range only", () => { + const t = parseTitle("After God v01-09 (2024-2026) (Digital) (1r0n)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(9); + expect(t.chapter).toBeNull(); + expect(t.chapterRangeEnd).toBeNull(); + expect(t.seriesGuess).toBe("After God"); + expect(t.formatHints.digital).toBe(true); + }); + + it("One Piece v001-111 + 1134-1176 — vol range + bare chapter range joined by '+'", () => { + const t = parseTitle("One Piece v001-111 + 1134-1176 (2003-2026) (Digital) (1r0n)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(111); + expect(t.chapter).toBe(1134); + expect(t.chapterRangeEnd).toBe(1176); + expect(t.seriesGuess).toBe("One Piece"); + }); + + it("Tensei… v01-28,125-137 — alias-split series, comma-joined chapter range", () => { + const t = parseTitle( + "Tensei Shitara Slime Datta Ken / That Time I Got Reincarnated as a Slime v01-28,125-137 (2017-2025) (Digital) (danke-Empire + nao)", + ); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(28); + expect(t.chapter).toBe(125); + expect(t.chapterRangeEnd).toBe(137); + // Primary guess is the first alias. + expect(t.seriesGuess).toBe("Tensei Shitara Slime Datta Ken"); + // Both halves of `A / B` are exposed for matching. 
+ expect(t.seriesGuessAliases).toEqual([ + "Tensei Shitara Slime Datta Ken", + "That Time I Got Reincarnated as a Slime", + ]); + }); + + it("Chillin'… 001-069 as v01-16 + 70 — bare chapter range + 'as' + vol range + extra chapter", () => { + const t = parseTitle( + "Chillin' in My 30s after Getting Fired from the Demon King's Army 001-069 as v01-16 + 70 (Digital) (danke-Empire + Aquila) [Oak]", + ); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(16); + // Aggregated min/max across all chapter tokens in the release header. + expect(t.chapter).toBe(1); + expect(t.chapterRangeEnd).toBe(70); + expect(t.seriesGuess).toBe("Chillin' in My 30s after Getting Fired from the Demon King's Army"); + }); + + it("Never Say Ugly 031-037 — bare chapter range only, no volume token", () => { + const t = parseTitle( + "Never Say Ugly 031-037 (2024-2025) (Digital) (danke-Empire, Stick, Aquila)", + ); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBeNull(); + expect(t.chapter).toBe(31); + expect(t.chapterRangeEnd).toBe(37); + expect(t.seriesGuess).toBe("Never Say Ugly"); + }); + + it("Edens Zero v01-31, 276-293 — comma+space separator", () => { + const t = parseTitle( + "Edens Zero v01-31, 276-293 (2018-2025) (Digital) (danke-Empire, DeadMan, SlikkyOak)", + ); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(31); + expect(t.chapter).toBe(276); + expect(t.chapterRangeEnd).toBe(293); + expect(t.seriesGuess).toBe("Edens Zero"); + }); + + it("Ultimate Exorcist Kiyoshi v01,009-090 — single volume + bare chapter range", () => { + const t = parseTitle("Ultimate Exorcist Kiyoshi v01,009-090 (2024-2026) (Digital) (LuCaZ)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBeNull(); + expect(t.chapter).toBe(9); + expect(t.chapterRangeEnd).toBe(90); + 
expect(t.seriesGuess).toBe("Ultimate Exorcist Kiyoshi"); + }); + + it("Boruto - Two Blue Vortex v01-05,021-033 — subtitle dash + comma-joined ranges", () => { + const t = parseTitle("Boruto - Two Blue Vortex v01-05,021-033 (2025-2026) (Digital) (LuCaZ)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(5); + expect(t.chapter).toBe(21); + expect(t.chapterRangeEnd).toBe(33); + expect(t.seriesGuess).toBe("Boruto Two Blue Vortex"); + }); + + it("Ao no Hako / Blue Box v01-20,181-240 — alias-split + comma chapters", () => { + const t = parseTitle("Ao no Hako / Blue Box v01-20,181-240 (2022-2026) (Digital) (LuCaZ)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(20); + expect(t.chapter).toBe(181); + expect(t.chapterRangeEnd).toBe(240); + expect(t.seriesGuess).toBe("Ao no Hako"); + expect(t.seriesGuessAliases).toEqual(["Ao no Hako", "Blue Box"]); + }); + + it("Ashita no Joe — Omnibus Edition is captured as a format hint", () => { + const t = parseTitle( + "Ashita no Joe - Fighting for Tomorrow v01-02 (2024-2025) (Omnibus Edition) (Digital) (LuCaZ)", + ); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(2); + expect(t.formatHints.digital).toBe(true); + expect(t.formatHints.omnibus).toBe(true); + expect(t.seriesGuess).toBe("Ashita no Joe Fighting for Tomorrow"); + }); + + it("Dragon Ball Super v01-23,101-104", () => { + const t = parseTitle("Dragon Ball Super v01-23,101-104 (2017-2025) (Digital) (LuCaZ)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(23); + expect(t.chapter).toBe(101); + expect(t.chapterRangeEnd).toBe(104); + expect(t.seriesGuess).toBe("Dragon Ball Super"); + }); + + it("Becoming a Princess Knight... 
v01-04 — apostrophe-free long title with vol range only", () => { + const t = parseTitle( + "Becoming a Princess Knight and Working at a Yuri Brothel v01-04 (2024-2025) (Digital) (LuCaZ)", + ); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(4); + expect(t.chapter).toBeNull(); + expect(t.seriesGuess).toBe("Becoming a Princess Knight and Working at a Yuri Brothel"); + }); + + it("Amagami-san / Tying the Knot — alias-split + chapter range", () => { + const t = parseTitle( + "Amagami-san Chi no Enmusubi / Tying the Knot with an Amagami Sister v01-17,150-172 (2022-2025) (Digital) (LuCaZ)", + ); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(17); + expect(t.chapter).toBe(150); + expect(t.chapterRangeEnd).toBe(172); + expect(t.seriesGuess).toBe("Amagami-san Chi no Enmusubi"); + expect(t.seriesGuessAliases).toEqual([ + "Amagami-san Chi no Enmusubi", + "Tying the Knot with an Amagami Sister", + ]); + }); +}); + +// ----------------------------------------------------------------------------- +// parseTitle — defensive: bare-number heuristics must not eat year ranges, +// and short bare numbers (1-2 digits) must not be promoted to chapters. +// ----------------------------------------------------------------------------- + +describe("parseTitle — bare-number safety net", () => { + it("does not treat a year range inside (...) as a chapter range", () => { + const t = parseTitle("Some Series v01-05 (2018-2025) (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.chapter).toBeNull(); + expect(t.chapterRangeEnd).toBeNull(); + expect(t.volume).toBe(1); + expect(t.volumeRangeEnd).toBe(5); + }); + + it("ignores bare 1-2 digit numbers in the series name (avoids false positives)", () => { + // "30s" appeared in the Chillin' title; standalone short numbers shouldn't + // be picked up as chapters. 
+ const t = parseTitle("My 30s Adventure v01 (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.chapter).toBeNull(); + expect(t.volume).toBe(1); + expect(t.seriesGuess).toBe("My 30s Adventure"); + }); + + it("does not split on '/' when there is no surrounding spacing (URL-like fragments)", () => { + // Defensive: only ` / ` (spaced slash) is treated as an alias separator. + const t = parseTitle("AC/DC Tales v01 (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.seriesGuess).toBe("AC/DC Tales"); + expect(t.seriesGuessAliases).toEqual(["AC/DC Tales"]); + }); + + it("alias-split returns single-element array when no slash present", () => { + const t = parseTitle("Berserk Volume 42 (Digital)"); + expect(t).not.toBeNull(); + if (t === null) return; + expect(t.seriesGuessAliases).toEqual(["Berserk"]); + }); +}); + +// ----------------------------------------------------------------------------- +// parseItem +// ----------------------------------------------------------------------------- + +const sampleItem = ` + + <![CDATA[[1r0n] Chainsaw Man - Chapter 142 (Digital)]]> + https://nyaa.si/download/12345.torrent + https://nyaa.si/view/12345 + Mon, 04 May 2026 02:31:00 GMT + ABC123def456 + +`; + +describe("parseItem", () => { + it("extracts title, link, guid, infoHash, and pubDate", () => { + const item = parseItem(sampleItem); + expect(item).not.toBeNull(); + if (item === null) return; + expect(item.title).toBe("[1r0n] Chainsaw Man - Chapter 142 (Digital)"); + expect(item.link).toBe("https://nyaa.si/download/12345.torrent"); + expect(item.pageUrl).toBe("https://nyaa.si/view/12345"); + expect(item.externalReleaseId).toBe("https://nyaa.si/view/12345"); // guid wins + expect(item.infoHash).toBe("abc123def456"); // lowercased + expect(item.chapter).toBe(142); + expect(item.seriesGuess).toBe("Chainsaw Man"); + expect(new Date(item.observedAt).toISOString()).toBe("2026-05-04T02:31:00.000Z"); + }); + + it("returns null when 
title is missing", () => {
    expect(parseItem("x")).toBeNull();
  });

  it("returns null pageUrl when guid is not a /view/ permalink", () => {
    // NOTE(review): fixture tags restored; they had been stripped in transit.
    const xml = `
      <title><![CDATA[[1r0n] Foo c.1 (Digital)]]></title>
      <link>https://nyaa.si/download/9.torrent</link>
      <guid isPermaLink="false">tag:nyaa.si,2026:9</guid>
    `;
    const item = parseItem(xml);
    expect(item).not.toBeNull();
    if (item === null) return;
    expect(item.pageUrl).toBeNull();
  });

  it("derives a deterministic externalReleaseId from infoHash when guid+link missing", () => {
    const xml = `
      <title><![CDATA[[1r0n] Foo c.1 (Digital)]]></title>
      <nyaa:infoHash>DEADBEEF</nyaa:infoHash>
    `;
    const item = parseItem(xml);
    expect(item).not.toBeNull();
    if (item === null) return;
    expect(item.externalReleaseId).toBe("urn:btih:deadbeef");
  });

  it("uses a hashed fallback when guid, link, and infoHash are all missing", () => {
    const xml = `
      <title><![CDATA[Foo c.1 (Digital)]]></title>
      <pubDate>Mon, 04 May 2026 02:31:00 GMT</pubDate>
    `;
    const item = parseItem(xml);
    expect(item).not.toBeNull();
    if (item === null) return;
    expect(item.externalReleaseId).toMatch(/^t:[a-z0-9]+$/);
  });
});

// -----------------------------------------------------------------------------
// parseFeed — full RSS body
// -----------------------------------------------------------------------------

// NOTE(review): element tags restored from the assertions; the last item is
// deliberately malformed (empty title) to exercise the drop path.
const fullFeedXml = `
<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0" xmlns:nyaa="https://nyaa.si/xmlns/nyaa">
  <channel>
    <title>Nyaa - 1r0n's torrents</title>
    <item>
      <title><![CDATA[[1r0n] Boruto - Two Blue Vortex - Volume 02 (Digital) (1r0n)]]></title>
      <link>https://nyaa.si/download/1.torrent</link>
      <guid isPermaLink="true">https://nyaa.si/view/1</guid>
      <pubDate>Mon, 04 May 2026 02:31:00 GMT</pubDate>
      <nyaa:infoHash>aaa</nyaa:infoHash>
    </item>
    <item>
      <title><![CDATA[[1r0n] Boruto v01-14 (Digital) (1r0n)]]></title>
      <link>https://nyaa.si/download/2.torrent</link>
      <guid isPermaLink="true">https://nyaa.si/view/2</guid>
      <pubDate>Sun, 03 May 2026 12:00:00 GMT</pubDate>
      <nyaa:infoHash>bbb</nyaa:infoHash>
    </item>
    <item>
      <title><![CDATA[[1r0n] Dandadan c126-142 (2024) (Digital)]]></title>
      <link>https://nyaa.si/download/3.torrent</link>
      <guid isPermaLink="true">https://nyaa.si/view/3</guid>
      <pubDate>Sat, 02 May 2026 22:00:00 GMT</pubDate>
      <nyaa:infoHash>ccc</nyaa:infoHash>
    </item>
    <item>
      <title></title>
      <link>https://nyaa.si/download/4.torrent</link>
    </item>
  </channel>
</rss>
`;

describe("parseFeed", () => {
  it("parses every well-formed item and silently drops malformed ones", () => {
    const items = parseFeed(fullFeedXml);
    expect(items).toHaveLength(3); // empty-title item dropped
    expect(items.map((i) => i.seriesGuess)).toEqual([
      "Boruto Two Blue Vortex",
      "Boruto",
      "Dandadan",
    ]);
    expect(items[0]?.volume).toBe(2);
    expect(items[1]?.volumeRangeEnd).toBe(14);
    expect(items[2]?.chapterRangeEnd).toBe(142);
  });
});
diff --git a/plugins/release-nyaa/src/parser.ts b/plugins/release-nyaa/src/parser.ts
new file mode 100644
index 00000000..ee291093
--- /dev/null
+++ b/plugins/release-nyaa/src/parser.ts
@@ -0,0 +1,529 @@
/**
 * RSS parser for Nyaa.si feeds.
 *
 * Nyaa's RSS namespace exposes one extra element per item that we care about
 * (`<nyaa:infoHash>`), plus the standard `<title>`, `<link>`, `<guid>`,
 * `<pubDate>`, and `<description>` fields. We pull all of them with the same
 * lightweight regex pipeline used for MangaUpdates — no heavy XML dep.
 *
 * Parsing the title is where most of the work is. Real-world examples
 * (sourced from production Nyaa feeds, 1r0n's subscription):
 *
 *   "[1r0n] Boruto - Two Blue Vortex - Volume 02 (Digital) (1r0n)"
 *   "[1r0n] One Piece v107 (Digital)"
 *   "[1r0n] Chainsaw Man - Chapter 142 (Digital)"
 *   "[Group] Dandadan c126-142 (2024) (Digital)"
 *   "[Tankobon Blur] Solo Leveling Vol. 13 (2024) (Digital) (Tankobon Blur)"
 *   "Berserk Volume 42 (Digital)"
 *
 * The shape we want out of each item:
 *   - parsed series guess (alias-free string used for matching)
 *   - chapter / volume axes (decimals supported on chapter)
 *   - format hints (Digital / JXL / etc.)
 *   - uploader-tagged group (if encoded as a leading `[Group]` token)
 *
 * Nyaa titles are noisy; we keep parsing best-effort and surface confidence
 * downstream from the alias matcher rather than failing here.
 */

/** Parsed item, pre-`ReleaseCandidate`. */
export interface ParsedRssItem {
  /** Stable per-source ID. Derived from guid, link, or infoHash (in that order). */
  externalReleaseId: string;
  /** Original title. Useful for debugging / fallback.
 */
  title: string;
  /** Series-name guess after stripping volume/chapter/group/format tokens. */
  seriesGuess: string;
  /**
   * All alias candidates extracted from the series-name region. When the title
   * uses `Title A / Title B` (a common 1r0n / LuCaZ convention for "JP name /
   * EN name"), both halves are surfaced here so the matcher can score against
   * either. For titles without a slash separator this is a single-element
   * array equal to `[seriesGuess]`.
   */
  seriesGuessAliases: string[];
  /** Chapter number (decimals supported). Null when the title carries no chapter token. */
  chapter: number | null;
  /** Trailing chapter of a chapter range (e.g. `c126-142` → 126..142). */
  chapterRangeEnd: number | null;
  /** Volume number. Null when the title carries no volume token. */
  volume: number | null;
  /** Trailing volume of a volume range (e.g. `v01-14` → 1..14). */
  volumeRangeEnd: number | null;
  /** Leading `[Group]` token, if any. */
  group: string | null;
  /** Format hints as a small dictionary (digital, jxl, ...). */
  formatHints: Record<string, boolean>;
  /** RSS `<link>` value. On Nyaa this is the `.torrent` download URL. */
  link: string;
  /**
   * Permalink to the release post page (e.g. `https://nyaa.si/view/12345`),
   * derived from the `<guid isPermaLink="true">` tag. Null when the guid is
   * missing or doesn't look like a post URL.
   */
  pageUrl: string | null;
  /** `nyaa:infoHash` value, lowercased; null if missing. */
  infoHash: string | null;
  /** ISO-8601 timestamp. Falls back to "now" if pubDate is missing/invalid.
*/ + observedAt: string; +} + +// ----------------------------------------------------------------------------- +// XML helpers (mirror release-mangaupdates conventions) +// ----------------------------------------------------------------------------- + +function decodeXmlText(raw: string): string { + let s = raw.trim(); + const cdataMatch = s.match(/^<!\[CDATA\[([\s\S]*?)]]>$/); + if (cdataMatch?.[1] !== undefined) { + s = cdataMatch[1]; + } + return s + .replace(/&/g, "&") + .replace(/</g, "<") + .replace(/>/g, ">") + .replace(/"/g, '"') + .replace(/'/g, "'") + .replace(/'/g, "'"); +} + +/** Pull the first `<tag>` text content from an XML fragment, or null. */ +function extractTagText(xml: string, tag: string): string | null { + // Escape `:` for namespaced tags (e.g. `nyaa:infoHash`). + const safeTag = tag.replace(/:/g, "\\:"); + const re = new RegExp(`<${safeTag}[^>]*>([\\s\\S]*?)</${safeTag}>`, "i"); + const m = xml.match(re); + if (!m?.[1]) return null; + return decodeXmlText(m[1]); +} + +function splitItems(xml: string): string[] { + const out: string[] = []; + const re = /<item\b[^>]*>([\s\S]*?)<\/item>/gi; + for (;;) { + const match = re.exec(xml); + if (match === null) break; + if (match[1] !== undefined) out.push(match[1]); + } + return out; +} + +// ----------------------------------------------------------------------------- +// Title parsing +// ----------------------------------------------------------------------------- + +/** + * Strip a leading `[Group]` token off the title and return both pieces. + * If the title has no leading bracketed token, returns `{ rest: title, + * group: null }`. + */ +function extractLeadingGroup(title: string): { rest: string; group: string | null } { + const m = title.match(/^\s*\[([^\]]+)\]\s*(.*)$/); + if (!m?.[1]) return { rest: title, group: null }; + const group = m[1].trim(); + const rest = m[2] ?? ""; + return { rest, group: group.length > 0 ? 
group : null }; +} + +/** + * Strip every `(...)` group from a string. Used to keep year ranges, uploader + * credits, and format-hint tags out of the chapter/volume tokenizer — those + * always live inside parentheses, so anything inside them must not be + * interpreted as release-info. + */ +function stripParens(s: string): string { + return s.replace(/\([^)]*\)/g, " "); +} + +/** + * Locate the start of the "release-info span" — the offset in `s` (which has + * already had `(...)` groups blanked) where chapter/volume tokens begin. + * + * Anchors, in priority order: + * 1. A `v##`, `vol.##`, `volume ##` token (with or without a range). + * 2. A bare numeric range with both sides at 3+ digits (`031-037`, + * `001-069`). Two-digit forms are rejected to avoid false positives + * inside series names (`30s`, `My 100`, etc.). + * 3. A `c##` / `ch.##` / `Chapter ##` token. + * + * Returns the index of the anchor, or -1 if no release-info is present (the + * whole string is then treated as a series name). + */ +function findReleaseInfoStart(s: string): number { + const anchors: RegExp[] = [ + /\b(?:v|vol|volume)\.?\s*[0-9]+/i, + /\b[0-9]{3,4}\s*[-–]\s*[0-9]{3,4}\b/, + /\b(?:c|ch|chapter)\.?\s*[0-9]+/i, + ]; + let best = -1; + for (const re of anchors) { + const m = s.match(re); + if (m && m.index !== undefined && (best === -1 || m.index < best)) { + best = m.index; + } + } + return best; +} + +/** + * Spread tokens are the comma- / `+`- / whitespace- / `as`-separated atoms + * that make up the release-info span: + * + * - `volume` : single volume number (`v01`, `Vol. 13`) + * - `volRange` : volume range (`v01-14`) + * - `chapter` : single chapter number (`c143`, bare `70`) + * - `chapRange` : chapter range (`c126-142`, bare `031-037`) + * + * The tokenizer scans left-to-right and consumes one token per match. Bare + * numeric tokens are only accepted *after* the release-info anchor — see + * `findReleaseInfoStart` — so series-name digits don't leak in. 
+ */ +type SpreadToken = + | { kind: "volume"; value: number } + | { kind: "volRange"; start: number; end: number } + | { kind: "chapter"; value: number } + | { kind: "chapRange"; start: number; end: number }; + +/** + * Tokenize the release-info span into volume/chapter atoms. + * + * `s` should be the parens-stripped substring starting at the release-info + * anchor. The tokenizer is intentionally permissive about separators (commas, + * `+`, whitespace, `as`) — we just consume tokens greedily and aggregate + * downstream. + */ +function tokenizeReleaseInfo(s: string): SpreadToken[] { + const tokens: SpreadToken[] = []; + + // Match either a prefixed volume/chapter token, or a bare numeric range / + // single. The order in the alternation matters: ranges must be tried before + // single tokens, and prefixed forms must be tried before bare numerics so + // we don't mis-classify `v05` as bare-chapter `5`. + // + // 1. `v##-##` / `vol.##-##` / `volume ##-##` → volRange + // 2. `v##` / `vol.##` / `volume ##` → volume + // 3. `c##.##-##.##` / `ch.##-##` / `Chapter ##-##` → chapRange + // 4. `c##.##` / `ch.##` / `Chapter ##` → chapter + // 5. bare `###-###` (3+ digits each side) → chapRange + // 6. bare `##` (1+ digits) — only matches *after* the first anchor token + // has been emitted, see `acceptShortBare` below. Lets us pick up + // "extra" chapters expressed as short numerics (`+ 70`) without + // promoting incidental name-region digits. + const tokenRe = new RegExp( + [ + "\\b(?<vrs>v|vol|volume)\\.?\\s*([0-9]+)\\s*[-–]\\s*([0-9]+)\\b", + "\\b(?<vss>v|vol|volume)\\.?\\s*([0-9]+)\\b", + "\\b(?<crs>c|ch|chapter)\\.?\\s*([0-9]+(?:\\.[0-9]+)?)\\s*[-–]\\s*([0-9]+(?:\\.[0-9]+)?)\\b", + "\\b(?<css>c|ch|chapter)\\.?\\s*([0-9]+(?:\\.[0-9]+)?)\\b", + "\\b(?<brs>)([0-9]{3,4})\\s*[-–]\\s*([0-9]{3,4})\\b", + "\\b(?<bss>)([0-9]{1,4})\\b", + ].join("|"), + "gi", + ); + + for (;;) { + const m = tokenRe.exec(s); + if (m === null) break; + const groups = m.groups ?? 
{}; + if (groups.vrs !== undefined) { + const start = Number.parseInt(m[2] ?? "", 10); + const end = Number.parseInt(m[3] ?? "", 10); + if (Number.isFinite(start) && Number.isFinite(end)) { + tokens.push({ kind: "volRange", start, end }); + } + continue; + } + if (groups.vss !== undefined) { + const value = Number.parseInt(m[5] ?? "", 10); + if (Number.isFinite(value)) tokens.push({ kind: "volume", value }); + continue; + } + if (groups.crs !== undefined) { + const start = Number.parseFloat(m[7] ?? ""); + const end = Number.parseFloat(m[8] ?? ""); + if (Number.isFinite(start) && Number.isFinite(end)) { + tokens.push({ kind: "chapRange", start, end }); + } + continue; + } + if (groups.css !== undefined) { + const value = Number.parseFloat(m[10] ?? ""); + if (Number.isFinite(value)) tokens.push({ kind: "chapter", value }); + continue; + } + if (groups.brs !== undefined) { + const start = Number.parseInt(m[12] ?? "", 10); + const end = Number.parseInt(m[13] ?? "", 10); + if (Number.isFinite(start) && Number.isFinite(end)) { + tokens.push({ kind: "chapRange", start, end }); + } + continue; + } + if (groups.bss !== undefined) { + const raw = m[15] ?? ""; + const value = Number.parseInt(raw, 10); + if (!Number.isFinite(value)) continue; + // Only accept short (≤2 digit) bare numerics once we've already + // committed to a richer token; on its own a `42` is more likely a + // year fragment or noise than a chapter. 3+ digits is unambiguous in + // this corpus so we always accept it. + if (raw.length < 3 && tokens.length === 0) continue; + tokens.push({ kind: "chapter", value }); + } + } + + return tokens; +} + +/** + * Aggregate spread tokens into volume + chapter axes by taking min/max across + * each kind. Downstream matching just needs to know the span a release covers + * ("does this release include chapter X?") — a min..max window answers that + * question conservatively without picking a single canonical token. 
+ */ +function aggregateTokens(tokens: SpreadToken[]): { + volume: number | null; + volumeRangeEnd: number | null; + chapter: number | null; + chapterRangeEnd: number | null; +} { + let vMin: number | null = null; + let vMax: number | null = null; + let cMin: number | null = null; + let cMax: number | null = null; + for (const t of tokens) { + if (t.kind === "volume") { + vMin = vMin === null || t.value < vMin ? t.value : vMin; + vMax = vMax === null || t.value > vMax ? t.value : vMax; + } else if (t.kind === "volRange") { + vMin = vMin === null || t.start < vMin ? t.start : vMin; + vMax = vMax === null || t.end > vMax ? t.end : vMax; + } else if (t.kind === "chapter") { + cMin = cMin === null || t.value < cMin ? t.value : cMin; + cMax = cMax === null || t.value > cMax ? t.value : cMax; + } else { + cMin = cMin === null || t.start < cMin ? t.start : cMin; + cMax = cMax === null || t.end > cMax ? t.end : cMax; + } + } + return { + volume: vMin, + // Only emit a range-end when it actually differs from the start: a single + // volume is `volume=N, volumeRangeEnd=null`, matching the prior contract. + volumeRangeEnd: vMin !== null && vMax !== null && vMax !== vMin ? vMax : null, + chapter: cMin, + chapterRangeEnd: cMin !== null && cMax !== null && cMax !== cMin ? cMax : null, + }; +} + +/** + * Walk the parenthesized tags in the title and extract format hints. + * + * Common Nyaa hints we care about: + * - `(Digital)` → `digital` + * - `(JXL)` → `jxl` + * - `(Mag-Z)` / `(Magazine)` → `magazine` + * - `(Omnibus Edition)` / `(Omnibus)` → `omnibus` + * - `(2024)` is a year, ignored (we'd need it for naming dedup but not for filtering) + */ +function extractFormatHints(s: string): Record<string, boolean> { + const hints: Record<string, boolean> = {}; + const tagRe = /\(([^)]+)\)/g; + for (;;) { + const match = tagRe.exec(s); + if (match === null) break; + const tag = (match[1] ?? 
"").trim().toLowerCase(); + if (tag.length === 0) continue; + if (tag === "digital") hints.digital = true; + else if (tag === "jxl") hints.jxl = true; + else if (tag === "magazine" || tag === "mag-z") hints.magazine = true; + else if (tag === "webtoon") hints.webtoon = true; + else if (tag === "bw" || tag === "b&w") hints.bw = true; + else if (tag === "color") hints.color = true; + else if (tag === "omnibus" || tag === "omnibus edition") hints.omnibus = true; + } + return hints; +} + +/** + * Strip a trailing `[...]` token (e.g. `[Oak]` at the end of some + * danke-Empire releases). Mirrors `extractLeadingGroup` but at the tail and + * without surfacing the value — trailing brackets are credit, not a parsing + * signal we currently use. + */ +function stripTrailingBracket(s: string): string { + return s.replace(/\s*\[[^\]]+\]\s*$/g, "").trim(); +} + +/** + * Take the "name region" of a release title (everything before the first + * release-info anchor, with parens already stripped) and reduce it to a clean + * primary guess plus alias candidates. + * + * The name region may still contain: + * - subtitle dashes: `Boruto - Two Blue Vortex` → joined with spaces + * - alias separator: `Ao no Hako / Blue Box` → both halves returned + * + * Apostrophes and hyphenated words (`Amagami-san`, `Chillin'`) are preserved + * — the host's `normalize_alias` strips them at match time, but we want to + * keep them readable in logs and admin surfaces. + */ +function extractSeriesAliases(nameRegion: string): { + primary: string; + aliases: string[]; +} { + // Subtitle dashes: ` - `, ` – `, ` — ` are titling glue, not separators. + // Joining the halves with a single space mirrors the prior behavior the + // existing tests assert (`Boruto Two Blue Vortex`). + const dashJoined = nameRegion.replace(/\s+[-–—]\s+/g, " "); + + // Alias separator. Only ` / ` (with whitespace on both sides) splits — bare + // `/` survives so e.g. `AC/DC Tales` stays one alias. 
+ const parts = dashJoined + .split(/\s+\/\s+/) + .map((p) => p.replace(/\s+/g, " ").trim()) + .filter((p) => p.length > 0); + + if (parts.length === 0) return { primary: "", aliases: [] }; + return { primary: parts[0] ?? "", aliases: parts }; +} + +/** + * Public entry point — extract the structured fields from a single Nyaa + * release title. + * + * Returns null only if the title is empty after trimming. Otherwise returns a + * best-effort parse where the series guess may still be empty (e.g. for + * meta-bundles without a leading series name); the matcher then drops those. + */ +export function parseTitle(title: string): { + seriesGuess: string; + seriesGuessAliases: string[]; + chapter: number | null; + chapterRangeEnd: number | null; + volume: number | null; + volumeRangeEnd: number | null; + group: string | null; + formatHints: Record<string, boolean>; +} | null { + const trimmed = title.trim(); + if (trimmed.length === 0) return null; + + const { rest, group } = extractLeadingGroup(trimmed); + const formatHints = extractFormatHints(rest); + + // Blank out `(...)` groups so years and uploader credits can't be picked up + // by the release-info tokenizer, then split into name region / release-info + // region at the first chapter/volume anchor. + const flattened = stripTrailingBracket(stripParens(rest)); + const anchor = findReleaseInfoStart(flattened); + const nameRegion = anchor === -1 ? flattened : flattened.slice(0, anchor); + const infoRegion = anchor === -1 ? "" : flattened.slice(anchor); + + const tokens = tokenizeReleaseInfo(infoRegion); + const { volume, volumeRangeEnd, chapter, chapterRangeEnd } = aggregateTokens(tokens); + const { primary, aliases } = extractSeriesAliases(nameRegion); + + return { + seriesGuess: primary, + seriesGuessAliases: aliases.length > 0 ? 
aliases : [primary], + chapter, + chapterRangeEnd, + volume, + volumeRangeEnd, + group, + formatHints, + }; +} + +// ----------------------------------------------------------------------------- +// Item parsing +// ----------------------------------------------------------------------------- + +function pubDateToIso(raw: string | null): string { + if (raw) { + const d = new Date(raw); + if (!Number.isNaN(d.getTime())) return d.toISOString(); + } + return new Date().toISOString(); +} + +/** + * Pull the post-page URL out of the guid when it looks like a Nyaa + * `/view/<id>` permalink. The `<link>` tag in Nyaa feeds is the `.torrent` + * download URL, which is not what we want to surface to users. + */ +function derivePageUrl(guid: string | null): string | null { + if (!guid) return null; + const trimmed = guid.trim(); + if (trimmed.length === 0) return null; + // Match http(s)://<host>/view/<id> with optional trailing slash / query. + if (/^https?:\/\/[^/]+\/view\/[^/?#]+/i.test(trimmed)) return trimmed; + return null; +} + +function deriveExternalReleaseId( + guid: string | null, + link: string | null, + infoHash: string | null, + title: string, + pubDate: string | null, +): string { + if (guid && guid.trim().length > 0) return guid.trim(); + if (link && link.trim().length > 0) return link.trim(); + if (infoHash && infoHash.length > 0) return `urn:btih:${infoHash}`; + // Deterministic fallback: djb2-ish hash. Same algorithm MangaUpdates uses. + const fallback = `${title}|${pubDate ?? ""}`; + let h = 5381; + for (let i = 0; i < fallback.length; i++) { + h = ((h << 5) + h + fallback.charCodeAt(i)) | 0; + } + return `t:${(h >>> 0).toString(36)}`; +} + +/** + * Parse a single Nyaa `<item>` block. Returns null when the title is missing + * (truly malformed entry). 
+ */ +export function parseItem(itemXml: string): ParsedRssItem | null { + const title = extractTagText(itemXml, "title"); + if (!title) return null; + + const link = extractTagText(itemXml, "link"); + const guid = extractTagText(itemXml, "guid"); + const pubDate = extractTagText(itemXml, "pubDate"); + const infoHashRaw = extractTagText(itemXml, "nyaa:infoHash"); + const infoHash = infoHashRaw ? infoHashRaw.toLowerCase().trim() : null; + + const parsedTitle = parseTitle(title); + if (parsedTitle === null) return null; + + return { + externalReleaseId: deriveExternalReleaseId(guid, link, infoHash, title, pubDate), + title, + seriesGuess: parsedTitle.seriesGuess, + seriesGuessAliases: parsedTitle.seriesGuessAliases, + chapter: parsedTitle.chapter, + chapterRangeEnd: parsedTitle.chapterRangeEnd, + volume: parsedTitle.volume, + volumeRangeEnd: parsedTitle.volumeRangeEnd, + group: parsedTitle.group, + formatHints: parsedTitle.formatHints, + link: link ?? "", + pageUrl: derivePageUrl(guid), + infoHash, + observedAt: pubDateToIso(pubDate), + }; +} + +/** + * Parse a full Nyaa RSS feed body into structured items. Bad items (missing + * title) are dropped silently — Nyaa feeds occasionally include broken entries + * and we'd rather keep going than poison the whole poll. 
+ */ +export function parseFeed(xml: string): ParsedRssItem[] { + return splitItems(xml) + .map(parseItem) + .filter((i): i is ParsedRssItem => i !== null); +} diff --git a/plugins/release-nyaa/tsconfig.json b/plugins/release-nyaa/tsconfig.json new file mode 100644 index 00000000..ef1ca5f9 --- /dev/null +++ b/plugins/release-nyaa/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "lib": ["ES2022"], + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "sourceMap": true, + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts"] +} diff --git a/plugins/release-nyaa/vitest.config.ts b/plugins/release-nyaa/vitest.config.ts new file mode 100644 index 00000000..ae847ff6 --- /dev/null +++ b/plugins/release-nyaa/vitest.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + include: ["src/**/*.test.ts"], + }, +}); diff --git a/plugins/sdk-typescript/src/host-rpc.ts b/plugins/sdk-typescript/src/host-rpc.ts new file mode 100644 index 00000000..6048bf8e --- /dev/null +++ b/plugins/sdk-typescript/src/host-rpc.ts @@ -0,0 +1,155 @@ +/** + * Generic host reverse-RPC client. + * + * Plugins use this to call host methods outside the storage namespace — + * notably `releases/list_tracked`, `releases/record`, + * `releases/source_state/get`, and `releases/source_state/set`. The class is + * intentionally generic so future reverse-RPC namespaces can reuse it + * without a per-namespace client. 
+ * + * Wire-format and lifecycle mirror `PluginStorage`: send a JSON-RPC request + * over stdout with a unique id, and resolve when the host's response with + * the matching id arrives on stdin. The plugin server's main loop calls + * `handleResponse(line)` on every incoming response; whichever client owns + * the id resolves it (others no-op silently). + * + * The id counter starts at a high value (`1_000_000_000`) so it can never + * collide with `PluginStorage`'s sequence (`1, 2, 3, ...`). This means the + * dispatch in the server doesn't need to know which client a response + * belongs to — it can fan out to both, and at most one will match. + */ + +import { currentParentRequestId } from "./request-context.js"; +import type { JsonRpcError, JsonRpcRequest } from "./types/rpc.js"; + +/** Write function signature for sending JSON-RPC requests. */ +type WriteFn = (line: string) => void; + +/** + * Error thrown when a reverse-RPC call fails (host returned a JSON-RPC error, + * or the client was canceled). + */ +export class HostRpcError extends Error { + constructor( + message: string, + public readonly code: number, + public readonly data?: unknown, + ) { + super(message); + this.name = "HostRpcError"; + } +} + +/** + * Generic reverse-RPC client. Construct one per plugin instance and pass it + * around via `InitializeParams`. + */ +export class HostRpcClient { + // Start the counter high so it can't collide with PluginStorage's id space. + // `Number.MAX_SAFE_INTEGER` is far above this, so we have plenty of room + // before wrapping (and we never expect a single plugin lifetime to issue + // more than ~9 quintillion calls). + private nextId = 1_000_000_000; + private pendingRequests = new Map< + number, + { + resolve: (value: unknown) => void; + reject: (error: Error) => void; + } + >(); + private writeFn: WriteFn; + + /** + * @param writeFn - Optional custom write function (defaults to + * `process.stdout.write`). Useful for testing. 
+ */ + constructor(writeFn?: WriteFn) { + this.writeFn = + writeFn ?? + ((line: string) => { + process.stdout.write(line); + }); + } + + /** + * Send a JSON-RPC request to the host and resolve with the result. + * + * @param method - JSON-RPC method name (e.g. `"releases/list_tracked"`). + * @param params - Method-specific params. Pass `undefined` when the method + * takes no params. + */ + async call<T = unknown>(method: string, params?: unknown): Promise<T> { + const id = this.nextId++; + // Stamp the forward call we're inside so the host can route this + // reverse-RPC back to the originating caller's task. Lifted from the + // `request-context` async-local storage that `server.ts` sets around + // every forward-request handler. + const parent = currentParentRequestId(); + const request: JsonRpcRequest = { + jsonrpc: "2.0", + id, + method, + params, + ...(parent !== undefined ? { parentRequestId: parent } : {}), + }; + + return new Promise<T>((resolve, reject) => { + this.pendingRequests.set(id, { + resolve: (v) => resolve(v as T), + reject, + }); + try { + this.writeFn(`${JSON.stringify(request)}\n`); + } catch (err) { + this.pendingRequests.delete(id); + const message = err instanceof Error ? err.message : "Unknown write error"; + reject(new HostRpcError(`Failed to send request: ${message}`, -1)); + } + }); + } + + /** + * Process an incoming JSON-RPC response line. Returns `true` if this + * client owned the response id and resolved it, `false` otherwise (so + * other clients can try). + * + * Called by the plugin server's main loop on every response. 
+ */ + handleResponse(line: string): boolean { + const trimmed = line.trim(); + if (!trimmed) return false; + + let parsed: unknown; + try { + parsed = JSON.parse(trimmed); + } catch { + return false; + } + + const obj = parsed as Record<string, unknown>; + if (obj.method !== undefined) return false; // not a response + const rawId = obj.id; + if (typeof rawId !== "number") return false; + if (!this.pendingRequests.has(rawId)) return false; + + const pending = this.pendingRequests.get(rawId); + if (!pending) return false; + this.pendingRequests.delete(rawId); + + if ("error" in obj && obj.error) { + const err = obj.error as JsonRpcError; + pending.reject(new HostRpcError(err.message, err.code, err.data)); + } else { + pending.resolve(obj.result); + } + return true; + } + + /** Reject all pending requests (e.g. on shutdown). */ + cancelAll(): void { + for (const [, pending] of this.pendingRequests) { + pending.reject(new HostRpcError("Host RPC client stopped", -1)); + } + this.pendingRequests.clear(); + } +} diff --git a/plugins/sdk-typescript/src/index.ts b/plugins/sdk-typescript/src/index.ts index 61d427c5..f5a2ec29 100644 --- a/plugins/sdk-typescript/src/index.ts +++ b/plugins/sdk-typescript/src/index.ts @@ -63,6 +63,9 @@ export { RateLimitError, } from "./errors.js"; +// Host RPC (generic reverse-RPC client for non-storage host methods) +export { HostRpcClient, HostRpcError } from "./host-rpc.js"; + // Logger export { createLogger, Logger, type LoggerOptions, type LogLevel } from "./logger.js"; @@ -70,10 +73,12 @@ export { createLogger, Logger, type LoggerOptions, type LogLevel } from "./logge export { createMetadataPlugin, createRecommendationPlugin, + createReleaseSourcePlugin, createSyncPlugin, type InitializeParams, type MetadataPluginOptions, type RecommendationPluginOptions, + type ReleaseSourcePluginOptions, type SyncPluginOptions, } from "./server.js"; diff --git a/plugins/sdk-typescript/src/request-context.ts 
b/plugins/sdk-typescript/src/request-context.ts
new file mode 100644
index 00000000..491963ae
--- /dev/null
+++ b/plugins/sdk-typescript/src/request-context.ts
@@ -0,0 +1,44 @@
/**
 * Async-local context for the currently-handled forward request.
 *
 * When the SDK dispatches a forward call (e.g. `releases/poll`), it stores
 * the call's `id` in this context for the duration of the handler. Any
 * reverse-RPC the plugin makes while servicing that call (e.g.
 * `releases/record` via `HostRpcClient.call`) reads the id and stamps it as
 * `parentRequestId` on the outgoing request.
 *
 * The host uses `parentRequestId` to route the reverse-RPC back to the
 * originating caller's tokio task, so emitted events land in the recording
 * broadcaster scoped to that task and replay correctly in distributed
 * deployments. Without this stamping, plugins that emit events via
 * reverse-RPC would silently lose them on the worker.
 *
 * Plugin authors don't interact with this directly. The SDK's request
 * dispatch (`server.ts`) sets it; `HostRpcClient.call` reads it.
 */

import { AsyncLocalStorage } from "node:async_hooks";

// Note: `null` is a legal stored value (a forward call whose id is null);
// only `undefined` — no store on the async call stack — means "not inside a
// forward request at all".
const store = new AsyncLocalStorage<string | number | null>();

/**
 * Run `fn` with `forwardRequestId` as the current parent. Calls to
 * `currentParentRequestId()` made inside `fn` (or anything it awaits) will
 * see this value.
 */
export function runWithParentRequestId<T>(
  forwardRequestId: string | number | null,
  fn: () => Promise<T>,
): Promise<T> {
  return store.run(forwardRequestId, fn);
}

/**
 * Snapshot the current forward request id, or `undefined` if no forward
 * request is on the call stack (e.g. background timers in the plugin that
 * fire reverse-RPCs outside a forward-call context — those won't be replay-
 * eligible, by design, since they don't belong to any task).
+ */ +export function currentParentRequestId(): string | number | null | undefined { + return store.getStore(); +} diff --git a/plugins/sdk-typescript/src/server.ts b/plugins/sdk-typescript/src/server.ts index 6b9fca08..f278091a 100644 --- a/plugins/sdk-typescript/src/server.ts +++ b/plugins/sdk-typescript/src/server.ts @@ -12,16 +12,19 @@ import { createInterface } from "node:readline"; import { PluginError } from "./errors.js"; +import { HostRpcClient } from "./host-rpc.js"; import { createLogger, type Logger } from "./logger.js"; +import { runWithParentRequestId } from "./request-context.js"; import { PluginStorage } from "./storage.js"; import type { BookMetadataProvider, MetadataContentType, MetadataProvider, RecommendationProvider, + ReleaseSourceProvider, SyncProvider, } from "./types/capabilities.js"; -import type { PluginManifest } from "./types/manifest.js"; +import type { PluginManifest, ReleaseSourceCapability } from "./types/manifest.js"; import type { BookMatchParams, BookSearchParams, @@ -34,6 +37,7 @@ import type { RecommendationDismissRequest, RecommendationRequest, } from "./types/recommendations.js"; +import type { ReleasePollRequest } from "./types/releases.js"; import { JSON_RPC_ERROR_CODES, type JsonRpcRequest, type JsonRpcResponse } from "./types/rpc.js"; import type { SyncPullRequest, SyncPushRequest } from "./types/sync.js"; @@ -161,6 +165,15 @@ export interface InitializeParams { * instance — the host resolves the user context automatically. */ storage: PluginStorage; + /** + * Generic host reverse-RPC client. + * + * Use this to call host methods outside the storage namespace, notably + * the `releases/*` methods (`releases/list_tracked`, `releases/record`, + * `releases/source_state/get`, `releases/source_state/set`) for plugins + * declaring the `releaseSource` capability. 
+ */ + hostRpc: HostRpcClient; } /** @@ -196,6 +209,7 @@ function createPluginServer(options: PluginServerOptions): void { const logger = createLogger({ name: manifest.name, level: logLevel }); const prefix = label ? `${label} plugin` : "plugin"; const storage = new PluginStorage(); + const hostRpc = new HostRpcClient(); logger.info(`Starting ${prefix}: ${manifest.displayName} v${manifest.version}`); @@ -205,12 +219,13 @@ function createPluginServer(options: PluginServerOptions): void { }); rl.on("line", (line) => { - void handleLine(line, manifest, onInitialize, router, logger, storage); + void handleLine(line, manifest, onInitialize, router, logger, storage, hostRpc); }); rl.on("close", () => { logger.info("stdin closed, shutting down"); storage.cancelAll(); + hostRpc.cancelAll(); process.exit(0); }); @@ -243,13 +258,15 @@ async function handleLine( router: MethodRouter, logger: Logger, storage: PluginStorage, + hostRpc: HostRpcClient, ): Promise<void> { const trimmed = line.trim(); if (!trimmed) return; - // Try to detect storage responses before full request handling. - // Storage responses come from the host on stdin — they have id + (result|error) - // but no method field. + // Try to detect responses (storage or host-rpc) before full request handling. + // Both come from the host on stdin — they have id + (result|error) but no + // method field. The two clients use disjoint id ranges so each can claim + // ownership without coordination; whichever owns the id resolves it. 
let parsed: Record<string, unknown> | undefined; try { parsed = JSON.parse(trimmed) as Record<string, unknown>; @@ -258,8 +275,10 @@ async function handleLine( } if (parsed && isJsonRpcResponse(parsed)) { - logger.debug("Routing storage response", { id: parsed.id }); - storage.handleResponse(trimmed); + logger.debug("Routing reverse-RPC response", { id: parsed.id }); + if (!hostRpc.handleResponse(trimmed)) { + storage.handleResponse(trimmed); + } return; } @@ -271,7 +290,13 @@ async function handleLine( logger.debug(`Received request: ${request.method}`, { id: request.id }); - const response = await handleRequest(request, manifest, onInitialize, router, logger, storage); + // Run the request handler inside the parent-request async-local context. + // Reverse-RPCs the handler issues via `HostRpcClient.call` will read this + // and stamp `parentRequestId` so the host can route the call back to the + // originating task. See `request-context.ts`. + const response = await runWithParentRequestId(request.id, () => + handleRequest(request, manifest, onInitialize, router, logger, storage, hostRpc), + ); if (response !== null) { writeResponse(response); } @@ -313,6 +338,7 @@ async function handleRequest( router: MethodRouter, logger: Logger, storage: PluginStorage, + hostRpc: HostRpcClient, ): Promise<JsonRpcResponse | null> { const { method, params, id } = request; @@ -320,8 +346,10 @@ async function handleRequest( switch (method) { case "initialize": { const initParams = (params ?? {}) as InitializeParams; - // Inject the storage client so plugins can persist data + // Inject the reverse-RPC clients so plugins can persist data and + // call host-side methods (e.g. releases/list_tracked). 
initParams.storage = storage; + initParams.hostRpc = hostRpc; if (onInitialize) { await onInitialize(initParams); } @@ -334,6 +362,7 @@ async function handleRequest( case "shutdown": { logger.info("Shutdown requested"); storage.cancelAll(); + hostRpc.cancelAll(); const response: JsonRpcResponse = { jsonrpc: "2.0", id, result: null }; process.stdout.write(`${JSON.stringify(response)}\n`, () => { process.exit(0); @@ -659,3 +688,87 @@ export function createRecommendationPlugin(options: RecommendationPluginOptions) createPluginServer({ manifest, onInitialize, logLevel, label: "recommendation", router }); } + +// ============================================================================= +// Release Source Plugin +// ============================================================================= + +/** + * Validate `releases/poll` parameters. Requires a non-empty `sourceId` string; + * `etag` is optional. + */ +function validateReleasePollParams(params: unknown): ValidationError | null { + return validateStringFields(params, ["sourceId"]); +} + +/** + * Options for creating a release-source plugin. + */ +export interface ReleaseSourcePluginOptions { + /** Plugin manifest. Must declare `capabilities.releaseSource`. */ + manifest: PluginManifest & { + capabilities: { releaseSource: ReleaseSourceCapability }; + }; + /** ReleaseSourceProvider implementation. */ + provider: ReleaseSourceProvider; + /** Called when plugin receives initialize with credentials/config. */ + onInitialize?: (params: InitializeParams) => void | Promise<void>; + /** Log level (default: "info"). */ + logLevel?: "debug" | "info" | "warn" | "error"; +} + +/** + * Create and run a release-source plugin. + * + * The host calls `releases/poll` on a schedule (per `release_sources` row). + * The plugin returns candidates either inline (in the poll response) or by + * streaming `releases/record` reverse-RPC calls during the poll. Both styles + * are supported by the host. + * + * Plugins typically: + * 1. 
Fetch tracked series via `releases/list_tracked`. + * 2. For each series, GET the upstream feed (with `If-None-Match` from the + * previous ETag). + * 3. Parse + filter (language, group blocklist, etc.). + * 4. Either return all candidates in the poll response or call + * `releases/record` for each. + * 5. Persist the new ETag via `releases/source_state/set` (or include it on + * the poll response). + * + * @example + * ```typescript + * import { createReleaseSourcePlugin, type ReleaseSourceProvider } from "@ashdev/codex-plugin-sdk"; + * + * const provider: ReleaseSourceProvider = { + * async poll({ sourceId, etag }) { + * // ...fetch + parse... + * return { candidates: [...], etag: "new-etag" }; + * }, + * }; + * + * createReleaseSourcePlugin({ manifest, provider }); + * ``` + */ +export function createReleaseSourcePlugin(options: ReleaseSourcePluginOptions): void { + const { manifest, provider, onInitialize, logLevel } = options; + + if (!manifest.capabilities.releaseSource) { + throw new Error( + "manifest.capabilities.releaseSource is required for createReleaseSourcePlugin", + ); + } + + const router: MethodRouter = async (method, params, id) => { + switch (method) { + case "releases/poll": { + const err = validateReleasePollParams(params); + if (err) return invalidParamsError(id, err); + return success(id, await provider.poll(params as ReleasePollRequest)); + } + default: + return null; + } + }; + + createPluginServer({ manifest, onInitialize, logLevel, label: "release-source", router }); +} diff --git a/plugins/sdk-typescript/src/types/capabilities.ts b/plugins/sdk-typescript/src/types/capabilities.ts index 41ea3489..2faafc03 100644 --- a/plugins/sdk-typescript/src/types/capabilities.ts +++ b/plugins/sdk-typescript/src/types/capabilities.ts @@ -36,6 +36,7 @@ import type { RecommendationRequest, RecommendationResponse, } from "./recommendations.js"; +import type { ReleasePollRequest, ReleasePollResponse } from "./releases.js"; import type { ExternalUserInfo, 
SyncPullRequest, @@ -240,6 +241,38 @@ export interface RecommendationProvider { // Type Helpers // ============================================================================= +// ============================================================================= +// Release Source Provider Capability +// ============================================================================= + +/** + * Interface for plugins that announce new chapter/volume releases for tracked + * series. + * + * The host invokes `poll()` once per scheduled poll for a single + * `release_sources` row. The plugin can: + * 1. Return all candidates in the response (`ReleasePollResponse.candidates`). + * 2. Stream candidates mid-poll via the `releases/record` reverse-RPC call + * (using the storage-style RPC channel). + * 3. Mix both styles — the host treats them identically. + * + * Plugins should consult `releases/list_tracked` to discover which series are + * tracked (scoped by the manifest's `requiresAliases` / `requiresExternalIds` + * declarations) and `releases/source_state/get` to retrieve the previous + * ETag / cursor for conditional GETs. + * + * Declare this capability in the plugin manifest with `releaseSource: { ... }`. + */ +export interface ReleaseSourceProvider { + /** + * Poll the source for new releases and return any candidates. + * + * @param params - Source row to poll plus the previous ETag (if any). + * @returns Optional batch of candidates plus updated state hints. 
+ */ + poll(params: ReleasePollRequest): Promise<ReleasePollResponse>; +} + /** * Partial series metadata provider - allows implementing only some methods * Use this for testing or gradual implementation diff --git a/plugins/sdk-typescript/src/types/index.ts b/plugins/sdk-typescript/src/types/index.ts index 84289fd0..2a5c459a 100644 --- a/plugins/sdk-typescript/src/types/index.ts +++ b/plugins/sdk-typescript/src/types/index.ts @@ -10,6 +10,7 @@ export type { PartialBookMetadataProvider, PartialMetadataProvider, RecommendationProvider, + ReleaseSourceProvider, SyncProvider, } from "./capabilities.js"; @@ -18,12 +19,16 @@ export type { ConfigField, ConfigSchema, CredentialField, + JsonValue, OAuthConfig, PluginCapabilities, PluginManifest, + ReleaseSourceCapability, + ReleaseSourceKind, } from "./manifest.js"; export { hasBookMetadataProvider, + hasReleaseSource, hasSeriesMetadataProvider, } from "./manifest.js"; @@ -66,6 +71,23 @@ export type { RecommendationResponse, UserLibraryEntry, } from "./recommendations.js"; +// From releases - release-source protocol types (these match Rust exactly) +export type { + ListTrackedRequest, + ListTrackedResponse, + MediaUrlKind, + RecordRequest, + RecordResponse, + ReleaseCandidate, + ReleasePollRequest, + ReleasePollResponse, + SeriesMatch, + SourceStateGetRequest, + SourceStateSetRequest, + SourceStateView, + TrackedSeriesEntry, +} from "./releases.js"; +export { RELEASES_METHODS } from "./releases.js"; // From rpc - JSON-RPC primitives export * from "./rpc.js"; // From sync - sync protocol types (these match Rust exactly) diff --git a/plugins/sdk-typescript/src/types/manifest.ts b/plugins/sdk-typescript/src/types/manifest.ts index 55d4bf7d..d388db68 100644 --- a/plugins/sdk-typescript/src/types/manifest.ts +++ b/plugins/sdk-typescript/src/types/manifest.ts @@ -24,6 +24,47 @@ export interface CredentialField { placeholder?: string; } +/** + * Source kinds a release-source plugin can expose. 
+ * + * - `rss-uploader`: Per-uploader feed (e.g., a Nyaa user RSS feed). + * - `rss-series`: Per-series feed (e.g., MangaUpdates RSS for a single series). + * - `api-feed`: Generic API-driven feed. + * - `metadata-feed`: Metadata-derived signal (informational; usually doesn't + * write the ledger). + * + * Mirrors the Rust `ReleaseSourceKind` enum (kebab-case on the wire). + */ +export type ReleaseSourceKind = "rss-uploader" | "rss-series" | "api-feed" | "metadata-feed"; + +/** + * Release-source capability declaration. + * + * Declares both *what* the plugin can announce (chapters/volumes) and *what* + * it needs from the host (aliases, external IDs). The host uses these fields + * to scope `releases/list_tracked` responses so plugins only see data they + * asked for. + */ +export interface ReleaseSourceCapability { + /** Source kinds this plugin exposes. */ + kinds: ReleaseSourceKind[]; + /** + * Whether the plugin needs title aliases (set when the plugin matches by + * title rather than by external ID, e.g. Nyaa). + */ + requiresAliases?: boolean; + /** + * External-ID sources the plugin needs, e.g. `["mangaupdates"]` or + * `["mangadex"]`. The host filters `series_external_ids` to these sources + * when responding to `releases/list_tracked`. + */ + requiresExternalIds?: string[]; + /** Whether the plugin announces chapter-level releases. */ + canAnnounceChapters?: boolean; + /** Whether the plugin announces volume-level releases. */ + canAnnounceVolumes?: boolean; +} + /** * Plugin capabilities */ @@ -46,10 +87,34 @@ export interface PluginCapabilities { externalIdSource?: string; /** Can provide recommendations */ userRecommendationProvider?: boolean; + /** + * Release-source plugin capability. Set when this plugin announces new + * chapter/volume releases for tracked series via `releases/poll`. 
+ */ + releaseSource?: ReleaseSourceCapability; } /** - * Configuration field definition for documenting plugin config options + * Any value that can round-trip through JSON. Used for config field defaults + * and examples, which the host carries as opaque `serde_json::Value` and + * forwards verbatim to plugins. + */ +export type JsonValue = + | null + | boolean + | number + | string + | JsonValue[] + | { [key: string]: JsonValue }; + +/** + * Configuration field definition for documenting plugin config options. + * + * `type` is a free-form hint, not a wire constraint. The host never validates + * stored config against this schema — it forwards the raw JSON to the plugin, + * which parses whatever shape it expects. Common values are `"number"`, + * `"string"`, `"boolean"`, `"string-array"`, and `"object"`; renderers fall + * back to a generic JSON editor for unrecognized types. */ export interface ConfigField { /** Field name (key in JSON config) */ @@ -58,14 +123,14 @@ export interface ConfigField { label: string; /** Description of what this field does */ description?: string; - /** Field type */ - type: "number" | "string" | "boolean"; + /** Field type — free-form hint; see interface docs for common values. */ + type: string; /** Whether this field is required */ required?: boolean; /** Default value if not provided */ - default?: number | string | boolean; + default?: JsonValue; /** Example value for documentation */ - example?: number | string | boolean; + example?: JsonValue; } /** @@ -194,3 +259,15 @@ export function hasBookMetadataProvider(manifest: PluginManifest): manifest is P manifest.capabilities.metadataProvider.includes("book") ); } + +/** + * Type guard to check if manifest declares the release-source capability. 
+ */ +export function hasReleaseSource(manifest: PluginManifest): manifest is PluginManifest & { + capabilities: { releaseSource: ReleaseSourceCapability }; +} { + return ( + manifest.capabilities.releaseSource !== undefined && + manifest.capabilities.releaseSource !== null + ); +} diff --git a/plugins/sdk-typescript/src/types/releases.ts b/plugins/sdk-typescript/src/types/releases.ts new file mode 100644 index 00000000..1a061e9a --- /dev/null +++ b/plugins/sdk-typescript/src/types/releases.ts @@ -0,0 +1,300 @@ +/** + * Release-source protocol types - MUST match the Rust protocol exactly. + * + * Plugins implementing the `release_source` capability poll external sources + * for new chapter/volume releases and emit `ReleaseCandidate` rows. The host + * threshold-gates and dedups them through the `release_ledger` table. + * + * @see src/services/plugin/protocol.rs (`ReleasePollRequest`, `ReleasePollResponse`) + * @see src/services/release/candidate.rs (`ReleaseCandidate`, `SeriesMatch`) + * @see src/services/plugin/releases_handler.rs (reverse-RPC handlers) + */ + +// ============================================================================= +// Reverse-RPC method names (plugin -> host) +// ============================================================================= + +/** + * Method names for the `releases/*` reverse-RPC namespace. Plugins call these + * over the open RPC channel during `releases/poll` (or any other time). + */ +export const RELEASES_METHODS = { + /** List tracked series, scoped to what the plugin's manifest declared. */ + LIST_TRACKED: "releases/list_tracked", + /** Submit a candidate to the host's release ledger. */ + RECORD: "releases/record", + /** Get persisted per-source state (etag, last_polled_at, last_error). */ + SOURCE_STATE_GET: "releases/source_state/get", + /** Set persisted per-source state (etag only — other fields are host-owned). 
*/ + SOURCE_STATE_SET: "releases/source_state/set", + /** + * Replace the set of `release_sources` rows owned by this plugin. + * + * Plugins call this from `onInitialize` (and after any config change, which + * triggers a process restart that re-runs `onInitialize`). Each call carries + * the plugin's full desired-state list; the host upserts every entry on + * `(plugin_id, source_key)` and prunes rows whose `source_key` is not in + * the request. User-managed fields (`enabled`, `pollIntervalS`) are + * preserved across re-registrations so an admin's overrides aren't + * trampled by a plugin restart. + */ + REGISTER_SOURCES: "releases/register_sources", +} as const; + +// ============================================================================= +// ReleaseCandidate (the wire shape plugins emit) +// ============================================================================= + +/** + * Per-series match metadata attached to every candidate. + * + * - `codexSeriesId` is the host's UUID for the series. Plugins resolve this + * from `releases/list_tracked` (don't invent series IDs). + * - `confidence` (0.0..=1.0) tells the host how sure the plugin is about the + * match. The host drops below-threshold candidates (default 0.7). + * - `reason` is a short opaque string used for debugging/UI, e.g. + * `"mangaupdates_id"`, `"alias-exact"`, `"alias-fuzzy"`. + */ +export interface SeriesMatch { + codexSeriesId: string; + confidence: number; + reason: string; +} + +/** + * Release candidate emitted by a plugin. + * + * **Field semantics:** + * - `externalReleaseId`: Stable per-source ID. The first dedup key. + * `(sourceId, externalReleaseId)` is `UNIQUE` in `release_ledger`. + * - `chapter` / `volume`: At least one should be set; both is fine for a + * "vol 15 covers ch 126-142" case (the volume axis advances; the chapter + * axis advances to the volume's last chapter only if the candidate + * carries it). Decimals supported on `chapter` (e.g. 47.5). 
+ * - `language`: ISO 639-1 code, lowercase. Must be non-empty. The host's + * `latest_known_*` advance gate uses this against the per-series + * effective language list. + * - `groupOrUploader`: Scanlation group (MangaUpdates) or torrent uploader + * handle (Nyaa). Optional but strongly recommended. + * - `payloadUrl`: The link the user follows to actually consume / acquire + * the release. Must be non-empty. Conventionally a human-readable landing + * page (Nyaa view page, MangaUpdates release page). + * - `mediaUrl` / `mediaUrlKind`: Optional second URL describing how to + * actually fetch the bits — a `.torrent` file, a magnet link, or a direct + * download. Set both together; leave both unset for sources that only + * surface a landing page. The UI renders a kind-specific icon next to + * the standard external-link icon when these are present. + * - `infoHash`: Torrent info_hash if applicable. Cross-source dedup key. + * - `metadata` / `formatHints`: Free-form JSON for plugin-specific data + * (Nyaa size in bytes, MangaUpdates "is volume bundle" flag, etc.). + * - `observedAt`: When the plugin saw this entry. Used for ordering; + * bounded by `MAX_FUTURE_SKEW_S` (1h) on the host side. + */ +export interface ReleaseCandidate { + seriesMatch: SeriesMatch; + externalReleaseId: string; + chapter?: number | null; + volume?: number | null; + language: string; + formatHints?: Record<string, unknown> | null; + groupOrUploader?: string | null; + payloadUrl: string; + mediaUrl?: string | null; + mediaUrlKind?: MediaUrlKind | null; + infoHash?: string | null; + metadata?: Record<string, unknown> | null; + /** ISO-8601 timestamp. */ + observedAt: string; +} + +/** + * Classifies what `mediaUrl` points at so the UI can pick an appropriate + * icon and the host can label it consistently across sources. + * + * - `torrent`: HTTP(S) URL to a `.torrent` file. + * - `magnet`: `magnet:` URI. + * - `direct`: HTTP(S) URL to the file itself (DDL host, CDN, etc.). 
+ * - `other`: anything else; render a generic download icon. + */ +export type MediaUrlKind = "torrent" | "magnet" | "direct" | "other"; + +// ============================================================================= +// releases/list_tracked +// ============================================================================= + +export interface ListTrackedRequest { + sourceId: string; + limit?: number; + offset?: number; +} + +/** + * One tracked-series row scoped to what the plugin's manifest asked for. + * Aliases are present only when `requiresAliases: true`; external IDs are + * present only for sources the plugin listed in `requiresExternalIds`. + */ +export interface TrackedSeriesEntry { + seriesId: string; + aliases?: string[]; + /** Map keyed by external-ID source name (e.g. `{ mangaupdates: "12345" }`). */ + externalIds?: Record<string, string>; + latestKnownChapter?: number | null; + latestKnownVolume?: number | null; +} + +export interface ListTrackedResponse { + tracked: TrackedSeriesEntry[]; + nextOffset?: number; +} + +// ============================================================================= +// releases/record +// ============================================================================= + +export interface RecordRequest { + sourceId: string; + candidate: ReleaseCandidate; +} + +export interface RecordResponse { + ledgerId: string; + /** True if the row deduped onto an existing ledger entry. */ + deduped: boolean; +} + +// ============================================================================= +// releases/source_state +// ============================================================================= + +export interface SourceStateGetRequest { + sourceId: string; +} + +export interface SourceStateView { + etag?: string; + lastPolledAt?: string; + lastError?: string; + lastErrorAt?: string; +} + +export interface SourceStateSetRequest { + sourceId: string; + /** Only `etag` is plugin-writable; other fields are host-owned. 
*/ + etag?: string; +} + +// ============================================================================= +// releases/register_sources +// ============================================================================= + +/** + * One source the plugin wants the host to materialize as a `release_sources` + * row. The plugin owns the `sourceKey` namespace; the host treats it as an + * opaque string for dedup keyed on `(pluginId, sourceKey)`. + */ +export interface RegisteredSourceInput { + /** + * Stable per-plugin identifier. Reuse the same key across calls so user + * overrides (enabled, pollIntervalS) survive plugin restarts. + */ + sourceKey: string; + /** Human-readable label shown in the Release tracking settings UI. */ + displayName: string; + /** + * Must be one of the kinds the plugin declared in its + * `releaseSource.kinds` capability — the host rejects anything else. + */ + kind: "rss-uploader" | "rss-series" | "api-feed" | "metadata-feed"; + /** + * Optional opaque per-source config snapshot persisted on the row. The + * host doesn't interpret it; the plugin reads its own admin config + * directly. Useful for surfacing "what did this source originate from?" + * in the UI / logs. + */ + config?: Record<string, unknown> | null; +} + +export interface RegisterSourcesRequest { + sources: RegisteredSourceInput[]; +} + +export interface RegisterSourcesResponse { + /** Number of sources upserted (created or refreshed). */ + registered: number; + /** Number of sources removed because they were not in the request. */ + pruned: number; +} + +// ============================================================================= +// releases/poll (host -> plugin) +// ============================================================================= + +/** + * Parameters for the host's call into a release-source plugin's + * `releases/poll` handler. 
Carries the source row to poll plus any ETag the + * plugin recorded on its previous poll, plus the plugin-defined source key + * and per-source config snapshot so the plugin can dispatch directly without + * a reverse-RPC roundtrip. + */ +export interface ReleasePollRequest { + sourceId: string; + /** + * The same `sourceKey` the plugin passed to `releases/register_sources`. + * Useful when one plugin process owns multiple source rows (e.g., one per + * Nyaa uploader) and needs to know which one to poll. + */ + sourceKey?: string; + /** + * Snapshot of `release_sources.config` for this row. Plugins that stash + * per-source config on register can read it back here. + */ + config?: Record<string, unknown> | null; + etag?: string; +} + +/** + * Response from a `releases/poll` call. + * + * Plugins may also stream candidates over `releases/record` mid-poll; the + * host treats both styles identically. Use `candidates` for plugins that + * prefer to return everything at once. + * + * Plugins that stream via `releases/record` should also populate the + * counter fields (`parsed`, `matched`, `recorded`, `deduped`). Without + * them, the host can only see what came back in `candidates` and the + * source's status badge will read "Fetched 0 items" no matter what + * actually happened. + */ +export interface ReleasePollResponse { + /** Optional batch of candidates the host should evaluate and ledger. */ + candidates?: ReleaseCandidate[]; + /** New ETag observed (e.g., from the upstream feed's `ETag` header). */ + etag?: string; + /** Whether the upstream returned `304 Not Modified` (or equivalent). */ + notModified?: boolean; + /** HTTP status code observed (used by host's per-host backoff). */ + upstreamStatus?: number; + /** + * Items the plugin parsed from the upstream feed before any matching + * or threshold filtering. Streaming plugins should set this so the + * host's `last_summary` reflects upstream activity, not just the shape + * of the response payload. 
+ */ + parsed?: number; + /** + * Of those parsed, the count that matched a tracked-series alias (i.e. + * became candidates the plugin then evaluated/streamed). + */ + matched?: number; + /** + * Of those matched, the count actually inserted into the ledger + * (excludes dedupes). For plugins that stream via `releases/record`, + * this is the count of non-deduped record outcomes. + */ + recorded?: number; + /** + * Of those matched, the count the host deduped onto an existing ledger + * row. Optional; when omitted the host infers `matched - recorded`. + */ + deduped?: number; +} diff --git a/plugins/sdk-typescript/src/types/rpc.ts b/plugins/sdk-typescript/src/types/rpc.ts index 92b61990..6fc57f80 100644 --- a/plugins/sdk-typescript/src/types/rpc.ts +++ b/plugins/sdk-typescript/src/types/rpc.ts @@ -7,6 +7,15 @@ export interface JsonRpcRequest { id: string | number | null; method: string; params?: unknown; + /** + * Reverse-RPC only: id of the forward call this plugin is currently + * servicing. Tells the host to route the reverse-RPC back to the + * originating caller's task so emitted events land in that caller's + * recording broadcaster (and replay correctly in distributed + * deployments). The SDK stamps this automatically via + * `AsyncLocalStorage` — plugin authors don't set it. + */ + parentRequestId?: string | number | null; } export interface JsonRpcSuccessResponse { diff --git a/screenshots/scripts/capture.ts b/screenshots/scripts/capture.ts index bf180940..c5adba5b 100644 --- a/screenshots/scripts/capture.ts +++ b/screenshots/scripts/capture.ts @@ -92,6 +92,33 @@ async function main(): Promise<void> { console.log("⚠️ Plugins scenario not found, skipping"); } + // Series-detail extras need plugins installed (the Fetch Metadata + // submenu sources its entries from installed plugins). 
+ try { + const seriesDetail = await import("./scenarios/series-detail.js"); + scenarios.push({ name: "Series Detail", run: seriesDetail.run }); + } catch { + console.log("⚠️ Series Detail scenario not found, skipping"); + } + + // Library jobs editor needs at least one metadata-provider plugin + // installed so the provider dropdown is populated. + try { + const libraryJobs = await import("./scenarios/library-jobs.js"); + scenarios.push({ name: "Library Jobs", run: libraryJobs.run }); + } catch { + console.log("⚠️ Library Jobs scenario not found, skipping"); + } + + // Releases scenario depends on the MangaUpdates plugin being + // installed by the Plugins scenario, so it runs after. + try { + const releases = await import("./scenarios/releases.js"); + scenarios.push({ name: "Releases", run: releases.run }); + } catch { + console.log("⚠️ Releases scenario not found, skipping"); + } + // Logout runs last since it logs out and captures the login page try { const logout = await import("./scenarios/logout.js"); diff --git a/screenshots/scripts/scenarios/library-jobs.ts b/screenshots/scripts/scenarios/library-jobs.ts new file mode 100644 index 00000000..3910fca9 --- /dev/null +++ b/screenshots/scripts/scenarios/library-jobs.ts @@ -0,0 +1,91 @@ +import { Page, BrowserContext } from "playwright"; +import { captureScreenshot } from "../utils/screenshot.js"; +import { waitForPageReady } from "../utils/wait.js"; + +/** + * Per-library scheduled jobs scenario. + * + * Captures the LibraryJobs page (empty state) and the JobEditor modal + * mid-creation: the metadata-refresh job UI with provider selection, + * cron presets, field-group toggles, and matching strategy. + * + * Runs after Plugins so the provider dropdown has entries. + */ +export async function run(page: Page, _context: BrowserContext): Promise<void> { + console.log(" ⏱ Capturing per-library scheduled jobs..."); + + // Find the manga library's id from the sidebar. 
+ await page.goto("/"); + await waitForPageReady(page); + await page.waitForTimeout(400); + + const mangaLink = page.locator('nav a[href*="/libraries/"]:has-text("Manga")').first(); + if ((await mangaLink.count()) === 0) { + console.log(" ⚠️ Manga library not found in sidebar"); + return; + } + const mangaHref = await mangaLink.getAttribute("href"); + if (!mangaHref) { + console.log(" ⚠️ Manga library href missing"); + return; + } + // Hrefs look like /libraries/<uuid>/series — strip the trailing tab. + const libraryId = mangaHref.split("/")[2]; + if (!libraryId) { + console.log(" ⚠️ Could not parse library id from href"); + return; + } + + await page.goto(`/libraries/${libraryId}/jobs`); + await waitForPageReady(page); + await page.waitForTimeout(800); + + // Empty-state list (no jobs yet). + await captureScreenshot(page, "library-jobs/empty"); + + // Open the editor. + const addButton = page.locator('button:has-text("Add job")').first(); + if ((await addButton.count()) === 0) { + console.log(" ⚠️ Add job button not found"); + return; + } + await addButton.click(); + await page.waitForSelector('[role="dialog"], .mantine-Modal-content', { + state: "visible", + timeout: 5000, + }); + await page.waitForTimeout(800); + + // Capture the editor in its initial state. + await captureScreenshot(page, "library-jobs/editor-empty"); + + // Fill in a representative job so the screenshot looks real. + const nameInput = page.locator('input[placeholder*="Daily" i], label:has-text("Name") + * input').first(); + if ((await nameInput.count()) > 0) { + await nameInput.fill("Daily metadata refresh"); + await page.waitForTimeout(200); + } + + // Pick a provider. The Select label reads "Provider" or "Plugin" — + // Mantine wraps the input in `.mantine-Select-input`. 
+ const providerSelect = page.locator( + 'label:has-text("Provider"), label:has-text("Plugin")', + ).locator('..').locator('.mantine-Select-input').first(); + if ((await providerSelect.count()) > 0) { + await providerSelect.click(); + await page.waitForTimeout(300); + const firstOption = page.locator('[role="option"]').first(); + if ((await firstOption.count()) > 0) { + await firstOption.click(); + await page.waitForTimeout(400); + } else { + await page.keyboard.press("Escape"); + } + } + + await captureScreenshot(page, "library-jobs/editor-filled"); + + // Close without saving. + await page.keyboard.press("Escape"); + await page.waitForTimeout(400); +} diff --git a/screenshots/scripts/scenarios/plugins.ts b/screenshots/scripts/scenarios/plugins.ts index f097079e..ac5f8d95 100644 --- a/screenshots/scripts/scenarios/plugins.ts +++ b/screenshots/scripts/scenarios/plugins.ts @@ -64,6 +64,10 @@ async function pluginStoreScreenshots(page: Page): Promise<void> { // === ADD ANILIST RECOMMENDATIONS PLUGIN === await addPluginFromStore(page, "AniList Recommendations", "plugins/store-add-recommendations"); + // === ADD MANGAUPDATES RELEASES PLUGIN === + // Drives the Release tracking screenshots later in the run. + await addPluginFromStore(page, "MangaUpdates Releases", "plugins/store-add-mangaupdates"); + // Navigate back to plugins page to see all installed plugins await page.goto("/settings/plugins"); await waitForPageReady(page); diff --git a/screenshots/scripts/scenarios/releases.ts b/screenshots/scripts/scenarios/releases.ts new file mode 100644 index 00000000..5bb36457 --- /dev/null +++ b/screenshots/scripts/scenarios/releases.ts @@ -0,0 +1,242 @@ +import { Page, BrowserContext } from "playwright"; +import { captureScreenshot } from "../utils/screenshot.js"; +import { waitForPageReady } from "../utils/wait.js"; + +/** + * Release tracking scenario. + * + * Walks through the end-to-end MangaUpdates flow: + * 1. 
Settings → Release tracking (sources table, default schedule) + * 2. Manga series detail → enable Tracking, add a matching alias + * 3. Settings → Release tracking → Poll now on the MangaUpdates source + * 4. /releases inbox after the poll (filters + entries if any returned) + * 5. Series detail again with the SeriesReleasesPanel populated + * + * Assumes the plugins scenario has already installed and tested + * the MangaUpdates Releases plugin (so the source row exists). + */ +export async function run(page: Page, _context: BrowserContext): Promise<void> { + console.log(" 📡 Capturing release-tracking screenshots..."); + + // === STEP 1: Settings page (sources, schedule, notifications) === + await captureSettingsPage(page); + + // === STEP 2: Series detail — enable tracking + alias === + const seriesUrl = await enableTrackingOnMangaSeries(page); + + // === STEP 3: Trigger Poll Now on the source === + await pollMangaUpdatesSource(page); + + // === STEP 4: Releases inbox === + await captureReleasesInbox(page); + + // === STEP 5: Series releases panel populated === + if (seriesUrl) { + await captureSeriesReleasesPanel(page, seriesUrl); + } +} + +/** + * Capture the Release tracking settings page after the MangaUpdates + * plugin has registered its source. The default-schedule and + * notification-preferences cards are visible at the top, with the + * source table below. + */ +async function captureSettingsPage(page: Page): Promise<void> { + console.log(" 📷 Settings — Release tracking"); + + await page.goto("/settings/release-tracking"); + await waitForPageReady(page); + await page.waitForTimeout(800); + + await captureScreenshot(page, "releases/settings-overview"); +} + +/** + * Navigate to the manga library's first series, expand the Tracking + * panel, flip the toggle on, and add an alias matching the + * MangaUpdates title. Returns the series URL for later reuse. 
+ */ +async function enableTrackingOnMangaSeries(page: Page): Promise<string | null> { + console.log(" 📷 Series detail — enable tracking"); + + // Prefer the Manga library so RTL covers show in screenshots. + const mangaLibraryLink = page.locator('nav a[href*="/libraries/"]:has-text("Manga")').first(); + if ((await mangaLibraryLink.count()) > 0) { + await mangaLibraryLink.click(); + } else { + await page.goto("/libraries/all/series"); + } + await waitForPageReady(page); + await page.waitForTimeout(500); + + const seriesCard = await page.$('[data-testid="series-card"], .series-card, a[href*="/series/"]'); + if (!seriesCard) { + console.log(" ⚠️ No series found, skipping tracking enable"); + return null; + } + await seriesCard.click(); + await waitForPageReady(page); + await page.waitForTimeout(800); + + const seriesUrl = page.url(); + + // Find the Release tracking card (header reads "Release tracking"). + const trackingHeader = page.locator( + 'button[aria-label="Expand release tracking"], button[aria-label="Collapse release tracking"]', + ).first(); + if ((await trackingHeader.count()) === 0) { + console.log(" ⚠️ Tracking panel not found (no release-source plugin enabled?)"); + return seriesUrl; + } + + // Expand the panel. + await trackingHeader.click(); + await page.waitForTimeout(400); + + // Flip the tracked toggle. The Switch is a sibling of the header + // button inside the same Card — querying by aria-label is robust. + const trackedSwitch = page.locator('input[aria-label="Toggle release tracking"]').first(); + if ((await trackedSwitch.count()) > 0) { + const isOn = await trackedSwitch.isChecked(); + if (!isOn) { + // Mantine wraps the input in a label, so click the visible track + // (the input itself is visually hidden). 
+ const switchTrack = page.locator( + 'label:has(input[aria-label="Toggle release tracking"])', + ).first(); + await switchTrack.click(); + await page.waitForTimeout(800); + } + } + + // Add an alias the MangaUpdates feed will match against. + // The fixture is "Give My Regards to Black Jack" — its MU listing + // matches that exact title; no external ID is required. + const aliasInput = page.locator('input[placeholder*="alias" i], input[aria-label*="alias" i]').first(); + if ((await aliasInput.count()) === 0) { + // Fall back to the only TextInput inside the matcher-aliases section. + const fallback = page.locator( + 'div:has(> div:has-text("Matcher aliases")) input[type="text"], div:has-text("Matcher aliases") + * input[type="text"]', + ).first(); + if ((await fallback.count()) > 0) { + await fallback.fill("Give My Regards to Black Jack"); + await page.keyboard.press("Enter"); + } + } else { + await aliasInput.fill("Give My Regards to Black Jack"); + await page.keyboard.press("Enter"); + } + await page.waitForTimeout(800); + + // Capture the panel expanded with tracking enabled. + await captureScreenshot(page, "releases/series-tracking-enabled"); + + return seriesUrl; +} + +/** + * Hit "Poll now" on the MangaUpdates source row in + * Settings → Release tracking. Waits for the in-flight indicator to + * clear so any returned entries land in the ledger before we capture + * the inbox. + */ +async function pollMangaUpdatesSource(page: Page): Promise<void> { + console.log(" 📷 Triggering Poll Now on MangaUpdates source"); + + await page.goto("/settings/release-tracking"); + await waitForPageReady(page); + await page.waitForTimeout(500); + + // Find the row containing "MangaUpdates" and click its Poll Now action. + // The action icons render a refresh icon inside a Tooltip — we click + // the IconRefresh button on the matching row. 
+ const row = page.locator('tr:has-text("MangaUpdates")').first(); + if ((await row.count()) === 0) { + console.log(" ⚠️ MangaUpdates source row not found"); + return; + } + + // Capture the row before polling (status idle). + await captureScreenshot(page, "releases/settings-before-poll"); + + const pollNowButton = row.locator('button:has(svg.tabler-icon-refresh)').first(); + if ((await pollNowButton.count()) === 0) { + console.log(" ⚠️ Poll Now button not found on MangaUpdates row"); + return; + } + await pollNowButton.click(); + await page.waitForTimeout(500); + + // The button shows a loading spinner while the poll is in flight. + // Wait for it to clear (max 60s — MangaUpdates RSS is usually fast + // but can stall on rate limits). + const start = Date.now(); + const maxWait = 60_000; + while (Date.now() - start < maxWait) { + const stillLoading = await row.locator('.mantine-Loader-root').count(); + if (stillLoading === 0) break; + await page.waitForTimeout(1000); + } + + // Re-fetch the page to surface the updated last-poll timestamp. + await page.reload(); + await waitForPageReady(page); + await page.waitForTimeout(500); + await captureScreenshot(page, "releases/settings-after-poll"); +} + +/** + * Capture the /releases inbox after the poll. Captures both states + * since on a fresh MangaUpdates poll the inbox may be empty (no + * recent chapters in the user's languages) or populated. + */ +async function captureReleasesInbox(page: Page): Promise<void> { + console.log(" 📷 Releases inbox"); + + await page.goto("/releases"); + await waitForPageReady(page); + await page.waitForTimeout(800); + + // Default state filter is "New" (announced). + await captureScreenshot(page, "releases/inbox-new"); + + // Switch to "All" to surface anything regardless of state — useful + // when the poll only landed dismissed/ignored entries. 
+ const stateFilter = page.locator('[data-testid="releases-state-filter"]').first(); + if ((await stateFilter.count()) > 0) { + await stateFilter.click(); + await page.waitForTimeout(300); + const allOption = page.locator('[role="option"]:has-text("All")').first(); + if ((await allOption.count()) > 0) { + await allOption.click(); + await page.waitForTimeout(800); + await captureScreenshot(page, "releases/inbox-all"); + } else { + await page.keyboard.press("Escape"); + } + } +} + +/** + * Re-open the manga series and capture the SeriesReleasesPanel. + * The panel only renders once `tracking.tracked === true` and a + * release-source plugin is applicable to the library. + */ +async function captureSeriesReleasesPanel(page: Page, seriesUrl: string): Promise<void> { + console.log(" 📷 Series detail — releases panel"); + + await page.goto(seriesUrl); + await waitForPageReady(page); + await page.waitForTimeout(800); + + // The releases panel header is a div with role=button + aria-label + // "Expand releases". + const releasesHeader = page.locator('[aria-label="Expand releases"]').first(); + if ((await releasesHeader.count()) > 0) { + await releasesHeader.click(); + await page.waitForTimeout(800); + } + + await captureScreenshot(page, "releases/series-releases-panel"); +} diff --git a/screenshots/scripts/scenarios/series-detail.ts b/screenshots/scripts/scenarios/series-detail.ts new file mode 100644 index 00000000..5a658f06 --- /dev/null +++ b/screenshots/scripts/scenarios/series-detail.ts @@ -0,0 +1,189 @@ +import { Page, BrowserContext } from "playwright"; +import { captureScreenshot } from "../utils/screenshot.js"; +import { waitForPageReady, waitForThumbnails } from "../utils/wait.js"; + +/** + * Series detail extras scenario. + * + * Captures the new surfaces on the series detail and library pages: + * 1. Bulk selection toolbar + Bulk Metadata Edit modal (Library page) + * 2. Series detail actions menu (renumber, reset, edit metadata) + * 3. 
Series Metadata Edit modal + * 4. Series Info modal (read-only details) + * 5. External IDs edit modal + * 6. Reset Metadata confirmation + * + * Runs after Libraries (so series exist) and Plugins (so the + * "Fetch Metadata" submenu has entries). + */ +export async function run(page: Page, _context: BrowserContext): Promise<void> { + console.log("  📚 Capturing series detail extras..."); + + await captureBulkMetadataFlow(page); + await captureSeriesDetailExtras(page); +} + +/** + * Library page → enter bulk-select mode by clicking a card's checkbox → + * select a couple of series → open Edit Metadata modal → tab through. + */ +async function captureBulkMetadataFlow(page: Page): Promise<void> { + console.log("  📷 Library — bulk metadata edit"); + + await page.goto("/libraries/all/series"); + await waitForPageReady(page); + await waitForThumbnails(page); + await page.waitForTimeout(500); + + // The selection checkbox is hidden until hovered or until selection + // mode activates, so a normal click can miss it. Clicking the first + // card's checkbox via JS bypasses the hover-only CSS and enters + // selection mode. + const firstCheckbox = page.locator('[data-selection-checkbox] input[type="checkbox"]').first(); + if ((await firstCheckbox.count()) === 0) { + console.log("  ⚠️ No selection checkboxes found"); + return; + } + await firstCheckbox.evaluate((el) => (el as HTMLInputElement).click()); + await page.waitForTimeout(400); + + // Select a second card so the bulk metadata modal has more than one + // entry to merge. + const secondCheckbox = page.locator('[data-selection-checkbox] input[type="checkbox"]').nth(1); + if ((await secondCheckbox.count()) > 0) { + await secondCheckbox.evaluate((el) => (el as HTMLInputElement).click()); + await page.waitForTimeout(400); + } + + // Capture the bulk selection toolbar visible above the grid. + await captureScreenshot(page, "series-detail/bulk-selection-toolbar"); + + // Open the Edit Metadata modal. 
+ const editButton = page.locator('button:has-text("Edit Metadata")').first(); + if ((await editButton.count()) === 0) { + console.log("  ⚠️ Edit Metadata button not visible (missing permission?)"); + return; + } + await editButton.click(); + await page.waitForSelector('[role="dialog"], .mantine-Modal-content', { + state: "visible", + timeout: 5000, + }); + await page.waitForTimeout(600); + + // The modal opens on the General tab. + await captureScreenshot(page, "series-detail/bulk-metadata-general"); + + // Tab through the secondary tabs the modal exposes. + for (const [tabName, screenshot] of [ + ["Authors", "series-detail/bulk-metadata-authors"], + ["Tags", "series-detail/bulk-metadata-tags"], + ["Custom", "series-detail/bulk-metadata-custom"], + ] as const) { + const tab = page.locator(`button[role="tab"]:has-text("${tabName}")`).first(); + if ((await tab.count()) === 0) continue; + await tab.click(); + await page.waitForTimeout(400); + await captureScreenshot(page, screenshot); + } + + // Close without saving — saving would mutate the only fixture data. + await page.keyboard.press("Escape"); + await page.waitForTimeout(400); + + // Clear selection so subsequent scenarios start clean. + const clearButton = page.locator( + 'button[aria-label="Clear selection"], button:has-text("Cancel")', + ).first(); + if ((await clearButton.count()) > 0) { + await clearButton.click().catch(() => {}); + } + await page.waitForTimeout(300); +} + +/** + * Series detail actions menu + the modals it triggers. We capture the + * menu open and the Edit Metadata + Series Info + Reset Metadata + * confirmation modals, but never click through any destructive action. 
+ */ +async function captureSeriesDetailExtras(page: Page): Promise<void> { + console.log(" 📷 Series detail — actions menu + modals"); + + await page.goto("/libraries/all/series"); + await waitForPageReady(page); + await page.waitForTimeout(500); + + const seriesCard = await page.$('[data-testid="series-card"], .series-card, a[href*="/series/"]'); + if (!seriesCard) { + console.log(" ⚠️ No series found"); + return; + } + await seriesCard.click(); + await waitForPageReady(page); + await page.waitForTimeout(800); + + // Open the actions menu (kebab in the header grid). + const actionsMenu = page.locator( + '.mantine-Grid-root button:has(svg.tabler-icon-dots-vertical)', + ).first(); + if ((await actionsMenu.count()) === 0) { + console.log(" ⚠️ Actions menu not found"); + return; + } + await actionsMenu.click(); + await page.waitForTimeout(400); + await captureScreenshot(page, "series-detail/actions-menu"); + + // Capture Reset Metadata confirmation modal — opens directly from + // the menu, so we click it then capture before dismissing. + const resetItem = page.locator('[role="menuitem"]:has-text("Reset Metadata")').first(); + if ((await resetItem.count()) > 0) { + await resetItem.click(); + await page.waitForSelector('[role="dialog"], .mantine-Modal-content', { + state: "visible", + timeout: 5000, + }); + await page.waitForTimeout(500); + await captureScreenshot(page, "series-detail/reset-metadata-confirm"); + // Dismiss without confirming. + await page.keyboard.press("Escape"); + await page.waitForTimeout(400); + } + + // Re-open the actions menu and click Edit Metadata to capture the + // modal. 
+ await actionsMenu.click(); + await page.waitForTimeout(400); + const editItem = page.locator('[role="menuitem"]:has-text("Edit Metadata")').first(); + if ((await editItem.count()) > 0) { + await editItem.click(); + await page.waitForSelector('[role="dialog"], .mantine-Modal-content', { + state: "visible", + timeout: 5000, + }); + await page.waitForTimeout(600); + await captureScreenshot(page, "series-detail/edit-metadata-modal"); + await page.keyboard.press("Escape"); + await page.waitForTimeout(400); + } else { + await page.keyboard.press("Escape"); + } + + // Series Info modal — opens via a dedicated info button on the + // series header. The IconInfoCircle is the only "info" tabler icon + // in that area. + const infoButton = page.locator( + '.mantine-Grid-root button:has(svg.tabler-icon-info-circle)', + ).first(); + if ((await infoButton.count()) > 0) { + await infoButton.click(); + await page.waitForSelector('[role="dialog"], .mantine-Modal-content', { + state: "visible", + timeout: 5000, + }); + await page.waitForTimeout(500); + await captureScreenshot(page, "series-detail/info-modal"); + await page.keyboard.press("Escape"); + await page.waitForTimeout(400); + } +} diff --git a/screenshots/scripts/scenarios/settings.ts b/screenshots/scripts/scenarios/settings.ts index 2b241c51..f6fe1ef3 100644 --- a/screenshots/scripts/scenarios/settings.ts +++ b/screenshots/scripts/scenarios/settings.ts @@ -20,6 +20,10 @@ export async function run(page: Page, _context: BrowserContext): Promise<void> { { path: "/settings/book-errors", name: "settings/book-errors", label: "Book Errors" }, { path: "/settings/cleanup", name: "settings/cleanup", label: "Thumbnail Cleanup" }, { path: "/settings/pdf-cache", name: "settings/pdf-cache", label: "PDF Cache" }, + { path: "/settings/release-tracking", name: "settings/release-tracking", label: "Release Tracking" }, + { path: "/settings/plugin-storage", name: "settings/plugin-storage", label: "Plugin Storage" }, + { path: 
"/settings/exports", name: "settings/exports", label: "Series Exports" }, + { path: "/settings/integrations", name: "settings/integrations", label: "Integrations" }, { path: "/settings/profile", name: "settings/profile", label: "Profile" }, ]; diff --git a/src/api/docs.rs b/src/api/docs.rs index 1ca5d0ba..cb80dcc0 100644 --- a/src/api/docs.rs +++ b/src/api/docs.rs @@ -256,6 +256,28 @@ The following paths are exempt from rate limiting: v1::handlers::create_series_external_id, v1::handlers::delete_series_external_id, + // Release-tracking config + aliases + v1::handlers::tracking::get_series_tracking, + v1::handlers::tracking::update_series_tracking, + v1::handlers::tracking::list_series_aliases, + v1::handlers::tracking::create_series_alias, + v1::handlers::tracking::delete_series_alias, + + // Release ledger + sources (Phase 2) + v1::handlers::releases::list_series_releases, + v1::handlers::releases::list_release_inbox, + v1::handlers::releases::update_release_entry, + v1::handlers::releases::dismiss_release, + v1::handlers::releases::mark_release_acquired, + v1::handlers::releases::list_release_sources, + v1::handlers::releases::update_release_source, + v1::handlers::releases::poll_release_source_now, + v1::handlers::releases::reset_release_source, + v1::handlers::releases::get_release_tracking_applicability, + v1::handlers::releases::list_release_facets, + v1::handlers::releases::delete_release, + v1::handlers::releases::bulk_release_action, + // Cover management endpoints v1::handlers::list_series_covers, v1::handlers::get_series_cover_image, @@ -329,6 +351,8 @@ The following paths are exempt from rate limiting: v1::handlers::bulk_mark_series_as_unread, v1::handlers::bulk_analyze_series, v1::handlers::bulk_renumber_series, + v1::handlers::bulk_track_series_for_releases, + v1::handlers::bulk_untrack_series_for_releases, v1::handlers::bulk_generate_series_thumbnails, v1::handlers::bulk_generate_series_book_thumbnails, v1::handlers::bulk_reprocess_series_titles, @@ 
-678,6 +702,33 @@ The following paths are exempt from rate limiting: v1::dto::CreateAlternateTitleRequest, v1::dto::UpdateAlternateTitleRequest, + // Release-tracking DTOs + v1::dto::tracking::SeriesTrackingDto, + v1::dto::tracking::UpdateSeriesTrackingRequest, + v1::dto::tracking::SeriesAliasDto, + v1::dto::tracking::SeriesAliasListResponse, + v1::dto::tracking::CreateSeriesAliasRequest, + + // Release-ledger + source DTOs (Phase 2) + v1::dto::release::ReleaseLedgerEntryDto, + v1::dto::release::ReleaseLedgerListResponse, + v1::dto::release::UpdateReleaseLedgerEntryRequest, + v1::dto::release::ReleaseSourceDto, + v1::dto::release::ReleaseSourceListResponse, + v1::dto::release::UpdateReleaseSourceRequest, + v1::dto::release::PollNowResponse, + v1::dto::release::ResetReleaseSourceResponse, + v1::dto::release::ReleaseSeriesFacetDto, + v1::dto::release::ReleaseLibraryFacetDto, + v1::dto::release::ReleaseLanguageFacetDto, + v1::dto::release::ReleaseFacetsResponse, + v1::dto::release::BulkReleaseAction, + v1::dto::release::BulkReleaseActionRequest, + v1::dto::release::BulkReleaseActionResponse, + v1::dto::release::DeleteReleaseResponse, + v1::handlers::releases::ApplicabilityResponse, + v1::dto::PaginatedResponse<v1::dto::release::ReleaseLedgerEntryDto>, + // External Rating DTOs v1::dto::ExternalRatingDto, v1::dto::ExternalRatingListResponse, @@ -794,6 +845,8 @@ The following paths are exempt from rate limiting: v1::dto::BulkSeriesRequest, v1::dto::BulkAnalyzeSeriesRequest, v1::dto::BulkAnalyzeResponse, + v1::dto::BulkTrackForReleasesItem, + v1::dto::BulkTrackForReleasesResponse, v1::dto::BulkRenumberSeriesRequest, v1::dto::BulkGenerateBookThumbnailsRequest, v1::dto::BulkGenerateSeriesBookThumbnailsRequest, @@ -1002,6 +1055,8 @@ The following paths are exempt from rate limiting: // Library Content (name = "Libraries", description = "Library management endpoints"), (name = "Series", description = "Series browsing and search endpoints"), + (name = "Tracking", description 
= "Release-tracking config and matcher aliases"), + (name = "Releases", description = "Release ledger (announcements) and source admin"), (name = "Books", description = "Book details and metadata endpoints"), (name = "Pages", description = "Page image serving endpoints"), @@ -1144,7 +1199,7 @@ impl utoipa::Modify for TagGroupsModifier { }, { "name": "Library Content", - "tags": ["Libraries", "Series", "Books", "Pages"] + "tags": ["Libraries", "Series", "Tracking", "Releases", "Books", "Pages"] }, { "name": "Metadata & Taxonomy", diff --git a/src/api/routes/v1/dto/mod.rs b/src/api/routes/v1/dto/mod.rs index 43df7639..fe2bb4bd 100644 --- a/src/api/routes/v1/dto/mod.rs +++ b/src/api/routes/v1/dto/mod.rs @@ -22,6 +22,7 @@ pub mod plugin_storage; pub mod plugins; pub mod read_progress; pub mod recommendations; +pub mod release; pub mod scan; pub mod series; pub mod series_export; @@ -29,6 +30,7 @@ pub mod settings; pub mod setup; pub mod sharing_tag; pub mod task_metrics; +pub mod tracking; pub mod user; pub mod user_plugins; pub mod user_preferences; @@ -54,6 +56,8 @@ pub use plugins::*; pub use read_progress::*; #[allow(unused_imports)] pub use recommendations::*; +#[allow(unused_imports)] +pub use release::*; pub use scan::*; pub use series::*; #[allow(unused_imports)] @@ -62,6 +66,8 @@ pub use settings::*; pub use setup::*; pub use sharing_tag::*; pub use task_metrics::*; +#[allow(unused_imports)] +pub use tracking::*; pub use user::*; #[allow(unused_imports)] pub use user_plugins::*; diff --git a/src/api/routes/v1/dto/plugins.rs b/src/api/routes/v1/dto/plugins.rs index 80de669c..f8c50b08 100644 --- a/src/api/routes/v1/dto/plugins.rs +++ b/src/api/routes/v1/dto/plugins.rs @@ -233,7 +233,9 @@ pub struct ConfigFieldDto { /// Description of what this field does #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, - /// Field type: "number", "string", or "boolean" + /// Field type — free-form documentation hint. 
Common values: "number", + /// "string", "boolean", "string-array", "object". The host never validates + /// stored config against this; it forwards the raw JSON to the plugin. #[serde(rename = "type")] pub field_type: String, /// Whether this field is required @@ -403,10 +405,15 @@ pub struct PluginCapabilitiesDto { /// Can provide personalized recommendations #[serde(default)] pub user_recommendation_provider: bool, + /// Whether the plugin declares the `release_source` capability (announces + /// new chapter / volume releases for tracked series). + #[serde(default)] + pub release_source: bool, } impl From<PluginCapabilities> for PluginCapabilitiesDto { fn from(c: PluginCapabilities) -> Self { + let release_source = c.is_release_source(); Self { metadata_provider: c .metadata_provider @@ -416,6 +423,7 @@ impl From<PluginCapabilities> for PluginCapabilitiesDto { user_read_sync: c.user_read_sync, external_id_source: c.external_id_source, user_recommendation_provider: c.user_recommendation_provider, + release_source, } } } diff --git a/src/api/routes/v1/dto/release.rs b/src/api/routes/v1/dto/release.rs new file mode 100644 index 00000000..f386c894 --- /dev/null +++ b/src/api/routes/v1/dto/release.rs @@ -0,0 +1,404 @@ +//! DTOs for the release ledger and release-source admin endpoints. +//! +//! - `ReleaseLedgerEntryDto` mirrors a row in `release_ledger`. Used by the +//! per-series and inbox views. +//! - `ReleaseSourceDto` mirrors a row in `release_sources`. Used by the +//! admin source management UI. +//! +//! Note: this module deliberately does NOT introduce a new `ReleaseAnnounced` +//! event variant - that lands in Phase 7 along with the frontend inbox UI. +//! State-change endpoints in this module emit `SeriesUpdated` events with a +//! `releases` field marker so the existing event broadcaster carries them. 
+ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; +use uuid::Uuid; + +use crate::db::entities::{release_ledger, release_sources}; + +// ============================================================================= +// Release ledger DTOs +// ============================================================================= + +/// A single release announcement. Sources write these; the inbox reads them. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseLedgerEntryDto { + #[schema(example = "550e8400-e29b-41d4-a716-446655440a00")] + pub id: Uuid, + #[schema(example = "550e8400-e29b-41d4-a716-446655440002")] + pub series_id: Uuid, + /// Series title at the time of the response. Joined from the `series` + /// table so the inbox UI can render a human-readable label without a + /// follow-up fetch. Falls back to the empty string only if the series + /// row was hard-deleted between the join and the read. + #[schema(example = "Chainsaw Man")] + pub series_title: String, + #[schema(example = "550e8400-e29b-41d4-a716-446655440b00")] + pub source_id: Uuid, + /// Plugin-stable identity for the release (used for dedup). + #[schema(example = "nyaa:1234567")] + pub external_release_id: String, + /// Torrent info_hash, if applicable. + #[serde(skip_serializing_if = "Option::is_none")] + pub info_hash: Option<String>, + /// Decimal supports `12.5` etc. + #[serde(skip_serializing_if = "Option::is_none")] + pub chapter: Option<f64>, + #[serde(skip_serializing_if = "Option::is_none")] + pub volume: Option<i32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub language: Option<String>, + /// Sparse `{ "jxl": true, "container": "cbz", ... }`. + #[serde(skip_serializing_if = "Option::is_none")] + pub format_hints: Option<serde_json::Value>, + /// Group/scanlator/uploader attribution. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub group_or_uploader: Option<String>, + /// Where to acquire the release. Conventionally a human-readable + /// landing page (Nyaa view page, MangaUpdates release page). + pub payload_url: String, + /// Optional second URL for direct fetch (`.torrent`, `magnet:`, DDL + /// link). Travels paired with [`Self::media_url_kind`]. + #[serde(skip_serializing_if = "Option::is_none")] + pub media_url: Option<String>, + /// Classifies what `media_url` points at: `torrent` | `magnet` | + /// `direct` | `other`. The frontend uses this to pick a kind-specific + /// icon next to the standard external-link icon. + #[serde(skip_serializing_if = "Option::is_none")] + pub media_url_kind: Option<String>, + pub confidence: f64, + /// `announced` | `dismissed` | `marked_acquired` | `hidden`. + pub state: String, + /// Source-specific extras (free-form). + #[serde(skip_serializing_if = "Option::is_none")] + pub metadata: Option<serde_json::Value>, + pub observed_at: DateTime<Utc>, + pub created_at: DateTime<Utc>, +} + +impl ReleaseLedgerEntryDto { + /// Build a DTO from a ledger row plus the joined series title. The title + /// must be looked up by the caller (typically a batch query in the + /// handler) since `From<Model>` alone can't carry it. + pub fn from_model_with_series_title(m: release_ledger::Model, series_title: String) -> Self { + Self { + id: m.id, + series_id: m.series_id, + series_title, + source_id: m.source_id, + external_release_id: m.external_release_id, + info_hash: m.info_hash, + chapter: m.chapter, + volume: m.volume, + language: m.language, + format_hints: m.format_hints, + group_or_uploader: m.group_or_uploader, + payload_url: m.payload_url, + media_url: m.media_url, + media_url_kind: m.media_url_kind, + confidence: m.confidence, + state: m.state, + metadata: m.metadata, + observed_at: m.observed_at, + created_at: m.created_at, + } + } +} + +/// PATCH payload for ledger row state transitions. 
+/// +/// Only `state` is patchable from the API today; the rest of the row is +/// source-controlled. `state` is validated against the canonical set: +/// `announced` | `dismissed` | `marked_acquired` | `hidden`. +#[derive(Debug, Clone, Default, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct UpdateReleaseLedgerEntryRequest { + /// New state. See [`ReleaseLedgerEntryDto::state`] for allowed values. + pub state: Option<String>, +} + +#[derive(Debug, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseLedgerListResponse { + pub entries: Vec<ReleaseLedgerEntryDto>, +} + +// ============================================================================= +// Release source DTOs +// ============================================================================= + +/// A configured release source (one row per logical feed). +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseSourceDto { + #[schema(example = "550e8400-e29b-41d4-a716-446655440b00")] + pub id: Uuid, + /// Owning plugin id, or `core` for in-core synthetic sources. + #[schema(example = "release-nyaa")] + pub plugin_id: String, + /// Plugin-defined unique key. + #[schema(example = "nyaa:user:tsuna69")] + pub source_key: String, + pub display_name: String, + /// `rss-uploader` | `rss-series` | `api-feed` | `metadata-feed` | `metadata-piggyback`. + pub kind: String, + pub enabled: bool, + /// Per-source cron override (5-field POSIX cron). `null` when the row + /// inherits the server-wide `release_tracking.default_cron_schedule`. + /// Always present in the response (not omitted on null) so clients can + /// distinguish "inheriting" from "field missing." + pub cron_schedule: Option<String>, + /// The cron expression actually used by the scheduler for this source: + /// the row's `cron_schedule` if set, otherwise the resolved server-wide + /// default. 
Lets the UI display "Daily (Default)" without needing to + /// fetch the global setting separately. + pub effective_cron_schedule: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub last_polled_at: Option<DateTime<Utc>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub last_error: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub last_error_at: Option<DateTime<Utc>>, + /// Opaque etag/cursor used for conditional fetches. + #[serde(skip_serializing_if = "Option::is_none")] + pub etag: Option<String>, + /// Source-specific configuration (free-form). + #[serde(skip_serializing_if = "Option::is_none")] + pub config: Option<serde_json::Value>, + /// One-line summary of the most recent successful poll. Surfaced under + /// the row's status badge so users can see *why* a poll returned no + /// announcements without grepping logs. NULL until the first successful + /// poll on the source. + #[serde(skip_serializing_if = "Option::is_none")] + pub last_summary: Option<String>, + pub created_at: DateTime<Utc>, + pub updated_at: DateTime<Utc>, +} + +impl ReleaseSourceDto { + /// Build the DTO from a model + the resolved server-wide default cron + /// schedule. Use this in handlers that already have the default in + /// hand (avoids a settings round-trip per row). 
+ pub fn from_model_with_default(m: release_sources::Model, server_default: &str) -> Self { + let effective = crate::services::release::schedule::resolve_cron_schedule( + m.cron_schedule.as_deref(), + server_default, + ); + Self { + id: m.id, + plugin_id: m.plugin_id, + source_key: m.source_key, + display_name: m.display_name, + kind: m.kind, + enabled: m.enabled, + cron_schedule: m.cron_schedule, + effective_cron_schedule: effective, + last_polled_at: m.last_polled_at, + last_error: m.last_error, + last_error_at: m.last_error_at, + etag: m.etag, + config: m.config, + last_summary: m.last_summary, + created_at: m.created_at, + updated_at: m.updated_at, + } + } +} + +impl From<release_sources::Model> for ReleaseSourceDto { + /// Convenience for callers that don't have the server default handy + /// (e.g. unit tests). Falls back to the compile-time + /// `DEFAULT_CRON_SCHEDULE` for resolution. Production handlers should + /// prefer [`ReleaseSourceDto::from_model_with_default`]. + fn from(m: release_sources::Model) -> Self { + Self::from_model_with_default(m, crate::services::release::schedule::DEFAULT_CRON_SCHEDULE) + } +} + +#[derive(Debug, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseSourceListResponse { + pub sources: Vec<ReleaseSourceDto>, +} + +/// PATCH payload for a release source. All fields optional; omit to leave alone. +/// +/// `cron_schedule` uses double-Option semantics: +/// - field absent (`None`): leave the row's cron_schedule unchanged +/// - explicit `null` (`Some(None)`) / `""` / `" "`: clear the override +/// (revert to inheriting the server-wide +/// `release_tracking.default_cron_schedule`) +/// - `Some(Some("0 */6 * * *"))`: set a per-source override +#[derive(Debug, Clone, Default, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct UpdateReleaseSourceRequest { + pub display_name: Option<String>, + pub enabled: Option<bool>, + /// 5-field POSIX cron expression. 
Use `null` (or empty string) to + /// clear the override and inherit the server-wide default. + #[serde(default, with = "double_option")] + pub cron_schedule: Option<Option<String>>, +} + +/// Local copy of the `Option<Option<T>>` serde adapter used by `tracking.rs`. +/// See that module for the full rationale; in short: distinguishes "field +/// absent" (leave alone) from "explicit null" (clear). +mod double_option { + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + + pub fn serialize<S, T>(value: &Option<Option<T>>, ser: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + T: Serialize, + { + match value { + Some(Some(v)) => v.serialize(ser), + Some(None) => ser.serialize_none(), + None => ser.serialize_none(), + } + } + + pub fn deserialize<'de, D, T>(de: D) -> Result<Option<Option<T>>, D::Error> + where + D: Deserializer<'de>, + T: Deserialize<'de>, + { + Option::<T>::deserialize(de).map(Some) + } +} + +/// Response shape from the `reset` endpoint. +/// +/// Returns the number of ledger rows removed so callers can show a +/// confirmation toast. The source's transient poll state (etag, +/// last_polled_at, last_error, last_summary) is also cleared, but those +/// are not counted here. +#[derive(Debug, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ResetReleaseSourceResponse { + /// Number of `release_ledger` rows deleted for this source. + pub deleted_ledger_entries: u64, +} + +// ============================================================================= +// Facets +// ============================================================================= + +/// One series option in the inbox facets response. Carries the joined +/// `library_id` and `library_name` so the frontend can group the dropdown +/// by library without a follow-up call. 
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseSeriesFacetDto { + pub series_id: Uuid, + pub series_title: String, + pub library_id: Uuid, + pub library_name: String, + /// Number of ledger rows matching the active filter for this series. + pub count: u64, +} + +/// One library option in the inbox facets response. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseLibraryFacetDto { + pub library_id: Uuid, + pub library_name: String, + pub count: u64, +} + +/// One language option in the inbox facets response. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseLanguageFacetDto { + pub language: String, + pub count: u64, +} + +/// Response shape for `GET /api/v1/releases/facets`. +/// +/// Each list reflects the distinct values present in the ledger under the +/// **other** active filters (Solr-style facet exclusion), so dropdowns +/// never offer combinations that would yield zero results. The frontend +/// uses these to populate cascading filter Select inputs without forcing +/// the user to type UUIDs. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseFacetsResponse { + pub languages: Vec<ReleaseLanguageFacetDto>, + pub libraries: Vec<ReleaseLibraryFacetDto>, + pub series: Vec<ReleaseSeriesFacetDto>, +} + +// ============================================================================= +// Bulk operations +// ============================================================================= + +/// Action requested by `POST /api/v1/releases/bulk`. +#[derive(Debug, Clone, Copy, Serialize, Deserialize, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "kebab-case")] +pub enum BulkReleaseAction { + /// Set state to `dismissed`. + Dismiss, + /// Set state to `marked_acquired`. + MarkAcquired, + /// Set state to `ignored`. 
Same effect as auto-ignore at ingestion, + /// but applied manually after the fact. + Ignore, + /// Reset state back to `announced`. Universal undo for `dismissed`, + /// `marked_acquired`, and `ignored`. Does not re-emit the release- + /// announced SSE event (the user is the one driving the change). + Reset, + /// Hard-delete the ledger rows. Each affected source's `etag` is + /// cleared so the next poll re-fetches without `If-None-Match` and + /// re-announces the deleted releases. + Delete, +} + +/// Request body for `POST /api/v1/releases/bulk`. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct BulkReleaseActionRequest { + pub ids: Vec<Uuid>, + pub action: BulkReleaseAction, +} + +/// Response from `POST /api/v1/releases/bulk`. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct BulkReleaseActionResponse { + /// Number of ledger rows actually affected. Less than `ids.len()` when + /// some IDs were already deleted concurrently. + pub affected: u64, + /// Action that ran (echoed back for client-side confirmation toasts). + pub action: BulkReleaseAction, +} + +/// Response from `DELETE /api/v1/releases/{id}`. +/// +/// Single-row delete returns a small confirmation rather than 204 so the +/// frontend can surface a toast that mentions the etag clear ("the next +/// poll will re-fetch this release"). Mirrors the bulk-delete shape with +/// `affected = 1`. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DeleteReleaseResponse { + /// `true` if the row was deleted, `false` if it didn't exist. + pub deleted: bool, +} + +/// Response shape from the `poll-now` endpoint. +/// +/// `status` is `enqueued` after a successful enqueue. The `message` carries +/// the task ID for follow-up (`tasks.id`); the task runs asynchronously, so +/// this response does not reflect poll outcome. 
+#[derive(Debug, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct PollNowResponse { + /// `enqueued` on success. + pub status: String, + /// Human-readable message; includes the enqueued task ID. + pub message: String, +} diff --git a/src/api/routes/v1/dto/series.rs b/src/api/routes/v1/dto/series.rs index 40d174ff..c3f44c68 100644 --- a/src/api/routes/v1/dto/series.rs +++ b/src/api/routes/v1/dto/series.rs @@ -249,6 +249,36 @@ pub struct SeriesDto { #[schema(example = 2)] pub unread_count: Option<i64>, + /// Difference between the upstream original-language chapter count + /// (`series_metadata.total_chapter_count`, supplied by metadata + /// providers like MangaBaka or AniList) and the highest locally-owned + /// chapter (`local_max_chapter`). + /// + /// Always `None` unless the series is tracked AND `track_chapters` is + /// enabled AND the provider count is populated AND the rounded-to-1- + /// decimal gap is positive. **This is an informational signal, not a + /// release announcement** — Phase 6's MangaUpdates plugin owns the + /// translation-release feed. + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(example = 3.0)] + pub upstream_chapter_gap: Option<f32>, + + /// Difference between the upstream original-language volume count + /// (`series_metadata.total_volume_count`) and the highest locally-owned + /// volume (`local_max_volume`). Same suppression rules as + /// `upstream_chapter_gap`, gated on `track_volumes`. + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(example = 1)] + pub upstream_volume_gap: Option<i32>, + + /// Display name of the metadata provider that supplied the upstream + /// counts (e.g., "MangaBaka", "AniList"). Set whenever at least one of + /// `upstream_chapter_gap` / `upstream_volume_gap` is populated. Used by + /// the Phase 7 badge tooltip. 
+ #[serde(skip_serializing_if = "Option::is_none")] + #[schema(example = "MangaBaka")] + pub upstream_gap_provider: Option<String>, + /// When the series was created #[schema(example = "2024-01-01T00:00:00Z")] pub created_at: DateTime<Utc>, @@ -1265,6 +1295,22 @@ pub struct FullSeriesResponse { #[schema(example = 14)] pub volumes_owned: Option<i64>, + /// Upstream-vs-local chapter delta. See `SeriesDto::upstream_chapter_gap`. + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(example = 3.0)] + pub upstream_chapter_gap: Option<f32>, + + /// Upstream-vs-local volume delta. See `SeriesDto::upstream_volume_gap`. + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(example = 1)] + pub upstream_volume_gap: Option<i32>, + + /// Provider that supplied the upstream counts. See + /// `SeriesDto::upstream_gap_provider`. + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(example = "MangaBaka")] + pub upstream_gap_provider: Option<String>, + /// Number of unread books in this series (user-specific) #[serde(skip_serializing_if = "Option::is_none")] #[schema(example = 2)] diff --git a/src/api/routes/v1/dto/tracking.rs b/src/api/routes/v1/dto/tracking.rs new file mode 100644 index 00000000..72c2defe --- /dev/null +++ b/src/api/routes/v1/dto/tracking.rs @@ -0,0 +1,251 @@ +//! DTOs for release-tracking config and aliases endpoints. +//! +//! Maps the `series_tracking` sidecar and `series_aliases` table onto the v1 +//! HTTP API. Distinct from `series_alternate_titles` — aliases here are +//! arbitrary matcher strings, not labelled localized titles. 
+ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; +use uuid::Uuid; + +use crate::db::entities::{series_aliases, series_tracking}; + +// ============================================================================= +// Tracking config DTOs +// ============================================================================= + +/// Per-series release-tracking configuration. +/// +/// Returned even for untracked series — the row defaults to `tracked: false` +/// with conservative defaults so the frontend can render the panel without +/// special-casing missing rows. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct SeriesTrackingDto { + /// Series ID this config belongs to. + #[schema(example = "550e8400-e29b-41d4-a716-446655440002")] + pub series_id: Uuid, + /// Whether release tracking is enabled. + pub tracked: bool, + /// Whether to announce new chapters. + pub track_chapters: bool, + /// Whether to announce new volumes. + pub track_volumes: bool, + /// Latest known external chapter (supports decimals like 12.5). + #[serde(skip_serializing_if = "Option::is_none")] + pub latest_known_chapter: Option<f64>, + /// Latest known external volume. + #[serde(skip_serializing_if = "Option::is_none")] + pub latest_known_volume: Option<i32>, + /// Sparse map of `{ "<volume>": { "first": ch, "last": ch } }`. + #[serde(skip_serializing_if = "Option::is_none")] + pub volume_chapter_map: Option<serde_json::Value>, + /// Per-series override of the source poll interval (seconds). + #[serde(skip_serializing_if = "Option::is_none")] + pub poll_interval_override_s: Option<i32>, + /// Per-series override of the server's confidence threshold (0.0 - 1.0). + #[serde(skip_serializing_if = "Option::is_none")] + pub confidence_threshold_override: Option<f64>, + /// Per-series language preference (ISO 639-1 codes, e.g. `["en", "es"]`). 
+ /// `null` means "fall back to the server-wide default (`release_tracking.default_languages`)." + /// Used by aggregation feeds (e.g. MangaUpdates) that emit candidates in many languages. + #[serde(skip_serializing_if = "Option::is_none")] + pub languages: Option<Vec<String>>, + /// When the row was created (epoch when virtual). + pub created_at: DateTime<Utc>, + /// When the row was last updated (epoch when virtual). + pub updated_at: DateTime<Utc>, +} + +impl From<series_tracking::Model> for SeriesTrackingDto { + fn from(m: series_tracking::Model) -> Self { + Self { + series_id: m.series_id, + tracked: m.tracked, + track_chapters: m.track_chapters, + track_volumes: m.track_volumes, + latest_known_chapter: m.latest_known_chapter, + latest_known_volume: m.latest_known_volume, + volume_chapter_map: m.volume_chapter_map, + poll_interval_override_s: m.poll_interval_override_s, + confidence_threshold_override: m.confidence_threshold_override, + languages: m.languages.and_then(|v| serde_json::from_value(v).ok()), + created_at: m.created_at, + updated_at: m.updated_at, + } + } +} + +/// PATCH payload for tracking config. All fields are optional: +/// omit a field to leave it untouched. Use a JSON `null` on a nullable field +/// to clear it explicitly. +#[derive(Debug, Clone, Default, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct UpdateSeriesTrackingRequest { + pub tracked: Option<bool>, + pub track_chapters: Option<bool>, + pub track_volumes: Option<bool>, + /// Use `Some(null)` to clear, `Some(<value>)` to set, omit to leave alone. 
+ #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "double_option" + )] + pub latest_known_chapter: Option<Option<f64>>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "double_option" + )] + pub latest_known_volume: Option<Option<i32>>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "double_option" + )] + pub volume_chapter_map: Option<Option<serde_json::Value>>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "double_option" + )] + pub poll_interval_override_s: Option<Option<i32>>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "double_option" + )] + pub confidence_threshold_override: Option<Option<f64>>, + /// ISO 639-1 codes; `null` clears (falls back to server-wide default). + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "double_option" + )] + pub languages: Option<Option<Vec<String>>>, +} + +/// `Option<Option<T>>` SerDe helper: distinguishes "field omitted" from "field +/// present and null". The default `Option<T>` flattens both, which collapses +/// the "leave alone vs. clear" distinction we need for PATCH semantics. +mod double_option { + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + + pub fn serialize<S, T>(value: &Option<Option<T>>, ser: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + T: Serialize, + { + match value { + Some(Some(v)) => v.serialize(ser), + Some(None) => ser.serialize_none(), + None => ser.serialize_none(), + } + } + + pub fn deserialize<'de, D, T>(de: D) -> Result<Option<Option<T>>, D::Error> + where + D: Deserializer<'de>, + T: Deserialize<'de>, + { + // Field is present (otherwise serde would call `default`); read it as + // `Option<T>` so explicit null becomes `Some(None)` and a present value + // becomes `Some(Some(v))`. 
+ Option::<T>::deserialize(de).map(Some) + } +} + +// ============================================================================= +// Aliases DTOs +// ============================================================================= + +/// Title alias used by release-source plugins to match incoming releases by +/// title (Nyaa, MangaUpdates without an external ID, etc.). +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct SeriesAliasDto { + /// Alias row ID. + #[schema(example = "550e8400-e29b-41d4-a716-446655440100")] + pub id: Uuid, + #[schema(example = "550e8400-e29b-41d4-a716-446655440002")] + pub series_id: Uuid, + /// Alias as entered (preserves casing/punctuation). + #[schema(example = "My Hero Academia")] + pub alias: String, + /// Lowercased + punctuation-stripped form used for matching. + #[schema(example = "my hero academia")] + pub normalized: String, + /// `metadata` (auto-derived) | `manual` (user-entered). + #[schema(example = "manual")] + pub source: String, + pub created_at: DateTime<Utc>, +} + +impl From<series_aliases::Model> for SeriesAliasDto { + fn from(m: series_aliases::Model) -> Self { + Self { + id: m.id, + series_id: m.series_id, + alias: m.alias, + normalized: m.normalized, + source: m.source, + created_at: m.created_at, + } + } +} + +#[derive(Debug, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct SeriesAliasListResponse { + pub aliases: Vec<SeriesAliasDto>, +} + +#[derive(Debug, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct CreateSeriesAliasRequest { + /// Alias text. Will be trimmed; must normalize to non-empty. + #[schema(example = "Boku no Hero Academia")] + pub alias: String, + /// Optional explicit source. Defaults to `manual` when called from the API. + /// Plugin-internal flows write `metadata`; we don't expose that to HTTP. 
+ #[serde(default)] + pub source: Option<String>, +} + +// ============================================================================= +// Bulk track-for-releases DTOs +// ============================================================================= + +/// Per-series outcome of a bulk track / untrack operation. +/// +/// Returned in `BulkTrackForReleasesResponse.results` so the UI can show a +/// per-row status (e.g. "tracked", "skipped: not found", "errored: …") without +/// re-querying the tracking config endpoint per series. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct BulkTrackForReleasesItem { + pub series_id: Uuid, + /// `tracked` | `untracked` | `skipped` | `errored`. + pub outcome: String, + /// Free-form detail (error message for `errored`, reason for `skipped`). + /// `None` for the success cases. + #[serde(skip_serializing_if = "Option::is_none")] + pub detail: Option<String>, +} + +/// Aggregate result of `POST /series/bulk/track-for-releases` and its untrack +/// counterpart. Counts and per-series outcomes for client-side display. +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct BulkTrackForReleasesResponse { + /// Series successfully flipped to `tracked = true` (or `false` for the + /// untrack endpoint). + pub changed: usize, + /// Series whose `tracked` flag was already in the target state. No-ops. + pub already_in_state: usize, + /// Series that could not be processed (missing, error, etc.). + pub errored: usize, + /// Per-series outcomes in input order. 
+ pub results: Vec<BulkTrackForReleasesItem>, +} diff --git a/src/api/routes/v1/handlers/bulk.rs b/src/api/routes/v1/handlers/bulk.rs index d92db85a..1509e107 100644 --- a/src/api/routes/v1/handlers/bulk.rs +++ b/src/api/routes/v1/handlers/bulk.rs @@ -7,17 +7,19 @@ use super::super::dto::{ BulkAnalyzeBooksRequest, BulkAnalyzeResponse, BulkAnalyzeSeriesRequest, BulkBooksRequest, BulkGenerateBookThumbnailsRequest, BulkGenerateSeriesBookThumbnailsRequest, BulkGenerateSeriesThumbnailsRequest, BulkMetadataResetResponse, BulkRenumberSeriesRequest, - BulkReprocessSeriesTitlesRequest, BulkSeriesRequest, BulkTaskResponse, MarkReadResponse, + BulkReprocessSeriesTitlesRequest, BulkSeriesRequest, BulkTaskResponse, + BulkTrackForReleasesItem, BulkTrackForReleasesResponse, MarkReadResponse, }; use crate::api::{AppState, error::ApiError, extractors::AuthContext, permissions::Permission}; use crate::db::repositories::{ AlternateTitleRepository, BookRepository, ExternalLinkRepository, ExternalRatingRepository, GenreRepository, ReadProgressRepository, SeriesCoversRepository, SeriesExternalIdRepository, - SeriesMetadataRepository, SeriesRepository, SharingTagRepository, TagRepository, - TaskRepository, + SeriesMetadataRepository, SeriesRepository, SeriesTrackingRepository, SharingTagRepository, + TagRepository, TaskRepository, TrackingUpdate, }; use crate::events::{EntityChangeEvent, EntityEvent}; use crate::require_permission; +use crate::services::release::seed::seed_tracking_for_series; use crate::tasks::types::TaskType; use axum::{Json, extract::State}; use chrono::Utc; @@ -427,6 +429,262 @@ pub async fn bulk_analyze_series( })) } +// ============================================================================ +// Release-tracking Bulk Handlers +// ============================================================================ + +/// Bulk-enable release tracking for multiple series. 
+/// +/// For each `series_id` in the request, flips `series_tracking.tracked` to +/// `true` and runs the seed pass (auto-derives aliases, `latest_known_*`, +/// `track_chapters` / `track_volumes` from existing data). Series that don't +/// exist are reported as `outcome: skipped`. Series already tracked are +/// reported as `outcome: skipped, detail: "already tracked"` and the seed is +/// not re-run (idempotent — a re-run would simply re-derive identical +/// values, but we skip the work). +/// +/// Mirrors the per-series PATCH `false -> true` transition: same seed +/// function, same idempotency guarantees. +#[utoipa::path( + post, + path = "/api/v1/series/bulk/track-for-releases", + request_body = BulkSeriesRequest, + responses( + (status = 200, description = "Bulk-tracked series", body = BulkTrackForReleasesResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + ), + security( + ("bearer_auth" = []), + ("api_key" = []) + ), + tag = "Bulk Operations" +)] +pub async fn bulk_track_series_for_releases( + State(state): State<Arc<AppState>>, + auth: AuthContext, + Json(request): Json<BulkSeriesRequest>, +) -> Result<Json<BulkTrackForReleasesResponse>, ApiError> { + require_permission!(auth, Permission::SeriesWrite)?; + + let mut response = BulkTrackForReleasesResponse { + changed: 0, + already_in_state: 0, + errored: 0, + results: Vec::with_capacity(request.series_ids.len()), + }; + + for series_id in request.series_ids { + let outcome = track_one_series(&state, series_id, &auth).await; + match outcome.outcome.as_str() { + "tracked" => response.changed += 1, + "skipped" => response.already_in_state += 1, + _ => response.errored += 1, + } + response.results.push(outcome); + } + + Ok(Json(response)) +} + +/// Bulk-disable release tracking for multiple series. +/// +/// Flips `series_tracking.tracked` to `false`. 
Does not delete aliases, +/// `latest_known_*`, or other tracking config — the user can re-track +/// without losing customizations, and the seed will re-derive any +/// auto-derived fields on the next track-on transition. +#[utoipa::path( + post, + path = "/api/v1/series/bulk/untrack-for-releases", + request_body = BulkSeriesRequest, + responses( + (status = 200, description = "Bulk-untracked series", body = BulkTrackForReleasesResponse), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + ), + security( + ("bearer_auth" = []), + ("api_key" = []) + ), + tag = "Bulk Operations" +)] +pub async fn bulk_untrack_series_for_releases( + State(state): State<Arc<AppState>>, + auth: AuthContext, + Json(request): Json<BulkSeriesRequest>, +) -> Result<Json<BulkTrackForReleasesResponse>, ApiError> { + require_permission!(auth, Permission::SeriesWrite)?; + + let mut response = BulkTrackForReleasesResponse { + changed: 0, + already_in_state: 0, + errored: 0, + results: Vec::with_capacity(request.series_ids.len()), + }; + + for series_id in request.series_ids { + let outcome = untrack_one_series(&state, series_id, &auth).await; + match outcome.outcome.as_str() { + "untracked" => response.changed += 1, + "skipped" => response.already_in_state += 1, + _ => response.errored += 1, + } + response.results.push(outcome); + } + + Ok(Json(response)) +} + +/// Track a single series and seed defaults. Helper for `bulk_track_series_for_releases`. +/// +/// Returns a structured per-series outcome rather than propagating errors so +/// one bad series doesn't fail the whole bulk request. 
+async fn track_one_series( + state: &AppState, + series_id: Uuid, + auth: &AuthContext, +) -> BulkTrackForReleasesItem { + let series = match SeriesRepository::get_by_id(&state.db, series_id).await { + Ok(Some(s)) => s, + Ok(None) => { + return BulkTrackForReleasesItem { + series_id, + outcome: "skipped".to_string(), + detail: Some("series not found".to_string()), + }; + } + Err(e) => { + return BulkTrackForReleasesItem { + series_id, + outcome: "errored".to_string(), + detail: Some(format!("lookup failed: {}", e)), + }; + } + }; + + // Skip if already tracked — idempotent no-op. + let already_tracked = SeriesTrackingRepository::get(&state.db, series_id) + .await + .ok() + .flatten() + .map(|r| r.tracked) + .unwrap_or(false); + if already_tracked { + return BulkTrackForReleasesItem { + series_id, + outcome: "skipped".to_string(), + detail: Some("already tracked".to_string()), + }; + } + + // Seed first so the auto-derived fields are populated, then flip the + // tracked flag in a second pass. Same order the per-series PATCH uses. + if let Err(e) = seed_tracking_for_series(&state.db, series_id).await { + return BulkTrackForReleasesItem { + series_id, + outcome: "errored".to_string(), + detail: Some(format!("seed failed: {}", e)), + }; + } + let update = TrackingUpdate { + tracked: Some(true), + ..Default::default() + }; + if let Err(e) = SeriesTrackingRepository::upsert(&state.db, series_id, update).await { + return BulkTrackForReleasesItem { + series_id, + outcome: "errored".to_string(), + detail: Some(format!("upsert failed: {}", e)), + }; + } + + let event = EntityChangeEvent { + event: EntityEvent::SeriesUpdated { + series_id, + library_id: series.library_id, + fields: Some(vec!["tracking".to_string()]), + }, + timestamp: Utc::now(), + user_id: Some(auth.user_id), + }; + let _ = state.event_broadcaster.emit(event); + + BulkTrackForReleasesItem { + series_id, + outcome: "tracked".to_string(), + detail: None, + } +} + +/// Untrack a single series. 
Helper for `bulk_untrack_series_for_releases`. +async fn untrack_one_series( + state: &AppState, + series_id: Uuid, + auth: &AuthContext, +) -> BulkTrackForReleasesItem { + let series = match SeriesRepository::get_by_id(&state.db, series_id).await { + Ok(Some(s)) => s, + Ok(None) => { + return BulkTrackForReleasesItem { + series_id, + outcome: "skipped".to_string(), + detail: Some("series not found".to_string()), + }; + } + Err(e) => { + return BulkTrackForReleasesItem { + series_id, + outcome: "errored".to_string(), + detail: Some(format!("lookup failed: {}", e)), + }; + } + }; + + // No tracking row at all -> nothing to do, treat as already in target state. + let already_untracked = SeriesTrackingRepository::get(&state.db, series_id) + .await + .ok() + .flatten() + .map(|r| !r.tracked) + .unwrap_or(true); + if already_untracked { + return BulkTrackForReleasesItem { + series_id, + outcome: "skipped".to_string(), + detail: Some("already untracked".to_string()), + }; + } + + let update = TrackingUpdate { + tracked: Some(false), + ..Default::default() + }; + if let Err(e) = SeriesTrackingRepository::upsert(&state.db, series_id, update).await { + return BulkTrackForReleasesItem { + series_id, + outcome: "errored".to_string(), + detail: Some(format!("upsert failed: {}", e)), + }; + } + + let event = EntityChangeEvent { + event: EntityEvent::SeriesUpdated { + series_id, + library_id: series.library_id, + fields: Some(vec!["tracking".to_string()]), + }, + timestamp: Utc::now(), + user_id: Some(auth.user_id), + }; + let _ = state.event_broadcaster.emit(event); + + BulkTrackForReleasesItem { + series_id, + outcome: "untracked".to_string(), + detail: None, + } +} + // ============================================================================ // Thumbnail Bulk Handlers // ============================================================================ diff --git a/src/api/routes/v1/handlers/mod.rs b/src/api/routes/v1/handlers/mod.rs index 76b77dc1..5d45f8bb 100644 --- 
a/src/api/routes/v1/handlers/mod.rs
+++ b/src/api/routes/v1/handlers/mod.rs
@@ -61,6 +61,7 @@ pub mod plugin_storage;
 pub mod plugins;
 pub mod read_progress;
 pub mod recommendations;
+pub mod releases;
 pub mod scan;
 pub mod series;
 pub mod series_exports;
@@ -69,6 +70,7 @@ pub mod setup;
 pub mod sharing_tags;
 pub mod task_metrics;
 pub mod task_queue;
+pub mod tracking;
 pub mod user_plugins;
 pub mod user_preferences;
 pub mod users;
diff --git a/src/api/routes/v1/handlers/releases.rs b/src/api/routes/v1/handlers/releases.rs
new file mode 100644
index 00000000..8a1eb88e
--- /dev/null
+++ b/src/api/routes/v1/handlers/releases.rs
@@ -0,0 +1,1220 @@
+//! HTTP handlers for the release ledger and release-source admin endpoints.
+//!
+//! Three groups of endpoints:
+//!
+//! 1. Per-series ledger reads (`GET /series/{id}/releases`) - read tracked
+//!    series releases for the series detail Releases tab.
+//! 2. Inbox + state transitions (`GET /releases`, `PATCH /releases/{id}`,
+//!    `POST /releases/{id}/dismiss|mark-acquired`) - cross-series inbox UI.
+//! 3. Source admin (`GET /release-sources`, `PATCH /release-sources/{id}`,
+//!    `POST /release-sources/{id}/poll-now`) - admin-only source management.
+//!
+//! Phase 2 kept `poll-now` as a stub returning HTTP 501; Phase 4 wires it
+//! into the task queue (see `PollNowResponse` for the enqueue response).
+ +use axum::{ + Json, + extract::{Path, Query, State}, + http::StatusCode, + response::Response, +}; +use chrono::Utc; +use serde::Deserialize; +use std::sync::Arc; +use uuid::Uuid; + +use super::super::dto::common::{ + DEFAULT_PAGE, DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE, PaginatedResponse, PaginationLinkBuilder, +}; +use super::super::dto::release::{ + BulkReleaseAction, BulkReleaseActionRequest, BulkReleaseActionResponse, DeleteReleaseResponse, + PollNowResponse, ReleaseFacetsResponse, ReleaseLanguageFacetDto, ReleaseLedgerEntryDto, + ReleaseLedgerListResponse, ReleaseLibraryFacetDto, ReleaseSeriesFacetDto, ReleaseSourceDto, + ReleaseSourceListResponse, ResetReleaseSourceResponse, UpdateReleaseLedgerEntryRequest, + UpdateReleaseSourceRequest, +}; +use super::paginated_response; +use crate::api::{ + error::ApiError, + extractors::{AuthContext, AuthState}, + permissions::Permission, +}; +use crate::db::entities::release_ledger::state as ledger_state; +use crate::db::repositories::{ + LedgerInboxFilter, LibraryRepository, PluginsRepository, ReleaseLedgerRepository, + ReleaseSourceRepository, ReleaseSourceUpdate, SeriesRepository, +}; +use crate::events::{EntityChangeEvent, EntityEvent}; + +/// Hydrate ledger rows with series titles via a single batched lookup. +/// +/// The DTO carries `series_title` so the inbox UI can render a human label +/// without a follow-up call. We do this in the handler (rather than a SQL +/// JOIN in the repo) to keep the repository surface narrow and reuse the +/// existing `SeriesRepository::get_by_ids` batch query. 
async fn hydrate_ledger_dtos(
    db: &sea_orm::DatabaseConnection,
    rows: Vec<crate::db::entities::release_ledger::Model>,
) -> Result<Vec<ReleaseLedgerEntryDto>, ApiError> {
    // Collect the distinct series IDs referenced by the rows; sort + dedup
    // keeps the batch lookup small and its argument order deterministic.
    let mut series_ids: Vec<Uuid> = rows.iter().map(|r| r.series_id).collect();
    series_ids.sort_unstable();
    series_ids.dedup();

    // Empty input short-circuits to avoid an empty-IN-list query.
    let title_by_id: std::collections::HashMap<Uuid, String> = if series_ids.is_empty() {
        std::collections::HashMap::new()
    } else {
        SeriesRepository::get_by_ids(db, &series_ids)
            .await
            .map_err(|e| ApiError::Internal(format!("Failed to load series titles: {}", e)))?
            .into_iter()
            .map(|s| (s.id, s.name))
            .collect()
    };

    // A series missing from the map (e.g. deleted concurrently) yields an
    // empty title rather than an error; the DTO still carries series_id.
    Ok(rows
        .into_iter()
        .map(|row| {
            let title = title_by_id.get(&row.series_id).cloned().unwrap_or_default();
            ReleaseLedgerEntryDto::from_model_with_series_title(row, title)
        })
        .collect())
}

// =============================================================================
// Per-series ledger
// =============================================================================

/// Query parameters for the per-series ledger view.
#[derive(Debug, Deserialize, utoipa::IntoParams)]
#[serde(rename_all = "camelCase")]
#[into_params(rename_all = "camelCase")]
pub struct SeriesReleaseListParams {
    /// Filter by state. Defaults to all states (no filter) so the per-series
    /// view shows the full history.
    #[serde(default)]
    pub state: Option<String>,
    /// 1-indexed page number.
    #[serde(default = "default_page")]
    pub page: u64,
    /// Items per page (max 500, default 50).
    #[serde(default = "default_page_size")]
    pub page_size: u64,
}

/// Serde default: first page.
fn default_page() -> u64 {
    DEFAULT_PAGE
}

/// Serde default: standard page size.
fn default_page_size() -> u64 {
    DEFAULT_PAGE_SIZE
}

/// List release-ledger entries for a series.
#[utoipa::path(
    get,
    path = "/api/v1/series/{series_id}/releases",
    params(
        ("series_id" = Uuid, Path, description = "Series ID"),
        SeriesReleaseListParams,
    ),
    responses(
        (status = 200, description = "Paginated ledger entries for the series", body = PaginatedResponse<ReleaseLedgerEntryDto>),
        (status = 404, description = "Series not found"),
        (status = 403, description = "Forbidden"),
    ),
    security(
        ("jwt_bearer" = []),
        ("api_key" = [])
    ),
    tag = "Releases"
)]
pub async fn list_series_releases(
    State(state): State<Arc<AuthState>>,
    auth: AuthContext,
    Path(series_id): Path<Uuid>,
    Query(params): Query<SeriesReleaseListParams>,
) -> Result<Response, ApiError> {
    auth.require_permission(&Permission::SeriesRead)?;

    // 404 for unknown series; the model is also reused below for the DTO
    // series_title, saving a second lookup.
    let series = SeriesRepository::get_by_id(&state.db, series_id)
        .await
        .map_err(|e| ApiError::Internal(format!("Failed to fetch series: {}", e)))?
        .ok_or_else(|| ApiError::NotFound("Series not found".to_string()))?;

    let page = params.page.max(1);
    let page_size = params.page_size.clamp(1, MAX_PAGE_SIZE);
    let offset = (page - 1) * page_size;

    // Validate state filter if present.
    if let Some(ref s) = params.state
        && !ledger_state::is_valid(s)
    {
        return Err(ApiError::BadRequest(format!("invalid state filter: {}", s)));
    }

    let rows = ReleaseLedgerRepository::list_for_series(
        &state.db,
        series_id,
        params.state.as_deref(),
        page_size,
        offset,
    )
    .await
    .map_err(|e| ApiError::Internal(format!("Failed to list releases: {}", e)))?;

    // Total comes from a count query that respects the same state filter.
    let filter = LedgerInboxFilter {
        state: params.state.clone(),
        series_id: Some(series_id),
        ..Default::default()
    };
    // count_inbox always filters by state; if the caller didn't pass one, we
    // fall back to counting all states for the series instead of the inbox
    // default (`announced`), by listing with an effectively unbounded limit.
    let total = if params.state.is_some() {
        ReleaseLedgerRepository::count_inbox(&state.db, filter)
            .await
            .map_err(|e| ApiError::Internal(format!("Failed to count releases: {}", e)))?
    } else {
        // BUGFIX: this previously passed a limit of 0; under SQL `LIMIT 0`
        // semantics that returns no rows, so the default (unfiltered) view
        // always reported total == 0 and zero pages. u64::MAX disables the
        // cap so `.len()` reflects the real row count.
        ReleaseLedgerRepository::list_for_series(&state.db, series_id, None, u64::MAX, 0)
            .await
            .map_err(|e| ApiError::Internal(format!("Failed to count releases: {}", e)))?
            .len() as u64
    };

    // page_size is clamped to >= 1 above; the guard only defends against a
    // future change to that clamp.
    let total_pages = if page_size == 0 {
        0
    } else {
        total.div_ceil(page_size)
    };

    // All rows belong to the same series, so we can reuse the title we
    // already loaded for the existence check rather than re-fetching it.
    let dtos: Vec<ReleaseLedgerEntryDto> = rows
        .into_iter()
        .map(|row| ReleaseLedgerEntryDto::from_model_with_series_title(row, series.name.clone()))
        .collect();
    let base_path = format!("/api/v1/series/{}/releases", series_id);
    let mut builder = PaginationLinkBuilder::new(&base_path, page, page_size, total_pages);
    if let Some(ref s) = params.state {
        builder = builder.with_param("state", s);
    }
    let response = PaginatedResponse::with_builder(dtos, page, page_size, total, &builder);
    Ok(paginated_response(response, &builder))
}

// =============================================================================
// Inbox + state transitions
// =============================================================================

/// Query parameters for the cross-series inbox view.
#[derive(Debug, Deserialize, utoipa::IntoParams)]
#[serde(rename_all = "camelCase")]
#[into_params(rename_all = "camelCase")]
pub struct ReleaseInboxParams {
    /// Filter by state. Defaults to `announced`. Pass `all` to disable
    /// state filtering entirely (returns rows in every state).
+ #[serde(default)] + pub state: Option<String>, + #[serde(default)] + pub series_id: Option<Uuid>, + #[serde(default)] + pub source_id: Option<Uuid>, + #[serde(default)] + pub language: Option<String>, + /// Restrict to series belonging to this library. + #[serde(default)] + pub library_id: Option<Uuid>, + #[serde(default = "default_page")] + pub page: u64, + #[serde(default = "default_page_size")] + pub page_size: u64, +} + +/// Cross-series inbox: announced (or filtered) ledger entries, paginated. +#[utoipa::path( + get, + path = "/api/v1/releases", + params(ReleaseInboxParams), + responses( + (status = 200, description = "Paginated inbox entries", body = PaginatedResponse<ReleaseLedgerEntryDto>), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn list_release_inbox( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Query(params): Query<ReleaseInboxParams>, +) -> Result<Response, ApiError> { + auth.require_permission(&Permission::SeriesRead)?; + + let page = params.page.max(1); + let page_size = params.page_size.clamp(1, MAX_PAGE_SIZE); + let offset = (page - 1) * page_size; + + // `all` is a sentinel meaning "no state filter"; otherwise validate + // against the canonical set. 
+ let all_states = matches!(params.state.as_deref(), Some("all")); + let normalised_state = if all_states { + None + } else { + params.state.clone() + }; + if let Some(ref s) = normalised_state + && !ledger_state::is_valid(s) + { + return Err(ApiError::BadRequest(format!("invalid state filter: {}", s))); + } + + let filter = LedgerInboxFilter { + state: normalised_state, + all_states, + series_id: params.series_id, + source_id: params.source_id, + language: params.language.clone(), + library_id: params.library_id, + }; + let rows = ReleaseLedgerRepository::list_inbox(&state.db, filter.clone(), page_size, offset) + .await + .map_err(|e| ApiError::Internal(format!("Failed to list inbox: {}", e)))?; + let total = ReleaseLedgerRepository::count_inbox(&state.db, filter) + .await + .map_err(|e| ApiError::Internal(format!("Failed to count inbox: {}", e)))?; + let total_pages = if page_size == 0 { + 0 + } else { + total.div_ceil(page_size) + }; + + let dtos = hydrate_ledger_dtos(&state.db, rows).await?; + let mut builder = PaginationLinkBuilder::new("/api/v1/releases", page, page_size, total_pages); + if let Some(ref s) = params.state { + builder = builder.with_param("state", s); + } + if let Some(sid) = params.series_id { + builder = builder.with_param("seriesId", &sid.to_string()); + } + if let Some(src) = params.source_id { + builder = builder.with_param("sourceId", &src.to_string()); + } + if let Some(ref lang) = params.language { + builder = builder.with_param("language", lang); + } + if let Some(lib) = params.library_id { + builder = builder.with_param("libraryId", &lib.to_string()); + } + let response = PaginatedResponse::with_builder(dtos, page, page_size, total, &builder); + Ok(paginated_response(response, &builder)) +} + +/// PATCH a ledger entry's state (general-purpose state transition). 
+#[utoipa::path( + patch, + path = "/api/v1/releases/{release_id}", + params( + ("release_id" = Uuid, Path, description = "Ledger entry ID") + ), + request_body = UpdateReleaseLedgerEntryRequest, + responses( + (status = 200, description = "Updated ledger entry", body = ReleaseLedgerEntryDto), + (status = 400, description = "Invalid state"), + (status = 404, description = "Ledger entry not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn update_release_entry( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(release_id): Path<Uuid>, + Json(request): Json<UpdateReleaseLedgerEntryRequest>, +) -> Result<Json<ReleaseLedgerEntryDto>, ApiError> { + auth.require_permission(&Permission::SeriesWrite)?; + + let new_state = request + .state + .ok_or_else(|| ApiError::BadRequest("state is required".to_string()))?; + + update_state_internal(&state, auth.user_id, release_id, &new_state).await +} + +/// Convenience POST: dismiss a release. +#[utoipa::path( + post, + path = "/api/v1/releases/{release_id}/dismiss", + params( + ("release_id" = Uuid, Path, description = "Ledger entry ID") + ), + responses( + (status = 200, description = "Release dismissed", body = ReleaseLedgerEntryDto), + (status = 404, description = "Ledger entry not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn dismiss_release( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(release_id): Path<Uuid>, +) -> Result<Json<ReleaseLedgerEntryDto>, ApiError> { + auth.require_permission(&Permission::SeriesWrite)?; + update_state_internal(&state, auth.user_id, release_id, ledger_state::DISMISSED).await +} + +/// Convenience POST: mark a release acquired. 
+#[utoipa::path( + post, + path = "/api/v1/releases/{release_id}/mark-acquired", + params( + ("release_id" = Uuid, Path, description = "Ledger entry ID") + ), + responses( + (status = 200, description = "Release marked acquired", body = ReleaseLedgerEntryDto), + (status = 404, description = "Ledger entry not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn mark_release_acquired( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(release_id): Path<Uuid>, +) -> Result<Json<ReleaseLedgerEntryDto>, ApiError> { + auth.require_permission(&Permission::SeriesWrite)?; + update_state_internal( + &state, + auth.user_id, + release_id, + ledger_state::MARKED_ACQUIRED, + ) + .await +} + +async fn update_state_internal( + state: &Arc<AuthState>, + user_id: Uuid, + release_id: Uuid, + new_state: &str, +) -> Result<Json<ReleaseLedgerEntryDto>, ApiError> { + if !ledger_state::is_valid(new_state) { + return Err(ApiError::BadRequest(format!( + "invalid state: {}", + new_state + ))); + } + + // Fetch the row first so we have series_id for the SSE event. + let existing = ReleaseLedgerRepository::get_by_id(&state.db, release_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch ledger entry: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Ledger entry not found".to_string()))?; + let series_id = existing.series_id; + + let updated = ReleaseLedgerRepository::set_state(&state.db, release_id, new_state) + .await + .map_err(|e| { + if e.to_string().contains("invalid state") { + ApiError::BadRequest(e.to_string()) + } else { + ApiError::Internal(format!("Failed to update ledger entry: {}", e)) + } + })?; + + // Look up the series for both the SSE event (library_id) and the DTO + // (series_title). 
If the series was deleted concurrently we still return
    // the updated row, dropping the event and using an empty title — the
    // ledger row's series_id remains valid for navigation.
    let series = SeriesRepository::get_by_id(&state.db, series_id)
        .await
        .ok()
        .flatten();
    if let Some(ref s) = series {
        let event = EntityChangeEvent {
            event: EntityEvent::SeriesUpdated {
                series_id,
                library_id: s.library_id,
                fields: Some(vec!["releases".to_string()]),
            },
            timestamp: Utc::now(),
            user_id: Some(user_id),
        };
        let _ = state.event_broadcaster.emit(event);
    }

    let title = series.map(|s| s.name).unwrap_or_default();
    Ok(Json(ReleaseLedgerEntryDto::from_model_with_series_title(
        updated, title,
    )))
}

// =============================================================================
// Inbox facets
// =============================================================================

/// Query parameters for the inbox facets endpoint.
///
/// The same shape as [`ReleaseInboxParams`] minus pagination. For each
/// facet dimension the handler applies all *other* active filters but
/// drops that dimension's own filter before counting (Solr-style facet
/// exclusion), so a dropdown keeps offering every compatible option
/// instead of collapsing to the value the user already picked — see the
/// per-dimension filter construction in `list_release_facets`.
#[derive(Debug, Deserialize, utoipa::IntoParams)]
#[serde(rename_all = "camelCase")]
#[into_params(rename_all = "camelCase")]
pub struct ReleaseFacetsParams {
    /// Optional state filter; `all` disables state filtering entirely.
    #[serde(default)]
    pub state: Option<String>,
    #[serde(default)]
    pub series_id: Option<Uuid>,
    #[serde(default)]
    pub source_id: Option<Uuid>,
    #[serde(default)]
    pub language: Option<String>,
    #[serde(default)]
    pub library_id: Option<Uuid>,
}

/// Distinct values present in the inbox under the given filters.
///
/// Returns the languages, libraries, and series that have at least one
/// matching ledger row. 
The frontend uses this to populate cascading +/// Select dropdowns so users never have to type a UUID and never see +/// dropdown options that would yield zero results. +#[utoipa::path( + get, + path = "/api/v1/releases/facets", + params(ReleaseFacetsParams), + responses( + (status = 200, description = "Facets for the inbox view", body = ReleaseFacetsResponse), + (status = 400, description = "Invalid state filter"), + (status = 403, description = "SeriesRead permission required"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn list_release_facets( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Query(params): Query<ReleaseFacetsParams>, +) -> Result<Json<ReleaseFacetsResponse>, ApiError> { + auth.require_permission(&Permission::SeriesRead)?; + + let all_states = matches!(params.state.as_deref(), Some("all")); + let normalised_state = if all_states { + None + } else { + params.state.clone() + }; + if let Some(ref s) = normalised_state + && !ledger_state::is_valid(s) + { + return Err(ApiError::BadRequest(format!("invalid state filter: {}", s))); + } + + let base_filter = LedgerInboxFilter { + state: normalised_state, + all_states, + series_id: params.series_id, + source_id: params.source_id, + language: params.language.clone(), + library_id: params.library_id, + }; + + // Each facet excludes its own dimension from the filter so it always + // shows the full set of options (Solr-style facet exclusion). Without + // this, picking a series in the dropdown would collapse the series + // dropdown to that single series. 
+ let series_filter = LedgerInboxFilter { + series_id: None, + ..base_filter.clone() + }; + let library_filter = LedgerInboxFilter { + library_id: None, + ..base_filter.clone() + }; + let language_filter = LedgerInboxFilter { + language: None, + ..base_filter.clone() + }; + + let series_facets = ReleaseLedgerRepository::list_series_facets(&state.db, series_filter) + .await + .map_err(|e| ApiError::Internal(format!("Failed to list series facets: {}", e)))?; + let library_facets = ReleaseLedgerRepository::list_library_facets(&state.db, library_filter) + .await + .map_err(|e| ApiError::Internal(format!("Failed to list library facets: {}", e)))?; + let language_facets = ReleaseLedgerRepository::list_language_facets(&state.db, language_filter) + .await + .map_err(|e| ApiError::Internal(format!("Failed to list language facets: {}", e)))?; + + // Hydrate series titles + library names in two batched lookups. + let mut series_ids: Vec<Uuid> = series_facets.iter().map(|f| f.series_id).collect(); + series_ids.sort_unstable(); + series_ids.dedup(); + let mut library_ids: Vec<Uuid> = series_facets + .iter() + .map(|f| f.library_id) + .chain(library_facets.iter().map(|f| f.library_id)) + .collect(); + library_ids.sort_unstable(); + library_ids.dedup(); + + let series_models = SeriesRepository::get_by_ids(&state.db, &series_ids) + .await + .map_err(|e| ApiError::Internal(format!("Failed to load series: {}", e)))?; + let series_titles: std::collections::HashMap<Uuid, String> = + series_models.into_iter().map(|s| (s.id, s.name)).collect(); + let library_map = LibraryRepository::get_by_ids(&state.db, &library_ids) + .await + .map_err(|e| ApiError::Internal(format!("Failed to load libraries: {}", e)))?; + + let series_dtos: Vec<ReleaseSeriesFacetDto> = series_facets + .into_iter() + .map(|f| { + let library_name = library_map + .get(&f.library_id) + .map(|l| l.name.clone()) + .unwrap_or_default(); + ReleaseSeriesFacetDto { + series_id: f.series_id, + series_title: 
series_titles.get(&f.series_id).cloned().unwrap_or_default(), + library_id: f.library_id, + library_name, + count: f.count, + } + }) + .collect(); + let library_dtos: Vec<ReleaseLibraryFacetDto> = library_facets + .into_iter() + .map(|f| { + let library_name = library_map + .get(&f.library_id) + .map(|l| l.name.clone()) + .unwrap_or_default(); + ReleaseLibraryFacetDto { + library_id: f.library_id, + library_name, + count: f.count, + } + }) + .collect(); + let language_dtos: Vec<ReleaseLanguageFacetDto> = language_facets + .into_iter() + .map(|f| ReleaseLanguageFacetDto { + language: f.language, + count: f.count, + }) + .collect(); + + Ok(Json(ReleaseFacetsResponse { + languages: language_dtos, + libraries: library_dtos, + series: series_dtos, + })) +} + +// ============================================================================= +// Delete + bulk +// ============================================================================= + +/// Hard-delete a single ledger row. +/// +/// Also clears the source's `etag` so the next poll bypasses +/// `If-None-Match` and re-records the deleted row in `announced` state +/// (assuming the upstream still lists it). This is the lever users want +/// when they marked something incorrectly and need to "get it back". 
+#[utoipa::path( + delete, + path = "/api/v1/releases/{release_id}", + params( + ("release_id" = Uuid, Path, description = "Ledger entry ID") + ), + responses( + (status = 200, description = "Release deleted", body = DeleteReleaseResponse), + (status = 404, description = "Ledger entry not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn delete_release( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(release_id): Path<Uuid>, +) -> Result<Json<DeleteReleaseResponse>, ApiError> { + auth.require_permission(&Permission::SeriesWrite)?; + + // Look up the row first to capture series_id (for SSE) and source_id + // (for the etag clear). Returning a clean 404 here matches the rest + // of the release endpoints. + let existing = ReleaseLedgerRepository::get_by_id(&state.db, release_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch ledger entry: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Ledger entry not found".to_string()))?; + let series_id = existing.series_id; + let source_id = existing.source_id; + + let deleted = ReleaseLedgerRepository::delete(&state.db, release_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to delete ledger entry: {}", e)))?; + + // Best-effort etag clear: if it fails we still report the delete + // succeeded (the row is already gone). User can manually `reset` the + // source if they really need an etag flush. 
+ if deleted && let Err(e) = ReleaseSourceRepository::clear_etag(&state.db, source_id).await { + tracing::warn!( + "Failed to clear etag for source {} after deleting release {}: {}", + source_id, + release_id, + e + ); + } + + if deleted && let Ok(Some(s)) = SeriesRepository::get_by_id(&state.db, series_id).await { + let event = EntityChangeEvent { + event: EntityEvent::SeriesUpdated { + series_id, + library_id: s.library_id, + fields: Some(vec!["releases".to_string()]), + }, + timestamp: Utc::now(), + user_id: Some(auth.user_id), + }; + let _ = state.event_broadcaster.emit(event); + } + + Ok(Json(DeleteReleaseResponse { deleted })) +} + +/// Apply an action to a batch of ledger rows. +/// +/// `dismiss`, `mark-acquired`, `ignore`, and `reset` all set state +/// in-place. `delete` removes the rows and clears the affected sources' +/// etags so the next poll re-fetches without `If-None-Match`. All run +/// as bulk SQL (no per-row round trips), so this scales to thousands of +/// rows in one call. +#[utoipa::path( + post, + path = "/api/v1/releases/bulk", + request_body = BulkReleaseActionRequest, + responses( + (status = 200, description = "Bulk action applied", body = BulkReleaseActionResponse), + (status = 400, description = "Empty ID list or invalid action"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn bulk_release_action( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Json(request): Json<BulkReleaseActionRequest>, +) -> Result<Json<BulkReleaseActionResponse>, ApiError> { + auth.require_permission(&Permission::SeriesWrite)?; + + if request.ids.is_empty() { + return Err(ApiError::BadRequest("ids must not be empty".to_string())); + } + // Soft cap to keep an unbounded list from melting the DB. 500 matches + // MAX_PAGE_SIZE so a user can bulk-action a full inbox page. 
+ const MAX_BULK: usize = 500; + if request.ids.len() > MAX_BULK { + return Err(ApiError::BadRequest(format!( + "too many ids: {} (max {})", + request.ids.len(), + MAX_BULK + ))); + } + + // Snapshot affected sources + series before mutating, so we can clear + // etags (delete) and emit SSE events (all actions). For dismiss/ + // mark-acquired we don't strictly need the source list, but loading + // rows once keeps the code path uniform and lets us emit one + // SeriesUpdated event per affected series. + let rows_before = ReleaseLedgerRepository::find_by_ids(&state.db, &request.ids) + .await + .map_err(|e| ApiError::Internal(format!("Failed to load ledger rows: {}", e)))?; + let mut affected_series: Vec<Uuid> = rows_before.iter().map(|r| r.series_id).collect(); + affected_series.sort_unstable(); + affected_series.dedup(); + let mut affected_sources: Vec<Uuid> = rows_before.iter().map(|r| r.source_id).collect(); + affected_sources.sort_unstable(); + affected_sources.dedup(); + + let affected: u64 = match request.action { + BulkReleaseAction::Dismiss => ReleaseLedgerRepository::set_state_many( + &state.db, + &request.ids, + ledger_state::DISMISSED, + ) + .await + .map_err(|e| ApiError::Internal(format!("Failed to dismiss releases: {}", e)))?, + BulkReleaseAction::MarkAcquired => ReleaseLedgerRepository::set_state_many( + &state.db, + &request.ids, + ledger_state::MARKED_ACQUIRED, + ) + .await + .map_err(|e| ApiError::Internal(format!("Failed to mark releases acquired: {}", e)))?, + BulkReleaseAction::Ignore => { + ReleaseLedgerRepository::set_state_many(&state.db, &request.ids, ledger_state::IGNORED) + .await + .map_err(|e| ApiError::Internal(format!("Failed to ignore releases: {}", e)))? 
+ } + BulkReleaseAction::Reset => ReleaseLedgerRepository::set_state_many( + &state.db, + &request.ids, + ledger_state::ANNOUNCED, + ) + .await + .map_err(|e| ApiError::Internal(format!("Failed to reset releases: {}", e)))?, + BulkReleaseAction::Delete => { + let count = ReleaseLedgerRepository::delete_many(&state.db, &request.ids) + .await + .map_err(|e| ApiError::Internal(format!("Failed to delete releases: {}", e)))?; + if count > 0 + && let Err(e) = + ReleaseSourceRepository::clear_etag_many(&state.db, &affected_sources).await + { + tracing::warn!( + "Failed to clear etags for {} sources after bulk delete: {}", + affected_sources.len(), + e + ); + } + count + } + }; + + // Emit one SeriesUpdated event per affected series so any open client + // refreshes the per-series Releases panel + the inbox badge. + if affected > 0 { + let series_models = SeriesRepository::get_by_ids(&state.db, &affected_series) + .await + .ok() + .unwrap_or_default(); + for s in series_models { + let event = EntityChangeEvent { + event: EntityEvent::SeriesUpdated { + series_id: s.id, + library_id: s.library_id, + fields: Some(vec!["releases".to_string()]), + }, + timestamp: Utc::now(), + user_id: Some(auth.user_id), + }; + let _ = state.event_broadcaster.emit(event); + } + } + + Ok(Json(BulkReleaseActionResponse { + affected, + action: request.action, + })) +} + +// ============================================================================= +// Source admin +// ============================================================================= + +/// List all configured release sources (admin-only). 
+#[utoipa::path( + get, + path = "/api/v1/release-sources", + responses( + (status = 200, description = "Source list", body = ReleaseSourceListResponse), + (status = 403, description = "PluginsManage permission required"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn list_release_sources( + State(state): State<Arc<AuthState>>, + auth: AuthContext, +) -> Result<Json<ReleaseSourceListResponse>, ApiError> { + auth.require_permission(&Permission::PluginsManage)?; + let sources = ReleaseSourceRepository::list_all(&state.db) + .await + .map_err(|e| ApiError::Internal(format!("Failed to list sources: {}", e)))?; + let server_default = resolve_server_default_cron(&state.db).await; + Ok(Json(ReleaseSourceListResponse { + sources: sources + .into_iter() + .map(|m| ReleaseSourceDto::from_model_with_default(m, &server_default)) + .collect(), + })) +} + +/// Fetch the server-wide default cron schedule for release-source polling. +/// Falls back to the compile-time default on a settings-fetch failure +/// rather than 500-ing the request — the field is informational on the +/// response shape. +async fn resolve_server_default_cron(db: &sea_orm::DatabaseConnection) -> String { + use crate::services::release::schedule::{DEFAULT_CRON_SCHEDULE, read_default_cron_schedule}; + use crate::services::settings::SettingsService; + match SettingsService::new(db.clone()).await { + Ok(svc) => read_default_cron_schedule(&svc).await, + Err(e) => { + tracing::warn!( + "Failed to load settings service for cron resolution; using compile-time default: {}", + e + ); + DEFAULT_CRON_SCHEDULE.to_string() + } + } +} + +/// PATCH a release source (admin-only). +/// +/// Toggle `enabled`, override `cronSchedule`, or rename `displayName`. +/// Sending `cronSchedule: null` clears the override and reverts the row to +/// inheriting the server-wide `release_tracking.default_cron_schedule`. 
+#[utoipa::path( + patch, + path = "/api/v1/release-sources/{source_id}", + params( + ("source_id" = Uuid, Path, description = "Source ID") + ), + request_body = UpdateReleaseSourceRequest, + responses( + (status = 200, description = "Source updated", body = ReleaseSourceDto), + (status = 400, description = "Invalid update payload"), + (status = 404, description = "Source not found"), + (status = 403, description = "PluginsManage permission required"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn update_release_source( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(source_id): Path<Uuid>, + Json(request): Json<UpdateReleaseSourceRequest>, +) -> Result<Json<ReleaseSourceDto>, ApiError> { + auth.require_permission(&Permission::PluginsManage)?; + + // Confirm existence to return a clean 404 instead of a generic 500. + ReleaseSourceRepository::get_by_id(&state.db, source_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch source: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Release source not found".to_string()))?; + + let update = ReleaseSourceUpdate { + display_name: request.display_name, + enabled: request.enabled, + cron_schedule: request.cron_schedule, + config: None, // config edits go through plugin admin, not here + }; + + let updated = ReleaseSourceRepository::update(&state.db, source_id, update) + .await + .map_err(|e| { + let msg = e.to_string(); + if msg.to_lowercase().contains("cron") { + ApiError::BadRequest(msg) + } else { + ApiError::Internal(format!("Failed to update source: {}", e)) + } + })?; + + // Best-effort reconcile so the scheduler picks up enable/disable or + // interval changes without a restart. Reconcile failures don't block + // the API response — the change is durable in the DB and the next + // scheduler restart picks it up. 
+ if let Some(ref scheduler) = state.scheduler { + let mut guard = scheduler.lock().await; + if let Err(e) = guard.reconcile_release_sources().await { + tracing::warn!( + "Failed to reconcile release-source schedules after update: {}", + e + ); + } + } + + let server_default = resolve_server_default_cron(&state.db).await; + Ok(Json(ReleaseSourceDto::from_model_with_default( + updated, + &server_default, + ))) +} + +/// Trigger a manual poll for a source. +/// +/// Enqueues a `PollReleaseSource` task immediately. The task runs +/// asynchronously via the worker pool; the response confirms the enqueue, +/// not the poll outcome. +#[utoipa::path( + post, + path = "/api/v1/release-sources/{source_id}/poll-now", + params( + ("source_id" = Uuid, Path, description = "Source ID") + ), + responses( + (status = 202, description = "Poll task enqueued", body = PollNowResponse), + (status = 404, description = "Source not found"), + (status = 403, description = "PluginsManage permission required"), + (status = 409, description = "Source disabled"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn poll_release_source_now( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(source_id): Path<Uuid>, +) -> Result<(StatusCode, Json<PollNowResponse>), ApiError> { + auth.require_permission(&Permission::PluginsManage)?; + + // Confirm the source exists. + let source = ReleaseSourceRepository::get_by_id(&state.db, source_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch source: {}", e)))? 
+ .ok_or_else(|| ApiError::NotFound("Release source not found".to_string()))?; + + if !source.enabled { + return Err(ApiError::Conflict(format!( + "Source '{}' is disabled; enable it before polling", + source.display_name + ))); + } + + let outcome = crate::scheduler::release_sources::enqueue_poll_now(&state.db, source_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to enqueue poll task: {}", e)))?; + + let (status, message) = if outcome.coalesced { + ( + "already_running".to_string(), + format!( + "A poll for this source is already running (task_id={}); coalesced", + outcome.task_id + ), + ) + } else { + ( + "enqueued".to_string(), + format!("Poll task enqueued (task_id={})", outcome.task_id), + ) + }; + + Ok(( + StatusCode::ACCEPTED, + Json(PollNowResponse { status, message }), + )) +} + +/// Reset a release source to a clean slate. +/// +/// Deletes every `release_ledger` row owned by the source and clears the +/// source's transient poll state (`etag`, `last_polled_at`, `last_error`, +/// `last_error_at`, `last_summary`). User-managed fields (`enabled`, +/// `cron_schedule`, `display_name`, `config`) are preserved. +/// +/// Intended for testing/troubleshooting: after a reset, the next poll +/// fetches the upstream feed without an `If-None-Match` header (so no 304 +/// short-circuit) and re-records every release as `announced`. Does NOT +/// auto-enqueue a poll — call `POST /release-sources/{id}/poll-now` after +/// resetting if you want immediate re-fetch. 
+#[utoipa::path(
+    post,
+    path = "/api/v1/release-sources/{source_id}/reset",
+    params(
+        ("source_id" = Uuid, Path, description = "Source ID")
+    ),
+    responses(
+        (status = 200, description = "Source reset", body = ResetReleaseSourceResponse),
+        (status = 404, description = "Source not found"),
+        (status = 403, description = "PluginsManage permission required"),
+    ),
+    security(
+        ("jwt_bearer" = []),
+        ("api_key" = [])
+    ),
+    tag = "Releases"
+)]
+pub async fn reset_release_source(
+    State(state): State<Arc<AuthState>>,
+    auth: AuthContext,
+    Path(source_id): Path<Uuid>,
+) -> Result<Json<ResetReleaseSourceResponse>, ApiError> {
+    auth.require_permission(&Permission::PluginsManage)?;
+
+    // Confirm existence to return a clean 404.
+    ReleaseSourceRepository::get_by_id(&state.db, source_id)
+        .await
+        .map_err(|e| ApiError::Internal(format!("Failed to fetch source: {}", e)))?
+        .ok_or_else(|| ApiError::NotFound("Release source not found".to_string()))?;
+
+    let deleted = ReleaseLedgerRepository::delete_by_source(&state.db, source_id)
+        .await
+        .map_err(|e| ApiError::Internal(format!("Failed to clear ledger: {}", e)))?;
+
+    ReleaseSourceRepository::clear_poll_state(&state.db, source_id)
+        .await
+        .map_err(|e| ApiError::Internal(format!("Failed to reset source state: {}", e)))?;
+
+    Ok(Json(ResetReleaseSourceResponse {
+        deleted_ledger_entries: deleted,
+    }))
+}
+
+// =============================================================================
+// OpenAPI placeholder
+// =============================================================================
+
+// `ReleaseLedgerListResponse` is unused in handlers (we return paginated
+// responses) but kept in the DTO module for simpler clients. Reference it
+// below so the dead-code lint doesn't flag the otherwise-unused DTO.
+#[allow(dead_code)] +fn _opening_api_keepalive() -> ReleaseLedgerListResponse { + ReleaseLedgerListResponse { entries: vec![] } +} + +// ============================================================================= +// Applicability lookup +// ============================================================================= + +/// Query string for `GET /api/v1/release-sources/applicability`. +#[derive(Debug, Deserialize, utoipa::IntoParams)] +#[serde(rename_all = "camelCase")] +pub struct ApplicabilityQuery { + /// Optional library scope. When provided, only plugins that apply to + /// this library are considered (a plugin's `library_ids` field is + /// either empty = all, or contains this UUID). + #[serde(default)] + pub library_id: Option<Uuid>, +} + +/// Response shape for `GET /api/v1/release-sources/applicability`. +#[derive(Debug, serde::Serialize, utoipa::ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ApplicabilityResponse { + /// `true` when at least one enabled `release_source` plugin applies to + /// the requested library (or, if no `libraryId` was supplied, to *any* + /// library). The frontend uses this to decide whether to render the + /// per-series Tracking panel and Releases tab, or to show the + /// bulk-track menu entry. + pub applicable: bool, + /// Plugin display names (or fallback to `name` when no manifest cached + /// yet) of the enabled release-source plugins covering this library. + /// Empty when `applicable` is `false`. Useful for surfacing "Powered by + /// MangaUpdates, Nyaa" hints in the UI. + pub plugin_display_names: Vec<String>, +} + +/// Whether release tracking is available for a given library. +/// +/// Read-only, requires only `SeriesRead`: the response carries no +/// admin-sensitive data (no plugin IDs, no configs, no library +/// allowlists), just the boolean and friendly display names. 
Used by the +/// frontend to: +/// +/// - hide the per-series Tracking panel + Releases tab on libraries with +/// no applicable plugin (cleaner UX); +/// - decide whether to show the "Track for releases" / "Don't track for +/// releases" entries in the bulk-selection menu. +#[utoipa::path( + get, + path = "/api/v1/release-sources/applicability", + params(ApplicabilityQuery), + responses( + (status = 200, description = "Applicability info", body = ApplicabilityResponse), + (status = 403, description = "SeriesRead permission required"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Releases" +)] +pub async fn get_release_tracking_applicability( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + axum::extract::Query(query): axum::extract::Query<ApplicabilityQuery>, +) -> Result<Json<ApplicabilityResponse>, ApiError> { + auth.require_permission(&Permission::SeriesRead)?; + + let plugins = PluginsRepository::get_enabled(&state.db) + .await + .map_err(|e| ApiError::Internal(format!("Failed to load plugins: {}", e)))?; + + let mut display_names: Vec<String> = Vec::new(); + for plugin in plugins { + // Capability check via the cached manifest. We deserialize the + // shape lightly via the canonical `PluginManifest` struct so + // a malformed manifest doesn't claim release-source capability. + let Some(manifest_json) = plugin.manifest.as_ref() else { + continue; + }; + let Ok(manifest) = serde_json::from_value::< + crate::services::plugin::protocol::PluginManifest, + >(manifest_json.clone()) else { + continue; + }; + if manifest.capabilities.release_source.is_none() { + continue; + } + + // Library-scope check. The DB column is JSON; an empty array means + // "all libraries". Anything not deserializing into a Vec<Uuid> + // (NULL, non-array, etc.) is treated as "all libraries" too — + // that matches the existing convention elsewhere in the codebase. 
+ let library_ids: Vec<Uuid> = + serde_json::from_value(plugin.library_ids.clone()).unwrap_or_default(); + if let Some(lib) = query.library_id + && !library_ids.is_empty() + && !library_ids.contains(&lib) + { + continue; + } + + let label = if plugin.display_name.trim().is_empty() { + plugin.name.clone() + } else { + plugin.display_name.clone() + }; + display_names.push(label); + } + + Ok(Json(ApplicabilityResponse { + applicable: !display_names.is_empty(), + plugin_display_names: display_names, + })) +} diff --git a/src/api/routes/v1/handlers/series.rs b/src/api/routes/v1/handlers/series.rs index d9cf6140..17a3b92f 100644 --- a/src/api/routes/v1/handlers/series.rs +++ b/src/api/routes/v1/handlers/series.rs @@ -30,11 +30,14 @@ use crate::db::entities::{series, series_metadata}; use crate::db::repositories::{ AlternateTitleRepository, BookRepository, ExternalLinkRepository, ExternalRatingRepository, GenreRepository, LibraryRepository, ReadProgressRepository, SeriesCoversRepository, - SeriesExternalIdRepository, SeriesMetadataRepository, SeriesRepository, SharingTagRepository, - TagRepository, UserSeriesRatingRepository, + SeriesExternalIdRepository, SeriesMetadataRepository, SeriesRepository, + SeriesTrackingRepository, SharingTagRepository, TagRepository, UserSeriesRatingRepository, }; use crate::events::{EntityChangeEvent, EntityEvent, EntityType}; use crate::require_permission; +use crate::services::release::upstream_gap::{ + UpstreamGap, UpstreamGapInputs, compute_upstream_gap, +}; use crate::utils::{ json_merge_patch, normalize_for_search, parse_custom_metadata, serialize_custom_metadata, validate_custom_metadata_size, @@ -180,6 +183,33 @@ async fn series_to_dto( .map(|m| m.title.clone()) .unwrap_or_else(|| "Unknown Series".to_string()); + // Phase 5 of release-tracking: compute the upstream-publication gap + // signal. Skipped entirely for untracked series. 
+ let tracking = SeriesTrackingRepository::get(db, series.id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch series tracking: {:?}", e)))?; + let external_ids = if tracking.as_ref().map(|t| t.tracked).unwrap_or(false) { + SeriesExternalIdRepository::get_for_series(db, series.id) + .await + .map_err(|e| { + ApiError::Internal(format!("Failed to fetch series external IDs: {:?}", e)) + })? + } else { + Vec::new() + }; + let UpstreamGap { + chapter_gap: upstream_chapter_gap, + volume_gap: upstream_volume_gap, + provider: upstream_gap_provider, + } = compute_upstream_gap(&UpstreamGapInputs { + tracking: tracking.as_ref(), + total_chapter_count: metadata.as_ref().and_then(|m| m.total_chapter_count), + total_volume_count: metadata.as_ref().and_then(|m| m.total_volume_count), + local_max_chapter: aggregates.local_max_chapter, + local_max_volume: aggregates.local_max_volume, + external_ids: &external_ids, + }); + Ok(SeriesDto { id: series.id, library_id: series.library_id, @@ -197,6 +227,9 @@ async fn series_to_dto( selected_cover_source: selected_cover.map(|c| c.source), has_custom_cover: Some(has_custom_cover), unread_count, + upstream_chapter_gap, + upstream_volume_gap, + upstream_gap_provider, created_at: series.created_at, updated_at: series.updated_at, }) @@ -241,6 +274,7 @@ async fn series_to_full_dtos_batched( ext_ratings_map, ext_links_map, ext_ids_map, + tracking_map, ) = tokio::join!( SeriesMetadataRepository::get_by_series_ids(db, &series_ids), SeriesRepository::get_book_counts_for_series_ids(db, &series_ids), @@ -261,6 +295,7 @@ async fn series_to_full_dtos_batched( ExternalRatingRepository::get_for_series_ids(db, &series_ids), ExternalLinkRepository::get_for_series_ids(db, &series_ids), SeriesExternalIdRepository::get_for_series_ids(db, &series_ids), + SeriesTrackingRepository::get_for_series_ids(db, &series_ids), ); // Handle errors @@ -294,6 +329,8 @@ async fn series_to_full_dtos_batched( .map_err(|e| ApiError::Internal(format!("Failed 
to fetch external links: {}", e)))?; let ext_ids_map = ext_ids_map .map_err(|e| ApiError::Internal(format!("Failed to fetch external IDs: {}", e)))?; + let tracking_map = tracking_map + .map_err(|e| ApiError::Internal(format!("Failed to fetch tracking rows: {}", e)))?; // Build full responses let mut results = Vec::with_capacity(series_list.len()); @@ -415,6 +452,24 @@ async fn series_to_full_dtos_batched( .map(|ids| ids.iter().cloned().map(SeriesExternalIdDto::from).collect()) .unwrap_or_default(); + // Phase 5 of release-tracking: upstream-publication gap signal. + let series_external_ids = ext_ids_map + .get(&series_id) + .map(|v| v.as_slice()) + .unwrap_or(&[]); + let UpstreamGap { + chapter_gap: upstream_chapter_gap, + volume_gap: upstream_volume_gap, + provider: upstream_gap_provider, + } = compute_upstream_gap(&UpstreamGapInputs { + tracking: tracking_map.get(&series_id), + total_chapter_count: metadata.total_chapter_count, + total_volume_count: metadata.total_volume_count, + local_max_chapter: aggregates.local_max_chapter, + local_max_volume: aggregates.local_max_volume, + external_ids: series_external_ids, + }); + results.push(FullSeriesResponse { id: series.id, library_id: series.library_id, @@ -423,6 +478,9 @@ async fn series_to_full_dtos_batched( local_max_volume: aggregates.local_max_volume, local_max_chapter: aggregates.local_max_chapter, volumes_owned: aggregates.volumes_owned, + upstream_chapter_gap, + upstream_volume_gap, + upstream_gap_provider, unread_count, path: Some(series.path), selected_cover_source: selected_cover.map(|c| c.source.clone()), diff --git a/src/api/routes/v1/handlers/tracking.rs b/src/api/routes/v1/handlers/tracking.rs new file mode 100644 index 00000000..d7ad2be2 --- /dev/null +++ b/src/api/routes/v1/handlers/tracking.rs @@ -0,0 +1,357 @@ +//! HTTP handlers for release-tracking config + title aliases. +//! +//! Endpoints (all under `/api/v1/series/{series_id}`): +//! 
- `GET /tracking` — read (returns a virtual untracked row when none exists) +//! - `PATCH /tracking` — update (upserts on first write) +//! - `GET /aliases` — list aliases for the series +//! - `POST /aliases` — add a manual alias (idempotent on duplicate) +//! - `DELETE /aliases/{alias_id}` — remove an alias + +use axum::{ + Json, + extract::{Path, State}, + http::StatusCode, +}; +use chrono::Utc; +use std::sync::Arc; +use uuid::Uuid; + +use super::super::dto::tracking::{ + CreateSeriesAliasRequest, SeriesAliasDto, SeriesAliasListResponse, SeriesTrackingDto, + UpdateSeriesTrackingRequest, +}; +use crate::api::{ + error::ApiError, + extractors::{AuthContext, AuthState}, + permissions::Permission, +}; +use crate::db::entities::series_aliases::alias_source; +use crate::db::repositories::{ + SeriesAliasRepository, SeriesRepository, SeriesTrackingRepository, TrackingUpdate, +}; +use crate::events::{EntityChangeEvent, EntityEvent}; +use crate::require_permission; +use crate::services::release::seed::seed_tracking_for_series; + +// ============================================================================= +// Tracking config handlers +// ============================================================================= + +/// Get release-tracking config for a series. +/// +/// Returns a virtual untracked row when no `series_tracking` row exists, so the +/// frontend can render the panel uniformly without special-casing absent rows. 
+#[utoipa::path( + get, + path = "/api/v1/series/{series_id}/tracking", + params( + ("series_id" = Uuid, Path, description = "Series ID") + ), + responses( + (status = 200, description = "Tracking config", body = SeriesTrackingDto), + (status = 404, description = "Series not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Tracking" +)] +pub async fn get_series_tracking( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(series_id): Path<Uuid>, +) -> Result<Json<SeriesTrackingDto>, ApiError> { + require_permission!(auth, Permission::SeriesRead)?; + + SeriesRepository::get_by_id(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch series: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Series not found".to_string()))?; + + let row = SeriesTrackingRepository::get_or_default(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch tracking: {}", e)))?; + Ok(Json(row.into())) +} + +/// Update release-tracking config for a series. +/// +/// Upserts: creates the row on first write, applies the patch otherwise. +/// All fields are optional — omit to leave alone, send `null` on a nullable +/// field to clear it. 
+#[utoipa::path( + patch, + path = "/api/v1/series/{series_id}/tracking", + params( + ("series_id" = Uuid, Path, description = "Series ID") + ), + request_body = UpdateSeriesTrackingRequest, + responses( + (status = 200, description = "Tracking config updated", body = SeriesTrackingDto), + (status = 404, description = "Series not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Tracking" +)] +pub async fn update_series_tracking( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(series_id): Path<Uuid>, + Json(request): Json<UpdateSeriesTrackingRequest>, +) -> Result<Json<SeriesTrackingDto>, ApiError> { + require_permission!(auth, Permission::SeriesWrite)?; + + let series = SeriesRepository::get_by_id(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch series: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Series not found".to_string()))?; + + // Detect the false -> true tracked transition so we can seed defaults + // before applying the user's patch. This eliminates the empty-form UX + // where a freshly-tracked series has no aliases / no latest_known_*. + // + // The user's patch is applied *after* the seed, so any explicit value + // they sent (e.g. a custom latest_known_chapter override) still wins. + let was_tracked = SeriesTrackingRepository::get(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch tracking: {}", e)))? + .map(|r| r.tracked) + .unwrap_or(false); + let is_flipping_to_tracked = matches!(request.tracked, Some(true)) && !was_tracked; + if is_flipping_to_tracked && let Err(e) = seed_tracking_for_series(&state.db, series_id).await { + // Best-effort: if seeding fails (e.g. transient DB error), still + // honor the user's intent to flip tracked on. The next re-toggle + // or a manual backfill task can re-seed. 
+ tracing::warn!( + "Seed failed for series {} on track-on transition: {}", + series_id, + e + ); + } + + let update = TrackingUpdate { + tracked: request.tracked, + track_chapters: request.track_chapters, + track_volumes: request.track_volumes, + latest_known_chapter: request.latest_known_chapter, + latest_known_volume: request.latest_known_volume, + volume_chapter_map: request.volume_chapter_map, + poll_interval_override_s: request.poll_interval_override_s, + confidence_threshold_override: request.confidence_threshold_override, + languages: request + .languages + .map(|opt| opt.map(|langs| serde_json::json!(langs))), + }; + + let row = SeriesTrackingRepository::upsert(&state.db, series_id, update) + .await + .map_err(|e| ApiError::Internal(format!("Failed to update tracking: {}", e)))?; + + let event = EntityChangeEvent { + event: EntityEvent::SeriesUpdated { + series_id, + library_id: series.library_id, + fields: Some(vec!["tracking".to_string()]), + }, + timestamp: Utc::now(), + user_id: Some(auth.user_id), + }; + let _ = state.event_broadcaster.emit(event); + + Ok(Json(row.into())) +} + +// ============================================================================= +// Alias handlers +// ============================================================================= + +/// List release-matching aliases for a series. 
+#[utoipa::path( + get, + path = "/api/v1/series/{series_id}/aliases", + params( + ("series_id" = Uuid, Path, description = "Series ID") + ), + responses( + (status = 200, description = "List of aliases", body = SeriesAliasListResponse), + (status = 404, description = "Series not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Tracking" +)] +pub async fn list_series_aliases( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(series_id): Path<Uuid>, +) -> Result<Json<SeriesAliasListResponse>, ApiError> { + require_permission!(auth, Permission::SeriesRead)?; + + SeriesRepository::get_by_id(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch series: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Series not found".to_string()))?; + + let aliases = SeriesAliasRepository::get_for_series(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch aliases: {}", e)))?; + + Ok(Json(SeriesAliasListResponse { + aliases: aliases.into_iter().map(Into::into).collect(), + })) +} + +/// Create a release-matching alias for a series. +/// +/// Idempotent: if `(series_id, alias)` already exists, returns the existing +/// row with HTTP 200 instead of inserting a duplicate. 
+#[utoipa::path( + post, + path = "/api/v1/series/{series_id}/aliases", + params( + ("series_id" = Uuid, Path, description = "Series ID") + ), + request_body = CreateSeriesAliasRequest, + responses( + (status = 201, description = "Alias created", body = SeriesAliasDto), + (status = 200, description = "Alias already existed (idempotent)", body = SeriesAliasDto), + (status = 400, description = "Invalid alias (empty after normalization)"), + (status = 404, description = "Series not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Tracking" +)] +pub async fn create_series_alias( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path(series_id): Path<Uuid>, + Json(request): Json<CreateSeriesAliasRequest>, +) -> Result<(StatusCode, Json<SeriesAliasDto>), ApiError> { + require_permission!(auth, Permission::SeriesWrite)?; + + let series = SeriesRepository::get_by_id(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch series: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Series not found".to_string()))?; + + // Determine source. HTTP defaults to `manual`; we accept `metadata` only + // for explicit admin imports (e.g., a follow-up tool that wants to seed + // metadata-source aliases through the API rather than the backfill task). + let source = request + .source + .as_deref() + .filter(|s| alias_source::is_valid(s)) + .unwrap_or(alias_source::MANUAL); + + // Detect insert-vs-existing by counting before/after — `create()` returns + // the existing row on duplicate, but doesn't tell us which case we hit. 
+ let before = SeriesAliasRepository::count_for_series(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to count aliases: {}", e)))?; + let alias = SeriesAliasRepository::create(&state.db, series_id, &request.alias, source) + .await + .map_err(|e| { + let msg = e.to_string(); + if msg.contains("empty") + || msg.contains("normalize") + || msg.contains("invalid alias source") + { + ApiError::BadRequest(msg) + } else { + ApiError::Internal(format!("Failed to create alias: {}", e)) + } + })?; + let after = SeriesAliasRepository::count_for_series(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to count aliases: {}", e)))?; + + let status = if after > before { + // Newly inserted: emit update event so the frontend invalidates its cache. + let event = EntityChangeEvent { + event: EntityEvent::SeriesUpdated { + series_id, + library_id: series.library_id, + fields: Some(vec!["aliases".to_string()]), + }, + timestamp: Utc::now(), + user_id: Some(auth.user_id), + }; + let _ = state.event_broadcaster.emit(event); + StatusCode::CREATED + } else { + StatusCode::OK + }; + + Ok((status, Json(alias.into()))) +} + +/// Delete a release-matching alias. 
+#[utoipa::path( + delete, + path = "/api/v1/series/{series_id}/aliases/{alias_id}", + params( + ("series_id" = Uuid, Path, description = "Series ID"), + ("alias_id" = Uuid, Path, description = "Alias ID") + ), + responses( + (status = 204, description = "Alias deleted"), + (status = 404, description = "Series or alias not found"), + (status = 403, description = "Forbidden"), + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Tracking" +)] +pub async fn delete_series_alias( + State(state): State<Arc<AuthState>>, + auth: AuthContext, + Path((series_id, alias_id)): Path<(Uuid, Uuid)>, +) -> Result<StatusCode, ApiError> { + require_permission!(auth, Permission::SeriesWrite)?; + + let series = SeriesRepository::get_by_id(&state.db, series_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch series: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Series not found".to_string()))?; + + // Verify the alias actually belongs to this series before deleting. 
+ let row = SeriesAliasRepository::get_by_id(&state.db, alias_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch alias: {}", e)))?; + let row = match row { + Some(r) if r.series_id == series_id => r, + _ => return Err(ApiError::NotFound("Alias not found".to_string())), + }; + + SeriesAliasRepository::delete(&state.db, row.id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to delete alias: {}", e)))?; + + let event = EntityChangeEvent { + event: EntityEvent::SeriesUpdated { + series_id, + library_id: series.library_id, + fields: Some(vec!["aliases".to_string()]), + }, + timestamp: Utc::now(), + user_id: Some(auth.user_id), + }; + let _ = state.event_broadcaster.emit(event); + + Ok(StatusCode::NO_CONTENT) +} diff --git a/src/api/routes/v1/routes/mod.rs b/src/api/routes/v1/routes/mod.rs index e1c278bb..e47cc76a 100644 --- a/src/api/routes/v1/routes/mod.rs +++ b/src/api/routes/v1/routes/mod.rs @@ -11,6 +11,7 @@ mod misc; mod oidc; mod plugins; mod recommendations; +mod releases; mod series; mod setup; mod tasks; @@ -41,6 +42,7 @@ pub fn create_router(state: Arc<AppState>) -> Router { .merge(plugins::routes(state.clone())) .merge(user_plugins::routes(state.clone())) .merge(recommendations::routes(state.clone())) + .merge(releases::routes(state.clone())) // Apply state to all routes .with_state(state) } diff --git a/src/api/routes/v1/routes/releases.rs b/src/api/routes/v1/routes/releases.rs new file mode 100644 index 00000000..18293006 --- /dev/null +++ b/src/api/routes/v1/routes/releases.rs @@ -0,0 +1,66 @@ +//! Release-tracking routes (cross-series inbox + source admin). +//! +//! Per-series ledger (`/series/{id}/releases`) lives in `series.rs` to keep +//! all series-scoped routes together; this module wires the cross-series +//! inbox and the admin source-management endpoints. 
+ +use super::super::handlers; +use crate::api::extractors::AppState; +use axum::{ + Router, + routing::{get, patch, post}, +}; +use std::sync::Arc; + +pub fn routes(_state: Arc<AppState>) -> Router<Arc<AppState>> { + Router::new() + // Inbox + state transitions. Note: `/releases/facets` and + // `/releases/bulk` come **before** the parameterised + // `/releases/{release_id}` PATCH/DELETE so axum's matcher doesn't + // try to parse "facets" or "bulk" as a UUID. + .route("/releases", get(handlers::releases::list_release_inbox)) + .route( + "/releases/facets", + get(handlers::releases::list_release_facets), + ) + .route( + "/releases/bulk", + post(handlers::releases::bulk_release_action), + ) + .route( + "/releases/{release_id}", + patch(handlers::releases::update_release_entry) + .delete(handlers::releases::delete_release), + ) + .route( + "/releases/{release_id}/dismiss", + post(handlers::releases::dismiss_release), + ) + .route( + "/releases/{release_id}/mark-acquired", + post(handlers::releases::mark_release_acquired), + ) + // Applicability (SeriesRead required) — used by the frontend to + // hide release-tracking UI on libraries not covered by any plugin. 
+ .route( + "/release-sources/applicability", + get(handlers::releases::get_release_tracking_applicability), + ) + // Source admin (PluginsManage required) + .route( + "/release-sources", + get(handlers::releases::list_release_sources), + ) + .route( + "/release-sources/{source_id}", + patch(handlers::releases::update_release_source), + ) + .route( + "/release-sources/{source_id}/poll-now", + post(handlers::releases::poll_release_source_now), + ) + .route( + "/release-sources/{source_id}/reset", + post(handlers::releases::reset_release_source), + ) +} diff --git a/src/api/routes/v1/routes/series.rs b/src/api/routes/v1/routes/series.rs index bc374b51..432aa18f 100644 --- a/src/api/routes/v1/routes/series.rs +++ b/src/api/routes/v1/routes/series.rs @@ -271,6 +271,14 @@ pub fn routes(_state: Arc<AppState>) -> Router<Arc<AppState>> { "/series/bulk/renumber", post(handlers::bulk_renumber_series), ) + .route( + "/series/bulk/track-for-releases", + post(handlers::bulk_track_series_for_releases), + ) + .route( + "/series/bulk/untrack-for-releases", + post(handlers::bulk_untrack_series_for_releases), + ) .route( "/series/bulk/thumbnails/generate", post(handlers::bulk_generate_series_thumbnails), @@ -337,4 +345,31 @@ pub fn routes(_state: Arc<AppState>) -> Router<Arc<AppState>> { "/series/{series_id}/title/reprocess", post(handlers::task_queue::reprocess_series_title), ) + // Release-tracking config (per series) + .route( + "/series/{series_id}/tracking", + get(handlers::tracking::get_series_tracking), + ) + .route( + "/series/{series_id}/tracking", + patch(handlers::tracking::update_series_tracking), + ) + // Release-matching aliases (per series) + .route( + "/series/{series_id}/aliases", + get(handlers::tracking::list_series_aliases), + ) + .route( + "/series/{series_id}/aliases", + post(handlers::tracking::create_series_alias), + ) + .route( + "/series/{series_id}/aliases/{alias_id}", + delete(handlers::tracking::delete_series_alias), + ) + // Per-series release ledger 
(Phase 2) + .route( + "/series/{series_id}/releases", + get(handlers::releases::list_series_releases), + ) } diff --git a/src/commands/seed.rs b/src/commands/seed.rs index 0c24a327..7388d3df 100644 --- a/src/commands/seed.rs +++ b/src/commands/seed.rs @@ -66,6 +66,12 @@ pub struct SeedPluginConfig { pub credential_delivery: String, #[serde(default)] pub credentials: Option<serde_json::Value>, + /// Optional admin-side plugin configuration (the same JSON object that + /// the user would paste into "Configuration" in the plugin edit dialog). + /// Persisted on the plugin row so the plugin process receives it via + /// `InitializeParams.adminConfig` on first start. + #[serde(default, alias = "admin_config")] + pub config: Option<serde_json::Value>, #[serde(default = "default_true")] pub enabled: bool, } @@ -332,7 +338,7 @@ async fn seed_plugins( vec![], // library_ids (empty = all libraries) plugin_cfg.credentials.as_ref(), // credentials &plugin_cfg.credential_delivery, // credential_delivery - None, // config + plugin_cfg.config.clone(), // admin config plugin_cfg.enabled, None, // created_by None, // rate_limit_requests_per_minute diff --git a/src/commands/serve.rs b/src/commands/serve.rs index 094422a9..985c7475 100644 --- a/src/commands/serve.rs +++ b/src/commands/serve.rs @@ -279,13 +279,19 @@ pub async fn serve_command(config_path: PathBuf) -> anyhow::Result<()> { )); // Initialize plugin manager (before workers so they can handle plugin tasks) + // + // Note: no broadcaster injection. Reverse-RPC handlers (e.g. + // `releases/record`) emit through the task-local recording broadcaster + // set up by `TaskWorker::run_task`, not through a manager-held one. + // See `crate::events::with_recording_broadcaster`. 
info!("Initializing plugin manager..."); let plugin_manager = Arc::new( crate::services::plugin::PluginManager::with_defaults(Arc::new( db.sea_orm_connection().clone(), )) .with_metrics_service(plugin_metrics_service.clone()) - .with_plugin_file_storage(plugin_file_storage.clone()), + .with_plugin_file_storage(plugin_file_storage.clone()) + .with_scheduler(scheduler.clone()), ); // Load enabled plugins from database match plugin_manager.load_all().await { diff --git a/src/commands/worker.rs b/src/commands/worker.rs index 8b9dedc1..571b41b6 100644 --- a/src/commands/worker.rs +++ b/src/commands/worker.rs @@ -116,6 +116,11 @@ pub async fn worker_command(config_path: PathBuf) -> anyhow::Result<()> { let plugin_metrics_service = Arc::new(crate::services::PluginMetricsService::new()); // Initialize plugin manager for plugin auto-match tasks + // + // Note: no broadcaster injection. Reverse-RPC handlers (e.g. + // `releases/record`) emit through the task-local recording broadcaster + // set up by `TaskWorker::run_task`, not through a manager-held one. + // See `crate::events::with_recording_broadcaster`. 
 info!("Initializing plugin manager...");
     let plugin_manager = Arc::new(
         crate::services::plugin::PluginManager::with_defaults(Arc::new(
diff --git a/src/db/entities/mod.rs b/src/db/entities/mod.rs
index 62265e51..cc062594 100644
--- a/src/db/entities/mod.rs
+++ b/src/db/entities/mod.rs
@@ -42,6 +42,9 @@ pub mod user_plugins;
 
 // Series metadata enhancement entities
 pub mod genres;
+pub mod release_ledger;
+pub mod release_sources;
+pub mod series_aliases;
 pub mod series_alternate_titles;
 pub mod series_covers;
 pub mod series_exports;
@@ -51,6 +54,7 @@ pub mod series_external_ratings;
 pub mod series_genres;
 pub mod series_metadata;
 pub mod series_tags;
+pub mod series_tracking;
 pub mod tags;
 pub mod user_preferences;
 pub mod user_series_ratings;
diff --git a/src/db/entities/release_ledger.rs b/src/db/entities/release_ledger.rs
new file mode 100644
index 00000000..1ce6b13a
--- /dev/null
+++ b/src/db/entities/release_ledger.rs
@@ -0,0 +1,117 @@
+//! `SeaORM` entity for the `release_ledger` table.
+//!
+//! Dedup-keyed announcement ledger. Sources write rows here; the inbox UI
+//! reads from it. Dedup keys: `(source_id, external_release_id)` and
+//! `info_hash` (where present). Cross-source duplicates (Nyaa + MangaDex
+//! both seeing ch47) become two ledger rows; the UI groups them at display
+//! time so the user can pick a source.
+
+use chrono::{DateTime, Utc};
+use sea_orm::entity::prelude::*;
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
+#[sea_orm(table_name = "release_ledger")]
+pub struct Model {
+    #[sea_orm(primary_key, auto_increment = false)]
+    pub id: Uuid,
+    pub series_id: Uuid,
+    pub source_id: Uuid,
+    /// Plugin-stable identity for the release. Required for dedup.
+    pub external_release_id: String,
+    /// Optional. Torrent sources have it; HTTP sources don't.
+    pub info_hash: Option<String>,
+    /// Fractional chapter numbers (12.5, 110.1, etc.), stored as `f64`.
+ pub chapter: Option<f64>, + pub volume: Option<i32>, + pub language: Option<String>, + /// `{ "jxl": true, "container": "cbz", ... }`. + pub format_hints: Option<serde_json::Value>, + pub group_or_uploader: Option<String>, + /// Where the user goes to acquire (Nyaa torrent page, MangaDex chapter, ...). + pub payload_url: String, + /// Optional second URL for direct fetch (`.torrent`, `magnet:`, DDL). + /// Travels paired with [`Self::media_url_kind`]. + pub media_url: Option<String>, + /// `torrent` | `magnet` | `direct` | `other`. See + /// `services::release::candidate::MediaUrlKind` for the canonical list. + pub media_url_kind: Option<String>, + pub confidence: f64, + /// `announced` | `dismissed` | `marked_acquired` | `ignored` | `hidden`. + pub state: String, + pub metadata: Option<serde_json::Value>, + pub observed_at: DateTime<Utc>, + pub created_at: DateTime<Utc>, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::series::Entity", + from = "Column::SeriesId", + to = "super::series::Column::Id", + on_update = "NoAction", + on_delete = "Cascade" + )] + Series, + #[sea_orm( + belongs_to = "super::release_sources::Entity", + from = "Column::SourceId", + to = "super::release_sources::Column::Id", + on_update = "NoAction", + on_delete = "Cascade" + )] + ReleaseSource, +} + +impl Related<super::series::Entity> for Entity { + fn to() -> RelationDef { + Relation::Series.def() + } +} + +impl Related<super::release_sources::Entity> for Entity { + fn to() -> RelationDef { + Relation::ReleaseSource.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} + +/// Canonical strings for `state`. +pub mod state { + pub const ANNOUNCED: &str = "announced"; + pub const DISMISSED: &str = "dismissed"; + pub const MARKED_ACQUIRED: &str = "marked_acquired"; + /// Auto-applied at ingestion when the release matches a book the user + /// already owns (direct match on volume or chapter). 
Distinct from + /// `dismissed`, which is a user decision. Reversible via the bulk + /// `reset` action. + pub const IGNORED: &str = "ignored"; + pub const HIDDEN: &str = "hidden"; + + pub fn is_valid(s: &str) -> bool { + matches!( + s, + ANNOUNCED | DISMISSED | MARKED_ACQUIRED | IGNORED | HIDDEN + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn state_validates_known_values() { + assert!(state::is_valid("announced")); + assert!(state::is_valid("dismissed")); + assert!(state::is_valid("marked_acquired")); + assert!(state::is_valid("ignored")); + assert!(state::is_valid("hidden")); + assert!(!state::is_valid("acquired")); + assert!(!state::is_valid("new")); + assert!(!state::is_valid("")); + } +} diff --git a/src/db/entities/release_sources.rs b/src/db/entities/release_sources.rs new file mode 100644 index 00000000..661fac7a --- /dev/null +++ b/src/db/entities/release_sources.rs @@ -0,0 +1,102 @@ +//! `SeaORM` entity for the `release_sources` table. +//! +//! One row per logical source a plugin (or core) exposes. A single plugin can +//! expose many sources: e.g., the Nyaa plugin exposes one source per uploader +//! subscription. Source-level state (poll cadence, last-poll status, ETag / +//! cursor) lives here so the scheduler and reverse-RPC handlers can manage +//! sources without round-tripping through the plugin. + +use chrono::{DateTime, Utc}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "release_sources")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + /// Owning plugin id (string). The literal `"core"` is reserved for in-core + /// synthetic sources (e.g., metadata-piggyback in Phase 5). + pub plugin_id: String, + /// Plugin-defined unique key (e.g., `nyaa:user:tsuna69`). 
+ pub source_key: String, + pub display_name: String, + /// `rss-uploader` | `rss-series` | `api-feed` | `metadata-feed` | `metadata-piggyback`. + pub kind: String, + pub enabled: bool, + /// 5-field POSIX cron expression. NULL means "inherit the server-wide + /// `release_tracking.default_cron_schedule` setting." The host + /// normalizes to the 6-field format expected by `tokio-cron-scheduler` + /// at scheduler-load time. + pub cron_schedule: Option<String>, + pub last_polled_at: Option<DateTime<Utc>>, + pub last_error: Option<String>, + pub last_error_at: Option<DateTime<Utc>>, + pub etag: Option<String>, + pub config: Option<serde_json::Value>, + /// One-line human-readable summary of the most recent poll (e.g. + /// `"fetched 12 items, matched 0, recorded 0"`). Written by the + /// poll-source task on every successful completion. NULL until the + /// first successful poll. Surfaced by the Release tracking settings UI + /// under the per-row status badge so users can tell *why* a poll + /// returned no announcements without grepping container logs. + pub last_summary: Option<String>, + pub created_at: DateTime<Utc>, + pub updated_at: DateTime<Utc>, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::release_ledger::Entity")] + ReleaseLedger, +} + +impl Related<super::release_ledger::Entity> for Entity { + fn to() -> RelationDef { + Relation::ReleaseLedger.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} + +/// Canonical strings for `plugin_id`. +pub mod plugin_id { + /// In-core synthetic sources (e.g., metadata-piggyback in Phase 5). Not a + /// real plugin; bypasses plugin-host lookup. + #[allow(dead_code)] // wired up in Phase 5 (metadata piggyback) + pub const CORE: &str = "core"; +} + +/// Canonical strings for `kind`. 
+pub mod kind { + pub const RSS_UPLOADER: &str = "rss-uploader"; + pub const RSS_SERIES: &str = "rss-series"; + pub const API_FEED: &str = "api-feed"; + pub const METADATA_FEED: &str = "metadata-feed"; + pub const METADATA_PIGGYBACK: &str = "metadata-piggyback"; + + pub fn is_valid(s: &str) -> bool { + matches!( + s, + RSS_UPLOADER | RSS_SERIES | API_FEED | METADATA_FEED | METADATA_PIGGYBACK + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn kind_validates_known_values() { + assert!(kind::is_valid("rss-uploader")); + assert!(kind::is_valid("rss-series")); + assert!(kind::is_valid("api-feed")); + assert!(kind::is_valid("metadata-feed")); + assert!(kind::is_valid("metadata-piggyback")); + assert!(!kind::is_valid("rss")); + assert!(!kind::is_valid("api")); + assert!(!kind::is_valid("")); + } +} diff --git a/src/db/entities/series.rs b/src/db/entities/series.rs index 104d9d88..b190fb54 100644 --- a/src/db/entities/series.rs +++ b/src/db/entities/series.rs @@ -60,6 +60,14 @@ pub enum Relation { UserSeriesRatings, #[sea_orm(has_many = "super::series_sharing_tags::Entity")] SeriesSharingTags, + // Release tracking sidecar (1:1) and matcher aliases. + #[sea_orm(has_one = "super::series_tracking::Entity")] + SeriesTracking, + #[sea_orm(has_many = "super::series_aliases::Entity")] + SeriesAliases, + // Release ledger entries for this series (Phase 2). 
+ #[sea_orm(has_many = "super::release_ledger::Entity")] + ReleaseLedger, } impl Related<super::books::Entity> for Entity { @@ -168,4 +176,22 @@ impl Related<super::sharing_tags::Entity> for Entity { } } +impl Related<super::series_tracking::Entity> for Entity { + fn to() -> RelationDef { + Relation::SeriesTracking.def() + } +} + +impl Related<super::series_aliases::Entity> for Entity { + fn to() -> RelationDef { + Relation::SeriesAliases.def() + } +} + +impl Related<super::release_ledger::Entity> for Entity { + fn to() -> RelationDef { + Relation::ReleaseLedger.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/src/db/entities/series_aliases.rs b/src/db/entities/series_aliases.rs new file mode 100644 index 00000000..a8c17dd9 --- /dev/null +++ b/src/db/entities/series_aliases.rs @@ -0,0 +1,135 @@ +//! `SeaORM` entity for the `series_aliases` table. +//! +//! Title aliases used by release-source plugins that match by title (e.g. +//! Nyaa). Distinct from `series_alternate_titles`, which is purpose-built for +//! labelled localized titles (Japanese / Romaji / English / Korean) with a +//! unique-per-label constraint - aliases here are arbitrary strings, normalized +//! for matching. + +use chrono::{DateTime, Utc}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "series_aliases")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + pub series_id: Uuid, + /// The alias as displayed (preserves casing/punctuation for UI). + pub alias: String, + /// Lowercased + punctuation-stripped, used for matcher equality. + pub normalized: String, + /// 'metadata' | 'manual'. 
+ pub source: String, + pub created_at: DateTime<Utc>, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::series::Entity", + from = "Column::SeriesId", + to = "super::series::Column::Id", + on_update = "NoAction", + on_delete = "Cascade" + )] + Series, +} + +impl Related<super::series::Entity> for Entity { + fn to() -> RelationDef { + Relation::Series.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} + +/// Canonical strings for `source`. +pub mod alias_source { + pub const METADATA: &str = "metadata"; + pub const MANUAL: &str = "manual"; + + pub fn is_valid(s: &str) -> bool { + matches!(s, METADATA | MANUAL) + } +} + +/// Normalize an alias for matching: lowercase, strip non-alphanumeric, collapse whitespace. +/// +/// The normalization is intentionally aggressive: a release titled +/// `"My Series, Vol. 1 (Digital)"` and an alias stored as `"My Series"` should +/// share a common `normalized` prefix so a parser can match against the +/// normalized form. The raw `alias` field preserves the user's input for UI. +pub fn normalize_alias(input: &str) -> String { + let mut out = String::with_capacity(input.len()); + let mut last_was_space = false; + for ch in input.chars() { + if ch.is_alphanumeric() { + for lc in ch.to_lowercase() { + out.push(lc); + } + last_was_space = false; + } else if ch.is_whitespace() && !out.is_empty() && !last_was_space { + out.push(' '); + last_was_space = true; + } + // Any other punctuation/symbols get dropped. 
+ } + if out.ends_with(' ') { + out.pop(); + } + out +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn normalize_lowercases_and_strips_punctuation() { + assert_eq!(normalize_alias("My Hero Academia"), "my hero academia"); + assert_eq!(normalize_alias("My Hero Academia!"), "my hero academia"); + assert_eq!( + normalize_alias("Re:Zero - Starting Life in Another World"), + "rezero starting life in another world" + ); + } + + #[test] + fn normalize_collapses_whitespace() { + assert_eq!(normalize_alias(" Lots of spaces "), "lots of spaces"); + assert_eq!(normalize_alias("Tab\tand\nnewline"), "tab and newline"); + } + + #[test] + fn normalize_strips_digital_suffix_marker() { + // Tag suffixes commonly seen in Nyaa titles. + assert_eq!( + normalize_alias("My Series v01 (Digital)"), + "my series v01 digital" + ); + } + + #[test] + fn normalize_handles_unicode_lowercase() { + // Unicode lowercase round-trip (Greek, German). + assert_eq!(normalize_alias("ÄÖÜ"), "äöü"); + } + + #[test] + fn normalize_empty_input() { + assert_eq!(normalize_alias(""), ""); + assert_eq!(normalize_alias(" "), ""); + assert_eq!(normalize_alias("!!!---!!!"), ""); + } + + #[test] + fn alias_source_validates_known_values() { + assert!(alias_source::is_valid("metadata")); + assert!(alias_source::is_valid("manual")); + assert!(!alias_source::is_valid("auto")); + assert!(!alias_source::is_valid("")); + } +} diff --git a/src/db/entities/series_tracking.rs b/src/db/entities/series_tracking.rs new file mode 100644 index 00000000..76aab71f --- /dev/null +++ b/src/db/entities/series_tracking.rs @@ -0,0 +1,59 @@ +//! `SeaORM` entity for the `series_tracking` table. +//! +//! 1:1 sidecar to `series` carrying release-tracking flags. Lives in its own +//! table (not on `series` directly) so the subsystem stays cleanly separable - +//! disabling release tracking is a no-join, and removing it later doesn't +//! require a destructive migration on the core series table. 
+ +use chrono::{DateTime, Utc}; +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "series_tracking")] +pub struct Model { + /// Primary key AND foreign key to series.id (1:1 sidecar). + #[sea_orm(primary_key, auto_increment = false)] + pub series_id: Uuid, + /// Whether release tracking is enabled for this series. + pub tracked: bool, + pub track_chapters: bool, + pub track_volumes: bool, + /// Latest external chapter (decimal handles 12.5, 110.1, etc.). + pub latest_known_chapter: Option<f64>, + pub latest_known_volume: Option<i32>, + /// Sparse map: `{ "<volume>": { "first": <ch>, "last": <ch> } }`. + pub volume_chapter_map: Option<serde_json::Value>, + /// Per-series override of the source's poll interval (seconds). Null = use source default. + pub poll_interval_override_s: Option<i32>, + /// Per-series override of the server's confidence threshold. Null = use server default. + pub confidence_threshold_override: Option<f64>, + /// Per-series language preference (ISO 639-1 codes, e.g. `["en", "es"]`). + /// `None` = fall back to the server-wide default (`release_tracking.default_languages`). + /// Used by aggregation feeds like MangaUpdates that emit candidates in many + /// languages; the plugin filters client-side before recording. 
+ pub languages: Option<serde_json::Value>, + pub created_at: DateTime<Utc>, + pub updated_at: DateTime<Utc>, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::series::Entity", + from = "Column::SeriesId", + to = "super::series::Column::Id", + on_update = "NoAction", + on_delete = "Cascade" + )] + Series, +} + +impl Related<super::series::Entity> for Entity { + fn to() -> RelationDef { + Relation::Series.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/src/db/repositories/mod.rs b/src/db/repositories/mod.rs index 81aaa89a..0f410087 100644 --- a/src/db/repositories/mod.rs +++ b/src/db/repositories/mod.rs @@ -17,11 +17,15 @@ pub mod page; pub mod plugin_failures; pub mod plugins; pub mod read_progress; +pub mod release_ledger; +pub mod release_sources; pub mod series; +pub mod series_aliases; pub mod series_covers; pub mod series_export; pub mod series_external_id; pub mod series_metadata; +pub mod series_tracking; pub mod settings; pub mod tag; pub mod task; @@ -63,11 +67,21 @@ pub use page::PageRepository; pub use plugin_failures::{FailureContext, PluginFailuresRepository}; pub use plugins::PluginsRepository; pub use read_progress::ReadProgressRepository; +#[allow(unused_imports)] +pub use release_ledger::{ + LedgerInboxFilter, NewReleaseEntry, RecordOutcome, ReleaseLedgerRepository, +}; +#[allow(unused_imports)] +pub use release_sources::{NewReleaseSource, ReleaseSourceRepository, ReleaseSourceUpdate}; pub use series::{SeriesQueryOptions, SeriesQuerySort, SeriesRepository, SeriesSortFieldRepo}; +#[allow(unused_imports)] +pub use series_aliases::SeriesAliasRepository; pub use series_covers::SeriesCoversRepository; pub use series_export::SeriesExportRepository; pub use series_external_id::SeriesExternalIdRepository; pub use series_metadata::SeriesMetadataRepository; +#[allow(unused_imports)] +pub use series_tracking::{SeriesTrackingRepository, TrackingUpdate}; pub use 
settings::SettingsRepository; pub use tag::TagRepository; pub use task::TaskRepository; diff --git a/src/db/repositories/release_ledger.rs b/src/db/repositories/release_ledger.rs new file mode 100644 index 00000000..20583f64 --- /dev/null +++ b/src/db/repositories/release_ledger.rs @@ -0,0 +1,1129 @@ +//! Repository for the `release_ledger` table. +//! +//! Sources write announcements; the inbox UI reads them. Two dedup keys live +//! at the schema level (`(source_id, external_release_id)` unique; +//! `info_hash` unique-where-non-null), so the repository's `record` method +//! is idempotent on either: callers don't need to pre-check. + +#![allow(dead_code)] + +use anyhow::Result; +use chrono::Utc; +use sea_orm::{ + ActiveModelTrait, ColumnTrait, DatabaseConnection, EntityTrait, Order, PaginatorTrait, + QueryFilter, QueryOrder, QuerySelect, Set, sea_query::NullOrdering, +}; +use uuid::Uuid; + +use crate::db::entities::release_ledger::{ + self, Entity as ReleaseLedger, Model as ReleaseLedgerRow, state, +}; + +/// New-row payload. Keys plus payload fields. +#[derive(Debug, Clone)] +pub struct NewReleaseEntry { + pub series_id: Uuid, + pub source_id: Uuid, + pub external_release_id: String, + pub info_hash: Option<String>, + pub chapter: Option<f64>, + pub volume: Option<i32>, + pub language: Option<String>, + pub format_hints: Option<serde_json::Value>, + pub group_or_uploader: Option<String>, + pub payload_url: String, + pub media_url: Option<String>, + pub media_url_kind: Option<String>, + pub confidence: f64, + pub metadata: Option<serde_json::Value>, + pub observed_at: chrono::DateTime<Utc>, + /// State to insert with. `None` defaults to `announced`. Used by the + /// poll/reverse-RPC path to insert directly as `ignored` when the + /// release matches a book the user already owns. + pub initial_state: Option<String>, +} + +/// Outcome of a `record` call. 
+#[derive(Debug, Clone, PartialEq)] +pub struct RecordOutcome { + pub row: ReleaseLedgerRow, + /// `true` if this call deduped onto an existing row, `false` if it inserted. + pub deduped: bool, +} + +/// Filters for the inbox query. +#[derive(Debug, Default, Clone)] +pub struct LedgerInboxFilter { + /// Only rows in this state. `None` means "all states" (no filter). + /// Note: `list_inbox` historically defaulted to `announced` when `None`; + /// callers that want the "all states" view must opt in explicitly via + /// the [`LedgerInboxFilter::all_states`] flag. + pub state: Option<String>, + /// When `true`, no state filter is applied even if `state` is `None`. + /// Used by the inbox UI's "All" state option. + pub all_states: bool, + pub series_id: Option<Uuid>, + pub source_id: Option<Uuid>, + pub language: Option<String>, + /// Restrict to series belonging to this library. + pub library_id: Option<Uuid>, +} + +/// Per-series facet entry. +#[derive(Debug, Clone, PartialEq)] +pub struct SeriesFacet { + pub series_id: Uuid, + pub library_id: Uuid, + pub count: u64, +} + +/// Per-library facet entry. +#[derive(Debug, Clone, PartialEq)] +pub struct LibraryFacet { + pub library_id: Uuid, + pub count: u64, +} + +/// Per-language facet entry. +#[derive(Debug, Clone, PartialEq)] +pub struct LanguageFacet { + pub language: String, + pub count: u64, +} + +pub struct ReleaseLedgerRepository; + +impl ReleaseLedgerRepository { + pub async fn get_by_id(db: &DatabaseConnection, id: Uuid) -> Result<Option<ReleaseLedgerRow>> { + Ok(ReleaseLedger::find_by_id(id).one(db).await?) + } + + /// Idempotent insert. Dedup priority: + /// 1. `(source_id, external_release_id)` - cheapest, always present. + /// 2. `info_hash` - cross-source dedup, only when present. + /// + /// Returns the existing row when either key matches, otherwise inserts. 
+ pub async fn record(db: &DatabaseConnection, entry: NewReleaseEntry) -> Result<RecordOutcome> { + if entry.confidence.is_nan() { + anyhow::bail!("confidence cannot be NaN"); + } + if entry.payload_url.trim().is_empty() { + anyhow::bail!("payload_url cannot be empty"); + } + if entry.external_release_id.trim().is_empty() { + anyhow::bail!("external_release_id cannot be empty"); + } + + // 1. Primary dedup: (source_id, external_release_id). + if let Some(existing) = ReleaseLedger::find() + .filter(release_ledger::Column::SourceId.eq(entry.source_id)) + .filter(release_ledger::Column::ExternalReleaseId.eq(&entry.external_release_id)) + .one(db) + .await? + { + return Ok(RecordOutcome { + row: existing, + deduped: true, + }); + } + + // 2. Secondary dedup: info_hash (cross-source). + if let Some(ref hash) = entry.info_hash + && let Some(existing) = ReleaseLedger::find() + .filter(release_ledger::Column::InfoHash.eq(hash)) + .one(db) + .await? + { + return Ok(RecordOutcome { + row: existing, + deduped: true, + }); + } + + let initial_state = match entry.initial_state { + Some(s) if state::is_valid(&s) => s, + Some(invalid) => anyhow::bail!("invalid initial_state: {}", invalid), + None => state::ANNOUNCED.to_string(), + }; + let active = release_ledger::ActiveModel { + id: Set(Uuid::new_v4()), + series_id: Set(entry.series_id), + source_id: Set(entry.source_id), + external_release_id: Set(entry.external_release_id), + info_hash: Set(entry.info_hash), + chapter: Set(entry.chapter), + volume: Set(entry.volume), + language: Set(entry.language), + format_hints: Set(entry.format_hints), + group_or_uploader: Set(entry.group_or_uploader), + payload_url: Set(entry.payload_url), + media_url: Set(entry.media_url), + media_url_kind: Set(entry.media_url_kind), + confidence: Set(entry.confidence), + state: Set(initial_state), + metadata: Set(entry.metadata), + observed_at: Set(entry.observed_at), + created_at: Set(Utc::now()), + }; + let inserted = active.insert(db).await?; + 
Ok(RecordOutcome { + row: inserted, + deduped: false, + }) + } + + /// Per-series ledger view: highest volume/chapter first, then most recent + /// observation as a tie-breaker. Matches the inbox ordering so the series + /// detail panel reads the same way as the cross-series list. + pub async fn list_for_series( + db: &DatabaseConnection, + series_id: Uuid, + state_filter: Option<&str>, + limit: u64, + offset: u64, + ) -> Result<Vec<ReleaseLedgerRow>> { + let mut query = ReleaseLedger::find() + .filter(release_ledger::Column::SeriesId.eq(series_id)) + .order_by_with_nulls( + release_ledger::Column::Volume, + Order::Desc, + NullOrdering::Last, + ) + .order_by_with_nulls( + release_ledger::Column::Chapter, + Order::Desc, + NullOrdering::Last, + ) + .order_by_desc(release_ledger::Column::ObservedAt) + .order_by_asc(release_ledger::Column::Id); + if let Some(s) = state_filter { + query = query.filter(release_ledger::Column::State.eq(s)); + } + if limit > 0 { + query = query.limit(limit); + } + if offset > 0 { + query = query.offset(offset); + } + Ok(query.all(db).await?) + } + + /// Inbox view across all series, with filters. + /// + /// Sort order: group all rows of a series together (highest volume/chapter + /// on top), then break ties between series by the most recent observation. + /// Grouping by series first matches how users read the inbox: they want + /// every chapter of a series listed contiguously and descending, even when + /// rows come from multiple poll batches with different `observed_at`s. + /// + /// Inner-joins `series` so the cross-series order is by `series.name` + /// (alphabetical) rather than by `series_id` (a meaningless UUID order). 
+ pub async fn list_inbox( + db: &DatabaseConnection, + filter: LedgerInboxFilter, + limit: u64, + offset: u64, + ) -> Result<Vec<ReleaseLedgerRow>> { + use sea_orm::{JoinType, RelationTrait}; + let mut query = ReleaseLedger::find() + .join(JoinType::InnerJoin, release_ledger::Relation::Series.def()) + .order_by_asc(crate::db::entities::series::Column::Name) + .order_by_asc(release_ledger::Column::SeriesId) + .order_by_with_nulls( + release_ledger::Column::Volume, + Order::Desc, + NullOrdering::Last, + ) + .order_by_with_nulls( + release_ledger::Column::Chapter, + Order::Desc, + NullOrdering::Last, + ) + .order_by_desc(release_ledger::Column::ObservedAt) + .order_by_asc(release_ledger::Column::Id); + // `series_already_joined: true` so apply_inbox_filter doesn't add + // a duplicate join when `library_id` is present in the filter. + query = apply_inbox_filter(query, &filter, true); + if limit > 0 { + query = query.limit(limit); + } + if offset > 0 { + query = query.offset(offset); + } + Ok(query.all(db).await?) + } + + /// Total count for the inbox view (paginator support). + pub async fn count_inbox(db: &DatabaseConnection, filter: LedgerInboxFilter) -> Result<u64> { + let mut query = ReleaseLedger::find(); + query = apply_inbox_filter(query, &filter, false); + Ok(query.count(db).await?) + } + + /// List the distinct series present in the inbox under a given filter, + /// each with the row count. Used by the inbox UI to populate the series + /// facet dropdown. Joins the `series` table to surface `library_id` so + /// the frontend can group by library. + pub async fn list_series_facets( + db: &DatabaseConnection, + filter: LedgerInboxFilter, + ) -> Result<Vec<SeriesFacet>> { + // We join via series.id to get library_id, then count rows. Excluding + // `series_id` from the filter is the caller's job; the facet itself + // _is_ the series dimension. 
+ use sea_orm::{FromQueryResult, JoinType, RelationTrait}; + #[derive(Debug, FromQueryResult)] + struct Row { + series_id: Uuid, + library_id: Uuid, + count: i64, + } + let mut query = ReleaseLedger::find() + .select_only() + .column(release_ledger::Column::SeriesId) + .column(crate::db::entities::series::Column::LibraryId) + .column_as(release_ledger::Column::Id.count(), "count") + .join(JoinType::InnerJoin, release_ledger::Relation::Series.def()) + .group_by(release_ledger::Column::SeriesId) + .group_by(crate::db::entities::series::Column::LibraryId); + query = apply_inbox_filter(query, &filter, true); + let rows = query.into_model::<Row>().all(db).await?; + Ok(rows + .into_iter() + .map(|r| SeriesFacet { + series_id: r.series_id, + library_id: r.library_id, + count: r.count.max(0) as u64, + }) + .collect()) + } + + /// List the distinct libraries present in the inbox under a given filter, + /// each with the row count. + pub async fn list_library_facets( + db: &DatabaseConnection, + filter: LedgerInboxFilter, + ) -> Result<Vec<LibraryFacet>> { + use sea_orm::{FromQueryResult, JoinType, RelationTrait}; + #[derive(Debug, FromQueryResult)] + struct Row { + library_id: Uuid, + count: i64, + } + let mut query = ReleaseLedger::find() + .select_only() + .column(crate::db::entities::series::Column::LibraryId) + .column_as(release_ledger::Column::Id.count(), "count") + .join(JoinType::InnerJoin, release_ledger::Relation::Series.def()) + .group_by(crate::db::entities::series::Column::LibraryId); + query = apply_inbox_filter(query, &filter, true); + let rows = query.into_model::<Row>().all(db).await?; + Ok(rows + .into_iter() + .map(|r| LibraryFacet { + library_id: r.library_id, + count: r.count.max(0) as u64, + }) + .collect()) + } + + /// List the distinct languages present in the inbox under a given filter, + /// each with the row count. Skips rows with NULL/empty language. 
+ pub async fn list_language_facets( + db: &DatabaseConnection, + filter: LedgerInboxFilter, + ) -> Result<Vec<LanguageFacet>> { + use sea_orm::FromQueryResult; + #[derive(Debug, FromQueryResult)] + struct Row { + language: Option<String>, + count: i64, + } + let mut query = ReleaseLedger::find() + .select_only() + .column(release_ledger::Column::Language) + .column_as(release_ledger::Column::Id.count(), "count") + .filter(release_ledger::Column::Language.is_not_null()) + .group_by(release_ledger::Column::Language); + query = apply_inbox_filter(query, &filter, false); + let rows = query.into_model::<Row>().all(db).await?; + Ok(rows + .into_iter() + .filter_map(|r| { + let lang = r.language?; + if lang.is_empty() { + return None; + } + Some(LanguageFacet { + language: lang, + count: r.count.max(0) as u64, + }) + }) + .collect()) + } + + /// Set the state of a ledger row. Validates the state string. + pub async fn set_state( + db: &DatabaseConnection, + id: Uuid, + new_state: &str, + ) -> Result<ReleaseLedgerRow> { + if !state::is_valid(new_state) { + anyhow::bail!("invalid state: {}", new_state); + } + let existing = ReleaseLedger::find_by_id(id) + .one(db) + .await? + .ok_or_else(|| anyhow::anyhow!("ledger row {} not found", id))?; + let mut active: release_ledger::ActiveModel = existing.into(); + active.state = Set(new_state.to_string()); + Ok(active.update(db).await?) + } + + /// Delete a ledger row by id. Used by admin tooling. + pub async fn delete(db: &DatabaseConnection, id: Uuid) -> Result<bool> { + let result = ReleaseLedger::delete_by_id(id).exec(db).await?; + Ok(result.rows_affected > 0) + } + + /// Delete all ledger rows for a source. Returns the number of rows + /// removed. Used by the source-reset admin endpoint to give testers a + /// clean slate without dropping the source itself. 
+ pub async fn delete_by_source(db: &DatabaseConnection, source_id: Uuid) -> Result<u64> { + let result = ReleaseLedger::delete_many() + .filter(release_ledger::Column::SourceId.eq(source_id)) + .exec(db) + .await?; + Ok(result.rows_affected) + } + + /// Fetch rows by id list, in unspecified order. + pub async fn find_by_ids( + db: &DatabaseConnection, + ids: &[Uuid], + ) -> Result<Vec<ReleaseLedgerRow>> { + if ids.is_empty() { + return Ok(Vec::new()); + } + Ok(ReleaseLedger::find() + .filter(release_ledger::Column::Id.is_in(ids.to_vec())) + .all(db) + .await?) + } + + /// Look up the distinct `source_id`s touched by a set of ledger rows. + /// Used by the inbox's per-row "delete" so we can clear each affected + /// source's etag in the same transaction (forcing the next poll to + /// bypass `If-None-Match` and re-announce the deleted rows). + pub async fn distinct_sources_for_ids( + db: &DatabaseConnection, + ids: &[Uuid], + ) -> Result<Vec<Uuid>> { + if ids.is_empty() { + return Ok(Vec::new()); + } + let rows = ReleaseLedger::find() + .filter(release_ledger::Column::Id.is_in(ids.to_vec())) + .all(db) + .await?; + let mut sources: Vec<Uuid> = rows.into_iter().map(|r| r.source_id).collect(); + sources.sort_unstable(); + sources.dedup(); + Ok(sources) + } + + /// Bulk-delete ledger rows by id. Returns the number of rows removed. + pub async fn delete_many(db: &DatabaseConnection, ids: &[Uuid]) -> Result<u64> { + if ids.is_empty() { + return Ok(0); + } + let result = ReleaseLedger::delete_many() + .filter(release_ledger::Column::Id.is_in(ids.to_vec())) + .exec(db) + .await?; + Ok(result.rows_affected) + } + + /// Bulk-update state on ledger rows by id. Returns the number of rows + /// updated. 
+ pub async fn set_state_many( + db: &DatabaseConnection, + ids: &[Uuid], + new_state: &str, + ) -> Result<u64> { + if !state::is_valid(new_state) { + anyhow::bail!("invalid state: {}", new_state); + } + if ids.is_empty() { + return Ok(0); + } + let result = ReleaseLedger::update_many() + .col_expr( + release_ledger::Column::State, + sea_orm::sea_query::Expr::value(new_state.to_string()), + ) + .filter(release_ledger::Column::Id.is_in(ids.to_vec())) + .exec(db) + .await?; + Ok(result.rows_affected) + } +} + +/// Apply the inbox filter to a `Select` query. Centralised so the inbox +/// list/count and the facets queries stay in sync. +/// +/// State semantics: +/// - `filter.all_states == true` → no state filter. +/// - `filter.state.is_some()` → exact match. +/// - otherwise → defaults to `announced` (legacy default). +/// +/// `series_already_joined`: pass `true` when the caller has already inner +/// joined `release_ledger.series_id → series.id` (e.g. the facet queries +/// that need `series.library_id` in `SELECT`/`GROUP BY`). When `false`, +/// this function will add the join itself if the filter needs it. 
+fn apply_inbox_filter<E>( + mut query: sea_orm::Select<E>, + filter: &LedgerInboxFilter, + series_already_joined: bool, +) -> sea_orm::Select<E> +where + E: EntityTrait, +{ + use sea_orm::{JoinType, RelationTrait}; + + if !filter.all_states { + let state_filter = filter.state.as_deref().unwrap_or(state::ANNOUNCED); + query = query.filter(release_ledger::Column::State.eq(state_filter)); + } + if let Some(sid) = filter.series_id { + query = query.filter(release_ledger::Column::SeriesId.eq(sid)); + } + if let Some(src) = filter.source_id { + query = query.filter(release_ledger::Column::SourceId.eq(src)); + } + if let Some(ref lang) = filter.language { + query = query.filter(release_ledger::Column::Language.eq(lang)); + } + if let Some(lib_id) = filter.library_id { + if !series_already_joined { + query = query.join(JoinType::InnerJoin, release_ledger::Relation::Series.def()); + } + query = query.filter(crate::db::entities::series::Column::LibraryId.eq(lib_id)); + } + query +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db::ScanningStrategy; + use crate::db::entities::release_sources::kind; + use crate::db::repositories::{ + LibraryRepository, NewReleaseSource, ReleaseSourceRepository, SeriesRepository, + }; + use crate::db::test_helpers::create_test_db; + + async fn setup_world(db: &DatabaseConnection) -> (Uuid, Uuid) { + let library = LibraryRepository::create(db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(db, library.id, "Series", None) + .await + .unwrap(); + let source = ReleaseSourceRepository::create( + db, + NewReleaseSource { + plugin_id: "release-nyaa".to_string(), + source_key: "nyaa:user:tsuna69".to_string(), + display_name: "Nyaa - tsuna69".to_string(), + kind: kind::RSS_UPLOADER.to_string(), + enabled: None, + config: None, + }, + ) + .await + .unwrap(); + (series.id, source.id) + } + + fn entry(series_id: Uuid, source_id: Uuid, ext_id: &str) -> NewReleaseEntry { + NewReleaseEntry 
{ + series_id, + source_id, + external_release_id: ext_id.to_string(), + info_hash: None, + chapter: Some(143.0), + volume: None, + language: Some("en".to_string()), + format_hints: None, + group_or_uploader: Some("tsuna69".to_string()), + payload_url: format!("https://nyaa.si/view/{}", ext_id), + media_url: None, + media_url_kind: None, + confidence: 0.95, + metadata: None, + observed_at: Utc::now(), + initial_state: None, + } + } + + #[tokio::test] + async fn record_uses_initial_state_when_provided() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + // Default: lands as announced. + let default = ReleaseLedgerRepository::record(conn, entry(series_id, source_id, "rel-d")) + .await + .unwrap(); + assert_eq!(default.row.state, state::ANNOUNCED); + + // Caller-specified ignored: lands as ignored. + let mut e = entry(series_id, source_id, "rel-i"); + e.initial_state = Some(state::IGNORED.to_string()); + let ignored = ReleaseLedgerRepository::record(conn, e).await.unwrap(); + assert_eq!(ignored.row.state, state::IGNORED); + + // Invalid state: rejected. 
+ let mut e = entry(series_id, source_id, "rel-x"); + e.initial_state = Some("not_a_state".to_string()); + assert!(ReleaseLedgerRepository::record(conn, e).await.is_err()); + } + + #[tokio::test] + async fn record_persists_media_url_pair() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + let mut e = entry(series_id, source_id, "rel-media"); + e.media_url = Some("https://nyaa.si/download/1.torrent".to_string()); + e.media_url_kind = Some("torrent".to_string()); + let outcome = ReleaseLedgerRepository::record(conn, e).await.unwrap(); + assert!(!outcome.deduped); + assert_eq!( + outcome.row.media_url.as_deref(), + Some("https://nyaa.si/download/1.torrent") + ); + assert_eq!(outcome.row.media_url_kind.as_deref(), Some("torrent")); + + let fetched = ReleaseLedgerRepository::get_by_id(conn, outcome.row.id) + .await + .unwrap() + .expect("row exists"); + assert_eq!( + fetched.media_url.as_deref(), + Some("https://nyaa.si/download/1.torrent") + ); + assert_eq!(fetched.media_url_kind.as_deref(), Some("torrent")); + } + + #[tokio::test] + async fn record_inserts_then_dedups_on_external_id() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + let first = ReleaseLedgerRepository::record(conn, entry(series_id, source_id, "rel-1")) + .await + .unwrap(); + assert!(!first.deduped); + + let second = ReleaseLedgerRepository::record(conn, entry(series_id, source_id, "rel-1")) + .await + .unwrap(); + assert!(second.deduped); + assert_eq!(first.row.id, second.row.id); + } + + #[tokio::test] + async fn record_dedups_on_info_hash_across_sources() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, src_a) = setup_world(conn).await; + // Second source - same plugin, different uploader. 
+ let src_b = ReleaseSourceRepository::create( + conn, + NewReleaseSource { + plugin_id: "release-nyaa".to_string(), + source_key: "nyaa:user:other".to_string(), + display_name: "Nyaa - other".to_string(), + kind: kind::RSS_UPLOADER.to_string(), + enabled: None, + config: None, + }, + ) + .await + .unwrap(); + + let mut e1 = entry(series_id, src_a, "rel-A"); + e1.info_hash = Some("deadbeefcafe".to_string()); + let mut e2 = entry(series_id, src_b.id, "rel-B"); + e2.info_hash = Some("deadbeefcafe".to_string()); + + let r1 = ReleaseLedgerRepository::record(conn, e1).await.unwrap(); + let r2 = ReleaseLedgerRepository::record(conn, e2).await.unwrap(); + assert!(!r1.deduped); + assert!( + r2.deduped, + "same info_hash from different source must dedup onto the first row" + ); + assert_eq!(r1.row.id, r2.row.id); + } + + #[tokio::test] + async fn record_validates_required_fields() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + let mut bad = entry(series_id, source_id, "rel-x"); + bad.payload_url = "".to_string(); + let err = ReleaseLedgerRepository::record(conn, bad) + .await + .unwrap_err(); + assert!(err.to_string().contains("payload_url")); + + let mut bad = entry(series_id, source_id, "rel-x"); + bad.external_release_id = "".to_string(); + let err = ReleaseLedgerRepository::record(conn, bad) + .await + .unwrap_err(); + assert!(err.to_string().contains("external_release_id")); + + let mut bad = entry(series_id, source_id, "rel-x"); + bad.confidence = f64::NAN; + let err = ReleaseLedgerRepository::record(conn, bad) + .await + .unwrap_err(); + assert!(err.to_string().contains("NaN")); + } + + #[tokio::test] + async fn list_for_series_sorts_chapter_desc_over_observed_at() { + // The series detail panel must mirror the inbox's per-series order: + // highest chapter wins, even if a lower chapter was observed later. 
+ let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + let now = Utc::now(); + let mut high_old = entry(series_id, source_id, "rel-high"); + high_old.chapter = Some(200.0); + high_old.observed_at = now - chrono::Duration::hours(6); + let mut low_new = entry(series_id, source_id, "rel-low"); + low_new.chapter = Some(150.0); + low_new.observed_at = now; + ReleaseLedgerRepository::record(conn, high_old) + .await + .unwrap(); + ReleaseLedgerRepository::record(conn, low_new) + .await + .unwrap(); + + let rows = ReleaseLedgerRepository::list_for_series(conn, series_id, None, 10, 0) + .await + .unwrap(); + assert_eq!(rows.len(), 2); + assert_eq!(rows[0].chapter, Some(200.0)); + assert_eq!(rows[1].chapter, Some(150.0)); + } + + #[tokio::test] + async fn list_for_series_orders_by_observed_at_desc() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + let now = Utc::now(); + let mut older = entry(series_id, source_id, "rel-old"); + older.observed_at = now - chrono::Duration::hours(2); + let mut newer = entry(series_id, source_id, "rel-new"); + newer.observed_at = now; + ReleaseLedgerRepository::record(conn, older).await.unwrap(); + ReleaseLedgerRepository::record(conn, newer).await.unwrap(); + + let rows = ReleaseLedgerRepository::list_for_series(conn, series_id, None, 10, 0) + .await + .unwrap(); + assert_eq!(rows.len(), 2); + assert_eq!(rows[0].external_release_id, "rel-new"); + assert_eq!(rows[1].external_release_id, "rel-old"); + } + + #[tokio::test] + async fn list_inbox_filters_by_state() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + let r1 = ReleaseLedgerRepository::record(conn, entry(series_id, source_id, "rel-1")) + .await + .unwrap(); + let _r2 = ReleaseLedgerRepository::record(conn, 
entry(series_id, source_id, "rel-2")) + .await + .unwrap(); + + // Dismiss one. + ReleaseLedgerRepository::set_state(conn, r1.row.id, state::DISMISSED) + .await + .unwrap(); + + let announced = + ReleaseLedgerRepository::list_inbox(conn, LedgerInboxFilter::default(), 10, 0) + .await + .unwrap(); + assert_eq!(announced.len(), 1); + assert_eq!(announced[0].external_release_id, "rel-2"); + + let dismissed = ReleaseLedgerRepository::list_inbox( + conn, + LedgerInboxFilter { + state: Some(state::DISMISSED.to_string()), + ..Default::default() + }, + 10, + 0, + ) + .await + .unwrap(); + assert_eq!(dismissed.len(), 1); + assert_eq!(dismissed[0].external_release_id, "rel-1"); + } + + #[tokio::test] + async fn list_inbox_orders_series_alphabetically_by_name() { + // Cross-series ordering used to be by `series_id` (UUID), which is + // deterministic but meaningless to users. Now the inbox joins `series` + // and orders by `name ASC`, so "A series" appears before "Z series". + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let library = LibraryRepository::create(conn, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let source = ReleaseSourceRepository::create( + conn, + NewReleaseSource { + plugin_id: "release-nyaa".to_string(), + source_key: "nyaa:user:tsuna69".to_string(), + display_name: "Nyaa - tsuna69".to_string(), + kind: kind::RSS_UPLOADER.to_string(), + enabled: None, + config: None, + }, + ) + .await + .unwrap(); + // Create series in reverse alphabetical order to prove the sort isn't + // just preserving insertion order. 
+ let zebra = SeriesRepository::create(conn, library.id, "Zebra", None) + .await + .unwrap(); + let middle = SeriesRepository::create(conn, library.id, "Middle", None) + .await + .unwrap(); + let alpha = SeriesRepository::create(conn, library.id, "Alpha", None) + .await + .unwrap(); + + for sid in [zebra.id, middle.id, alpha.id] { + ReleaseLedgerRepository::record(conn, entry(sid, source.id, &format!("rel-{}", sid))) + .await + .unwrap(); + } + + let rows = ReleaseLedgerRepository::list_inbox(conn, LedgerInboxFilter::default(), 100, 0) + .await + .unwrap(); + let series_order: Vec<Uuid> = rows.iter().map(|r| r.series_id).collect(); + assert_eq!( + series_order, + vec![alpha.id, middle.id, zebra.id], + "inbox should list series alphabetically by series.name" + ); + } + + #[tokio::test] + async fn list_inbox_groups_series_across_observation_batches() { + // Bug repro: when a series has rows from two separate poll batches + // (different `observed_at`s), the inbox must still list every chapter + // contiguously and descending — not split into two desc clusters by + // batch. A user reading the inbox doesn't care which poll surfaced a + // chapter; they want the series' chapter list, in order. + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + let now = Utc::now(); + let earlier = now - chrono::Duration::hours(6); + // Earlier batch: lower chapters. Later batch: higher chapters. 
+ for ch in [122.0_f64, 123.0, 124.0, 125.0] { + let mut e = entry(series_id, source_id, &format!("rel-{}", ch)); + e.chapter = Some(ch); + e.observed_at = earlier; + ReleaseLedgerRepository::record(conn, e).await.unwrap(); + } + for ch in [150.0_f64, 151.0, 156.0] { + let mut e = entry(series_id, source_id, &format!("rel-{}", ch)); + e.chapter = Some(ch); + e.observed_at = now; + ReleaseLedgerRepository::record(conn, e).await.unwrap(); + } + + let rows = ReleaseLedgerRepository::list_inbox(conn, LedgerInboxFilter::default(), 100, 0) + .await + .unwrap(); + let chapters: Vec<f64> = rows.iter().filter_map(|r| r.chapter).collect(); + assert_eq!( + chapters, + vec![156.0, 151.0, 150.0, 125.0, 124.0, 123.0, 122.0], + "chapters of one series must be one contiguous desc list, regardless of observed_at batch" + ); + } + + #[tokio::test] + async fn list_inbox_orders_chapters_desc_within_series() { + // A poll batch records every release with the same `observed_at`. The + // inbox must still present the highest chapter first per series, not + // the arbitrary order rows happened to be inserted in. + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + + let now = Utc::now(); + // Insert in shuffled chapter order to prove the DB is doing the sort. 
+        for ch in [129.0_f64, 145.0, 122.0, 150.5, 137.0, 156.0, 138.0] {
+            let mut e = entry(series_id, source_id, &format!("rel-{}", ch));
+            e.chapter = Some(ch);
+            e.observed_at = now;
+            ReleaseLedgerRepository::record(conn, e).await.unwrap();
+        }
+
+        let rows = ReleaseLedgerRepository::list_inbox(conn, LedgerInboxFilter::default(), 100, 0)
+            .await
+            .unwrap();
+        let chapters: Vec<f64> = rows.iter().filter_map(|r| r.chapter).collect();
+        assert_eq!(
+            chapters,
+            vec![156.0, 150.5, 145.0, 138.0, 137.0, 129.0, 122.0],
+            "rows of the same series must be sorted by chapter desc"
+        );
+    }
+
+    #[tokio::test]
+    async fn list_inbox_groups_series_with_chapters_desc_inside() {
+        // Two series in the same poll batch: the inbox must keep each series'
+        // rows contiguous and sort their chapters descending. The cross-series
+        // order is by `series.name` ASC (alphabetical; see the name-order test).
+        let (db, _temp) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_a, src) = setup_world(conn).await;
+        let library = LibraryRepository::create(conn, "Lib2", "/lib2", ScanningStrategy::Default)
+            .await
+            .unwrap();
+        let series_b = SeriesRepository::create(conn, library.id, "Series B", None)
+            .await
+            .unwrap();
+
+        let now = Utc::now();
+        let mut a1 = entry(series_a, src, "a-1");
+        a1.chapter = Some(10.0);
+        a1.observed_at = now;
+        let mut a2 = entry(series_a, src, "a-2");
+        a2.chapter = Some(20.0);
+        a2.observed_at = now;
+        let mut b1 = entry(series_b.id, src, "b-1");
+        b1.chapter = Some(5.0);
+        b1.observed_at = now;
+        let mut b2 = entry(series_b.id, src, "b-2");
+        b2.chapter = Some(7.0);
+        b2.observed_at = now;
+        // Insert interleaved to prove ordering doesn't leak from insertion order.
+ ReleaseLedgerRepository::record(conn, a1).await.unwrap(); + ReleaseLedgerRepository::record(conn, b1).await.unwrap(); + ReleaseLedgerRepository::record(conn, a2).await.unwrap(); + ReleaseLedgerRepository::record(conn, b2).await.unwrap(); + + let rows = ReleaseLedgerRepository::list_inbox(conn, LedgerInboxFilter::default(), 100, 0) + .await + .unwrap(); + // Each series' rows must be contiguous and chapter-desc internally. + let series_groups: Vec<Vec<(Uuid, f64)>> = rows + .iter() + .map(|r| (r.series_id, r.chapter.unwrap())) + .fold(Vec::new(), |mut acc, (sid, ch)| { + if acc.last().is_some_and(|g: &Vec<_>| g[0].0 == sid) { + acc.last_mut().unwrap().push((sid, ch)); + } else { + acc.push(vec![(sid, ch)]); + } + acc + }); + assert_eq!( + series_groups.len(), + 2, + "rows of each series must be contiguous" + ); + for group in &series_groups { + let chs: Vec<f64> = group.iter().map(|(_, c)| *c).collect(); + let mut sorted = chs.clone(); + sorted.sort_by(|a, b| b.partial_cmp(a).unwrap()); + assert_eq!(chs, sorted, "chapters within a series must be desc"); + } + } + + #[tokio::test] + async fn list_inbox_supports_combined_filters() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_a, src_a) = setup_world(conn).await; + // Second series. + let library = LibraryRepository::create(conn, "Lib2", "/lib2", ScanningStrategy::Default) + .await + .unwrap(); + let series_b = SeriesRepository::create(conn, library.id, "Series B", None) + .await + .unwrap(); + + // 2 entries on A, 1 on B. 
+ ReleaseLedgerRepository::record(conn, entry(series_a, src_a, "rel-1")) + .await + .unwrap(); + ReleaseLedgerRepository::record(conn, entry(series_a, src_a, "rel-2")) + .await + .unwrap(); + ReleaseLedgerRepository::record(conn, entry(series_b.id, src_a, "rel-3")) + .await + .unwrap(); + + let only_a = ReleaseLedgerRepository::list_inbox( + conn, + LedgerInboxFilter { + series_id: Some(series_a), + ..Default::default() + }, + 10, + 0, + ) + .await + .unwrap(); + assert_eq!(only_a.len(), 2); + + let total = ReleaseLedgerRepository::count_inbox(conn, LedgerInboxFilter::default()) + .await + .unwrap(); + assert_eq!(total, 3); + } + + #[tokio::test] + async fn set_state_validates_and_transitions() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + let r = ReleaseLedgerRepository::record(conn, entry(series_id, source_id, "rel-1")) + .await + .unwrap(); + + let updated = ReleaseLedgerRepository::set_state(conn, r.row.id, state::MARKED_ACQUIRED) + .await + .unwrap(); + assert_eq!(updated.state, "marked_acquired"); + + let err = ReleaseLedgerRepository::set_state(conn, r.row.id, "garbage") + .await + .unwrap_err(); + assert!(err.to_string().contains("invalid state")); + } + + #[tokio::test] + async fn cascade_deletes_ledger_when_series_deleted() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + ReleaseLedgerRepository::record(conn, entry(series_id, source_id, "rel-1")) + .await + .unwrap(); + + SeriesRepository::delete(conn, series_id).await.unwrap(); + + let rows = ReleaseLedgerRepository::list_for_series(conn, series_id, None, 10, 0) + .await + .unwrap(); + assert!(rows.is_empty(), "ledger rows cascaded with series"); + } + + #[tokio::test] + async fn delete_by_source_removes_only_that_sources_rows() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let 
(series_id, source_a) = setup_world(conn).await; + + // Add a second source so we can prove scoping. + let source_b = ReleaseSourceRepository::create( + conn, + NewReleaseSource { + plugin_id: "release-nyaa".to_string(), + source_key: "nyaa:user:other".to_string(), + display_name: "Nyaa - other".to_string(), + kind: kind::RSS_UPLOADER.to_string(), + enabled: None, + config: None, + }, + ) + .await + .unwrap(); + + ReleaseLedgerRepository::record(conn, entry(series_id, source_a, "rel-1")) + .await + .unwrap(); + ReleaseLedgerRepository::record(conn, entry(series_id, source_a, "rel-2")) + .await + .unwrap(); + ReleaseLedgerRepository::record(conn, entry(series_id, source_b.id, "rel-3")) + .await + .unwrap(); + + let removed = ReleaseLedgerRepository::delete_by_source(conn, source_a) + .await + .unwrap(); + assert_eq!(removed, 2); + + // Source A is empty; source B still has its row. + let after_a = + ReleaseLedgerRepository::list_inbox(conn, LedgerInboxFilter::default(), 100, 0) + .await + .unwrap() + .into_iter() + .filter(|r| r.source_id == source_a) + .count(); + assert_eq!(after_a, 0); + let after_b = + ReleaseLedgerRepository::list_inbox(conn, LedgerInboxFilter::default(), 100, 0) + .await + .unwrap() + .into_iter() + .filter(|r| r.source_id == source_b.id) + .count(); + assert_eq!(after_b, 1); + } + + #[tokio::test] + async fn cascade_deletes_ledger_when_source_deleted() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (series_id, source_id) = setup_world(conn).await; + ReleaseLedgerRepository::record(conn, entry(series_id, source_id, "rel-1")) + .await + .unwrap(); + + ReleaseSourceRepository::delete(conn, source_id) + .await + .unwrap(); + + let rows = ReleaseLedgerRepository::list_for_series(conn, series_id, None, 10, 0) + .await + .unwrap(); + assert!(rows.is_empty(), "ledger rows cascaded with source"); + } +} diff --git a/src/db/repositories/release_sources.rs b/src/db/repositories/release_sources.rs new file mode 
100644
index 00000000..da8130db
--- /dev/null
+++ b/src/db/repositories/release_sources.rs
@@ -0,0 +1,868 @@
+//! Repository for the `release_sources` table.
+//!
+//! One row per logical source a plugin (or core) exposes. The plugin → source
+//! relationship is one-to-many: e.g., a single Nyaa plugin instance exposes
+//! one source per uploader subscription. CRUD here, plus state-tracking
+//! helpers (`record_poll_success`, `record_poll_error`) used by the polling
+//! task in Phase 4.
+
+#![allow(dead_code)]
+
+use anyhow::Result;
+use chrono::{DateTime, Utc};
+use sea_orm::{
+    ActiveModelTrait, ColumnTrait, DatabaseConnection, EntityTrait, PaginatorTrait, QueryFilter,
+    QueryOrder, Set,
+};
+use uuid::Uuid;
+
+use crate::db::entities::release_sources::{
+    self, Entity as ReleaseSources, Model as ReleaseSource, kind,
+};
+use crate::utils::cron::validate_cron_expression;
+
+/// Normalize a caller-supplied cron schedule: trim, treat empty as `None`,
+/// validate the parse, and return the trimmed string. Errors when the
+/// expression is non-empty but invalid.
+fn sanitize_cron_schedule(value: Option<String>) -> Result<Option<String>> {
+    let Some(raw) = value else { return Ok(None) };
+    let trimmed = raw.trim();
+    if trimmed.is_empty() {
+        return Ok(None);
+    }
+    validate_cron_expression(trimmed)
+        .map_err(|e| anyhow::anyhow!("invalid cron_schedule: {}", e))?;
+    Ok(Some(trimmed.to_string()))
+}
+
+/// Parameters for creating a new release source. Only the fields a caller is
+/// expected to choose live here; `created_at` / `updated_at` / `id` are
+/// generated.
+#[derive(Debug, Clone)]
+pub struct NewReleaseSource {
+    pub plugin_id: String,
+    pub source_key: String,
+    pub display_name: String,
+    pub kind: String,
+    pub enabled: Option<bool>,
+    pub config: Option<serde_json::Value>,
+}
+
+/// PATCH-style update payload. Each `Option<T>` distinguishes "leave alone"
+/// (`None`) from "set". 
`cron_schedule` uses `Option<Option<String>>` so the +/// caller can explicitly clear a row's override (revert to inheriting the +/// server-wide default) by sending `Some(None)`. +#[derive(Debug, Default, Clone)] +pub struct ReleaseSourceUpdate { + pub display_name: Option<String>, + pub enabled: Option<bool>, + pub cron_schedule: Option<Option<String>>, + pub config: Option<Option<serde_json::Value>>, +} + +pub struct ReleaseSourceRepository; + +impl ReleaseSourceRepository { + pub async fn get_by_id(db: &DatabaseConnection, id: Uuid) -> Result<Option<ReleaseSource>> { + Ok(ReleaseSources::find_by_id(id).one(db).await?) + } + + /// Lookup by the natural composite key `(plugin_id, source_key)`. + pub async fn find_by_key( + db: &DatabaseConnection, + plugin_id: &str, + source_key: &str, + ) -> Result<Option<ReleaseSource>> { + Ok(ReleaseSources::find() + .filter(release_sources::Column::PluginId.eq(plugin_id)) + .filter(release_sources::Column::SourceKey.eq(source_key)) + .one(db) + .await?) + } + + /// List all sources, ordered by `(plugin_id, source_key)` for stable display. + pub async fn list_all(db: &DatabaseConnection) -> Result<Vec<ReleaseSource>> { + Ok(ReleaseSources::find() + .order_by_asc(release_sources::Column::PluginId) + .order_by_asc(release_sources::Column::SourceKey) + .all(db) + .await?) + } + + /// List enabled sources only. Hot path for the scheduler. + pub async fn list_enabled(db: &DatabaseConnection) -> Result<Vec<ReleaseSource>> { + Ok(ReleaseSources::find() + .filter(release_sources::Column::Enabled.eq(true)) + .order_by_asc(release_sources::Column::PluginId) + .order_by_asc(release_sources::Column::SourceKey) + .all(db) + .await?) + } + + /// Count all sources (used for inventory metrics). + pub async fn count(db: &DatabaseConnection) -> Result<u64> { + Ok(ReleaseSources::find().count(db).await?) + } + + /// Create a new source. Validates `kind` against the canonical set. 
+ /// New rows always start with `cron_schedule = NULL` (inherit the + /// server-wide default); admins can override per-row via PATCH. + pub async fn create( + db: &DatabaseConnection, + params: NewReleaseSource, + ) -> Result<ReleaseSource> { + if !kind::is_valid(¶ms.kind) { + anyhow::bail!("invalid kind: {}", params.kind); + } + if params.plugin_id.trim().is_empty() { + anyhow::bail!("plugin_id cannot be empty"); + } + if params.source_key.trim().is_empty() { + anyhow::bail!("source_key cannot be empty"); + } + + let now = Utc::now(); + let active = release_sources::ActiveModel { + id: Set(Uuid::new_v4()), + plugin_id: Set(params.plugin_id), + source_key: Set(params.source_key), + display_name: Set(params.display_name), + kind: Set(params.kind), + enabled: Set(params.enabled.unwrap_or(true)), + cron_schedule: Set(None), + last_polled_at: Set(None), + last_error: Set(None), + last_error_at: Set(None), + etag: Set(None), + config: Set(params.config), + last_summary: Set(None), + created_at: Set(now), + updated_at: Set(now), + }; + Ok(active.insert(db).await?) + } + + /// Get-or-create a synthetic in-core source (used by the metadata-piggyback + /// path in Phase 5). Distinct from `create` so callers don't accidentally + /// create duplicate synthetic rows. + pub async fn get_or_create( + db: &DatabaseConnection, + params: NewReleaseSource, + ) -> Result<ReleaseSource> { + if let Some(existing) = Self::find_by_key(db, ¶ms.plugin_id, ¶ms.source_key).await? + { + return Ok(existing); + } + Self::create(db, params).await + } + + /// Idempotent upsert keyed on `(plugin_id, source_key)`. + /// + /// On insert, the row is created with `params` and defaults to enabled. + /// On update, **only the plugin-owned descriptive fields** are refreshed + /// (`display_name`, `kind`, `config`). User-managed fields (`enabled`, + /// `cron_schedule`) are preserved so an admin's schedule override or + /// disable toggle survives a plugin re-registration. 
+ /// + /// Used by `releases/register_sources` so a plugin can declare its full + /// desired-state list on every initialize without trampling user choices. + pub async fn upsert( + db: &DatabaseConnection, + params: NewReleaseSource, + ) -> Result<ReleaseSource> { + if !kind::is_valid(¶ms.kind) { + anyhow::bail!("invalid kind: {}", params.kind); + } + if let Some(existing) = Self::find_by_key(db, ¶ms.plugin_id, ¶ms.source_key).await? + { + let mut active: release_sources::ActiveModel = existing.into(); + active.display_name = Set(params.display_name); + active.kind = Set(params.kind); + active.config = Set(params.config); + active.updated_at = Set(Utc::now()); + return Ok(active.update(db).await?); + } + Self::create(db, params).await + } + + /// Return every row owned by `plugin_id`, ordered by `source_key`. + pub async fn list_by_plugin( + db: &DatabaseConnection, + plugin_id: &str, + ) -> Result<Vec<ReleaseSource>> { + Ok(ReleaseSources::find() + .filter(release_sources::Column::PluginId.eq(plugin_id)) + .order_by_asc(release_sources::Column::SourceKey) + .all(db) + .await?) + } + + /// Delete every row owned by `plugin_id` whose `source_key` is **not** in + /// `keep_keys`. Returns the number of rows removed. Cascades to + /// `release_ledger`. Used by `register_sources` to prune sources that the + /// plugin no longer declares. + pub async fn delete_by_plugin_excluding( + db: &DatabaseConnection, + plugin_id: &str, + keep_keys: &[String], + ) -> Result<u64> { + let mut query = + ReleaseSources::delete_many().filter(release_sources::Column::PluginId.eq(plugin_id)); + if !keep_keys.is_empty() { + query = query.filter(release_sources::Column::SourceKey.is_not_in(keep_keys.to_vec())); + } + let result = query.exec(db).await?; + Ok(result.rows_affected) + } + + /// Apply a PATCH-style update. 
+ pub async fn update( + db: &DatabaseConnection, + id: Uuid, + update: ReleaseSourceUpdate, + ) -> Result<ReleaseSource> { + let existing = ReleaseSources::find_by_id(id) + .one(db) + .await? + .ok_or_else(|| anyhow::anyhow!("release source {} not found", id))?; + + let mut active: release_sources::ActiveModel = existing.into(); + if let Some(name) = update.display_name { + active.display_name = Set(name); + } + if let Some(enabled) = update.enabled { + active.enabled = Set(enabled); + } + if let Some(cron) = update.cron_schedule { + // Some(None) -> clear (inherit server default); Some(Some(s)) -> set override. + let sanitized = sanitize_cron_schedule(cron)?; + active.cron_schedule = Set(sanitized); + } + if let Some(cfg) = update.config { + active.config = Set(cfg); + } + active.updated_at = Set(Utc::now()); + Ok(active.update(db).await?) + } + + /// Record a successful poll. Clears any prior error and bumps `last_polled_at`. + /// Optionally sets a new etag/cursor. + pub async fn record_poll_success( + db: &DatabaseConnection, + id: Uuid, + polled_at: DateTime<Utc>, + etag: Option<String>, + summary: Option<String>, + ) -> Result<()> { + let existing = ReleaseSources::find_by_id(id) + .one(db) + .await? + .ok_or_else(|| anyhow::anyhow!("release source {} not found", id))?; + let mut active: release_sources::ActiveModel = existing.into(); + active.last_polled_at = Set(Some(polled_at)); + active.last_error = Set(None); + active.last_error_at = Set(None); + if let Some(e) = etag { + active.etag = Set(Some(e)); + } + // None passed by the caller means "leave alone"; older callers can pass + // None and keep their existing behavior. Pass Some("…") to overwrite. + if let Some(s) = summary { + active.last_summary = Set(Some(s)); + } + active.updated_at = Set(Utc::now()); + active.update(db).await?; + Ok(()) + } + + /// Record a poll error. 
Does NOT touch `last_polled_at` (we still consider + /// the poll attempt observed, but `last_error` lets the UI surface failures). + pub async fn record_poll_error( + db: &DatabaseConnection, + id: Uuid, + error: &str, + errored_at: DateTime<Utc>, + ) -> Result<()> { + let existing = ReleaseSources::find_by_id(id) + .one(db) + .await? + .ok_or_else(|| anyhow::anyhow!("release source {} not found", id))?; + let mut active: release_sources::ActiveModel = existing.into(); + active.last_error = Set(Some(error.to_string())); + active.last_error_at = Set(Some(errored_at)); + active.updated_at = Set(Utc::now()); + active.update(db).await?; + Ok(()) + } + + /// Delete a source. Cascades to `release_ledger` rows. + pub async fn delete(db: &DatabaseConnection, id: Uuid) -> Result<bool> { + let result = ReleaseSources::delete_by_id(id).exec(db).await?; + Ok(result.rows_affected > 0) + } + + /// Reset all transient poll state on a source: clears `etag`, + /// `last_polled_at`, `last_error`, `last_error_at`, and `last_summary`. + /// Leaves user-managed fields (`enabled`, `cron_schedule`, + /// `display_name`, `config`) untouched. + /// + /// Used by the source-reset admin endpoint so a forced re-poll fetches + /// the upstream feed afresh (no `If-None-Match` 304) and re-records + /// every release as `announced`. + pub async fn clear_poll_state(db: &DatabaseConnection, id: Uuid) -> Result<()> { + let existing = ReleaseSources::find_by_id(id) + .one(db) + .await? + .ok_or_else(|| anyhow::anyhow!("release source {} not found", id))?; + let mut active: release_sources::ActiveModel = existing.into(); + active.last_polled_at = Set(None); + active.last_error = Set(None); + active.last_error_at = Set(None); + active.etag = Set(None); + active.last_summary = Set(None); + active.updated_at = Set(Utc::now()); + active.update(db).await?; + Ok(()) + } + + /// Clear only the `etag` for this source. 
Used when a user deletes + /// individual ledger rows and wants the next poll to bypass the + /// upstream's `If-None-Match` cache (so the deleted row gets re-recorded + /// in `announced` state). Lighter than `clear_poll_state`: poll history + /// (`last_polled_at`, `last_error`, `last_summary`) is preserved. + pub async fn clear_etag(db: &DatabaseConnection, id: Uuid) -> Result<()> { + // Use update_many so a missing row is a silent no-op rather than an + // error. Per-row ledger deletes can race with a source deletion; the + // dropped ledger row is the user's intent regardless of whether the + // source still exists. + ReleaseSources::update_many() + .col_expr( + release_sources::Column::Etag, + sea_orm::sea_query::Expr::value(Option::<String>::None), + ) + .col_expr( + release_sources::Column::UpdatedAt, + sea_orm::sea_query::Expr::value(Utc::now()), + ) + .filter(release_sources::Column::Id.eq(id)) + .exec(db) + .await?; + Ok(()) + } + + /// Clear `etag` on every source in `ids` in a single statement. 
+ pub async fn clear_etag_many(db: &DatabaseConnection, ids: &[Uuid]) -> Result<()> { + if ids.is_empty() { + return Ok(()); + } + ReleaseSources::update_many() + .col_expr( + release_sources::Column::Etag, + sea_orm::sea_query::Expr::value(Option::<String>::None), + ) + .col_expr( + release_sources::Column::UpdatedAt, + sea_orm::sea_query::Expr::value(Utc::now()), + ) + .filter(release_sources::Column::Id.is_in(ids.to_vec())) + .exec(db) + .await?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db::test_helpers::create_test_db; + + fn nyaa_source() -> NewReleaseSource { + NewReleaseSource { + plugin_id: "release-nyaa".to_string(), + source_key: "nyaa:user:tsuna69".to_string(), + display_name: "Nyaa - tsuna69".to_string(), + kind: kind::RSS_UPLOADER.to_string(), + enabled: None, + config: None, + } + } + + #[tokio::test] + async fn create_and_lookup_roundtrip() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let created = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + assert_eq!(created.plugin_id, "release-nyaa"); + assert!(created.enabled, "default to enabled"); + + let by_id = ReleaseSourceRepository::get_by_id(conn, created.id) + .await + .unwrap() + .unwrap(); + assert_eq!(by_id.id, created.id); + + let by_key = + ReleaseSourceRepository::find_by_key(conn, "release-nyaa", "nyaa:user:tsuna69") + .await + .unwrap() + .unwrap(); + assert_eq!(by_key.id, created.id); + } + + #[tokio::test] + async fn create_rejects_invalid_kind() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let mut params = nyaa_source(); + params.kind = "frobnicate".to_string(); + let err = ReleaseSourceRepository::create(conn, params) + .await + .unwrap_err(); + assert!(err.to_string().contains("invalid kind")); + } + + #[tokio::test] + async fn update_rejects_invalid_cron() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + 
+ let s = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + let err = ReleaseSourceRepository::update( + conn, + s.id, + ReleaseSourceUpdate { + cron_schedule: Some(Some("not a cron".to_string())), + ..Default::default() + }, + ) + .await + .unwrap_err(); + assert!(err.to_string().to_lowercase().contains("cron")); + } + + #[tokio::test] + async fn update_clears_cron_schedule_with_explicit_none() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let s = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + // Set an override. + ReleaseSourceRepository::update( + conn, + s.id, + ReleaseSourceUpdate { + cron_schedule: Some(Some("0 */6 * * *".to_string())), + ..Default::default() + }, + ) + .await + .unwrap(); + let after_set = ReleaseSourceRepository::get_by_id(conn, s.id) + .await + .unwrap() + .unwrap(); + assert_eq!(after_set.cron_schedule.as_deref(), Some("0 */6 * * *")); + + // Clear back to inherit. 
+ ReleaseSourceRepository::update( + conn, + s.id, + ReleaseSourceUpdate { + cron_schedule: Some(None), + ..Default::default() + }, + ) + .await + .unwrap(); + let after_clear = ReleaseSourceRepository::get_by_id(conn, s.id) + .await + .unwrap() + .unwrap(); + assert!(after_clear.cron_schedule.is_none()); + } + + #[tokio::test] + async fn update_treats_empty_cron_as_clear() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let s = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + ReleaseSourceRepository::update( + conn, + s.id, + ReleaseSourceUpdate { + cron_schedule: Some(Some("0 */6 * * *".to_string())), + ..Default::default() + }, + ) + .await + .unwrap(); + + ReleaseSourceRepository::update( + conn, + s.id, + ReleaseSourceUpdate { + cron_schedule: Some(Some(" ".to_string())), + ..Default::default() + }, + ) + .await + .unwrap(); + let after = ReleaseSourceRepository::get_by_id(conn, s.id) + .await + .unwrap() + .unwrap(); + assert!(after.cron_schedule.is_none()); + } + + #[tokio::test] + async fn get_or_create_is_idempotent_per_key() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let a = ReleaseSourceRepository::get_or_create(conn, nyaa_source()) + .await + .unwrap(); + let b = ReleaseSourceRepository::get_or_create(conn, nyaa_source()) + .await + .unwrap(); + assert_eq!(a.id, b.id, "same key returns same row"); + } + + #[tokio::test] + async fn list_enabled_filters_disabled_rows() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let a = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + let mut p2 = nyaa_source(); + p2.source_key = "nyaa:user:other".to_string(); + let b = ReleaseSourceRepository::create(conn, p2).await.unwrap(); + + ReleaseSourceRepository::update( + conn, + b.id, + ReleaseSourceUpdate { + enabled: Some(false), + ..Default::default() + }, + ) + .await + .unwrap(); + + let 
enabled = ReleaseSourceRepository::list_enabled(conn).await.unwrap(); + assert_eq!(enabled.len(), 1); + assert_eq!(enabled[0].id, a.id); + } + + #[tokio::test] + async fn record_poll_success_clears_error_and_sets_etag() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let s = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + + // Seed an error first. + ReleaseSourceRepository::record_poll_error(conn, s.id, "503 upstream", Utc::now()) + .await + .unwrap(); + let after_err = ReleaseSourceRepository::get_by_id(conn, s.id) + .await + .unwrap() + .unwrap(); + assert_eq!(after_err.last_error.as_deref(), Some("503 upstream")); + + // Successful poll clears the error and sets etag + summary. + let polled_at = Utc::now(); + ReleaseSourceRepository::record_poll_success( + conn, + s.id, + polled_at, + Some("\"etag-1\"".to_string()), + Some("Fetched 0 items".to_string()), + ) + .await + .unwrap(); + let after_ok = ReleaseSourceRepository::get_by_id(conn, s.id) + .await + .unwrap() + .unwrap(); + assert_eq!(after_ok.last_error, None); + assert_eq!(after_ok.last_error_at, None); + assert_eq!(after_ok.last_polled_at, Some(polled_at)); + assert_eq!(after_ok.etag.as_deref(), Some("\"etag-1\"")); + assert_eq!(after_ok.last_summary.as_deref(), Some("Fetched 0 items")); + } + + #[tokio::test] + async fn record_poll_error_does_not_touch_last_polled_at() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let s = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + + // First a success. + let success_at = Utc::now(); + ReleaseSourceRepository::record_poll_success(conn, s.id, success_at, None, None) + .await + .unwrap(); + + // Then an error. 
+ ReleaseSourceRepository::record_poll_error(conn, s.id, "boom", Utc::now()) + .await + .unwrap(); + + let after = ReleaseSourceRepository::get_by_id(conn, s.id) + .await + .unwrap() + .unwrap(); + assert_eq!( + after.last_polled_at, + Some(success_at), + "last_polled_at preserved on error so users can see when we last got data" + ); + assert_eq!(after.last_error.as_deref(), Some("boom")); + } + + #[tokio::test] + async fn unique_constraint_on_plugin_id_source_key() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + + // Same (plugin_id, source_key) - should fail at the unique index. + let result = ReleaseSourceRepository::create(conn, nyaa_source()).await; + assert!(result.is_err(), "duplicate key must fail"); + } + + #[tokio::test] + async fn upsert_creates_when_missing_and_preserves_user_fields_on_update() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + // First call creates the row. + let created = ReleaseSourceRepository::upsert(conn, nyaa_source()) + .await + .unwrap(); + assert!(created.enabled); + assert!( + created.cron_schedule.is_none(), + "fresh row inherits server-wide default" + ); + + // Admin disables and sets a cron override. + ReleaseSourceRepository::update( + conn, + created.id, + ReleaseSourceUpdate { + enabled: Some(false), + cron_schedule: Some(Some("0 */6 * * *".to_string())), + ..Default::default() + }, + ) + .await + .unwrap(); + + // Plugin re-registers with a different display name, kind, and config. 
+ let mut params = nyaa_source(); + params.display_name = "Nyaa: tsuna69 (refreshed)".to_string(); + params.config = Some(serde_json::json!({ "subscription": "tsuna69" })); + let updated = ReleaseSourceRepository::upsert(conn, params).await.unwrap(); + + assert_eq!(updated.id, created.id, "same key returns same row"); + assert_eq!(updated.display_name, "Nyaa: tsuna69 (refreshed)"); + assert_eq!( + updated.config, + Some(serde_json::json!({ "subscription": "tsuna69" })) + ); + assert!( + !updated.enabled, + "user-set enabled flag must survive a plugin re-register" + ); + assert_eq!( + updated.cron_schedule.as_deref(), + Some("0 */6 * * *"), + "user-set cron_schedule must survive a plugin re-register" + ); + } + + #[tokio::test] + async fn list_by_plugin_returns_only_that_plugins_rows() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + let mut other = nyaa_source(); + other.plugin_id = "release-mangaupdates".to_string(); + other.source_key = "default".to_string(); + ReleaseSourceRepository::create(conn, other).await.unwrap(); + + let nyaa = ReleaseSourceRepository::list_by_plugin(conn, "release-nyaa") + .await + .unwrap(); + assert_eq!(nyaa.len(), 1); + assert_eq!(nyaa[0].plugin_id, "release-nyaa"); + } + + #[tokio::test] + async fn delete_by_plugin_excluding_prunes_missing_keys() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let mut a = nyaa_source(); + a.source_key = "user:tsuna69".to_string(); + let mut b = nyaa_source(); + b.source_key = "user:other".to_string(); + let mut c = nyaa_source(); + c.source_key = "user:gone".to_string(); + ReleaseSourceRepository::create(conn, a).await.unwrap(); + ReleaseSourceRepository::create(conn, b).await.unwrap(); + ReleaseSourceRepository::create(conn, c).await.unwrap(); + + // Keep only the first two. 
+ let keep = vec!["user:tsuna69".to_string(), "user:other".to_string()]; + let removed = + ReleaseSourceRepository::delete_by_plugin_excluding(conn, "release-nyaa", &keep) + .await + .unwrap(); + assert_eq!(removed, 1); + + let remaining = ReleaseSourceRepository::list_by_plugin(conn, "release-nyaa") + .await + .unwrap(); + assert_eq!(remaining.len(), 2); + let keys: Vec<&str> = remaining.iter().map(|r| r.source_key.as_str()).collect(); + assert!(keys.contains(&"user:tsuna69")); + assert!(keys.contains(&"user:other")); + } + + #[tokio::test] + async fn delete_by_plugin_excluding_with_empty_keep_removes_all() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + let mut other = nyaa_source(); + other.source_key = "user:other".to_string(); + ReleaseSourceRepository::create(conn, other).await.unwrap(); + + let removed = + ReleaseSourceRepository::delete_by_plugin_excluding(conn, "release-nyaa", &[]) + .await + .unwrap(); + assert_eq!(removed, 2); + + let remaining = ReleaseSourceRepository::list_by_plugin(conn, "release-nyaa") + .await + .unwrap(); + assert!(remaining.is_empty()); + } + + #[tokio::test] + async fn delete_by_plugin_excluding_does_not_touch_other_plugins() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + let mut other = nyaa_source(); + other.plugin_id = "release-mangaupdates".to_string(); + other.source_key = "default".to_string(); + ReleaseSourceRepository::create(conn, other).await.unwrap(); + + // Wipe everything for nyaa; mangaupdates row must survive. 
+ ReleaseSourceRepository::delete_by_plugin_excluding(conn, "release-nyaa", &[]) + .await + .unwrap(); + + let mu = ReleaseSourceRepository::list_by_plugin(conn, "release-mangaupdates") + .await + .unwrap(); + assert_eq!(mu.len(), 1); + } + + #[tokio::test] + async fn clear_poll_state_resets_transient_fields_only() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let s = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + + // Seed some poll state and a user override. + ReleaseSourceRepository::record_poll_success( + conn, + s.id, + Utc::now(), + Some("\"etag-1\"".to_string()), + Some("Fetched 3 items".to_string()), + ) + .await + .unwrap(); + ReleaseSourceRepository::update( + conn, + s.id, + ReleaseSourceUpdate { + enabled: Some(false), + cron_schedule: Some(Some("0 */6 * * *".to_string())), + ..Default::default() + }, + ) + .await + .unwrap(); + + ReleaseSourceRepository::clear_poll_state(conn, s.id) + .await + .unwrap(); + + let after = ReleaseSourceRepository::get_by_id(conn, s.id) + .await + .unwrap() + .unwrap(); + assert!(after.etag.is_none()); + assert!(after.last_polled_at.is_none()); + assert!(after.last_error.is_none()); + assert!(after.last_error_at.is_none()); + assert!(after.last_summary.is_none()); + // User-managed fields preserved. 
+ assert!(!after.enabled); + assert_eq!(after.cron_schedule.as_deref(), Some("0 */6 * * *")); + } + + #[tokio::test] + async fn delete_removes_row() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let s = ReleaseSourceRepository::create(conn, nyaa_source()) + .await + .unwrap(); + + let removed = ReleaseSourceRepository::delete(conn, s.id).await.unwrap(); + assert!(removed); + let gone = ReleaseSourceRepository::get_by_id(conn, s.id) + .await + .unwrap(); + assert!(gone.is_none()); + } +} diff --git a/src/db/repositories/series.rs b/src/db/repositories/series.rs index e7c0bc63..f1b09a10 100644 --- a/src/db/repositories/series.rs +++ b/src/db/repositories/series.rs @@ -2067,6 +2067,62 @@ impl SeriesRepository { Ok(map) } + /// Fetch the set of owned `(volume, chapter)` keys for a series, used + /// by the release-tracking auto-ignore predicate. + /// + /// Skips books with both `volume` and `chapter` null (no signal). + /// `has_any_volume_metadata` reflects whether any non-deleted book in + /// the series carries a non-null `volume`; the count fallback in + /// [`crate::services::release::auto_ignore`] only fires when this is + /// false. 
+ pub async fn get_owned_release_keys_for_series( + db: &DatabaseConnection, + series_id: Uuid, + ) -> Result<crate::services::release::auto_ignore::OwnedReleaseKeys> { + use crate::services::release::auto_ignore::OwnedReleaseKeys; + + #[derive(Debug, FromQueryResult)] + struct KeyRow { + volume: Option<i32>, + chapter: Option<f32>, + } + + let rows: Vec<KeyRow> = books::Entity::find() + .select_only() + .column_as(book_metadata::Column::Volume, "volume") + .column_as(book_metadata::Column::Chapter, "chapter") + .join(JoinType::LeftJoin, books::Relation::BookMetadata.def()) + .filter(books::Column::SeriesId.eq(series_id)) + .filter(books::Column::Deleted.eq(false)) + .into_model::<KeyRow>() + .all(db) + .await + .context("Failed to load owned release keys")?; + + let mut keys: Vec<(Option<i32>, Option<f64>)> = Vec::with_capacity(rows.len()); + let mut has_any_volume_metadata = false; + let mut volumes_owned_count: i64 = 0; + for r in rows { + if r.volume.is_some() { + has_any_volume_metadata = true; + } + if r.volume.is_some() && r.chapter.is_none() { + volumes_owned_count += 1; + } + // Skip rows with no signal at all. 
+ if r.volume.is_none() && r.chapter.is_none() { + continue; + } + keys.push((r.volume, r.chapter.map(f64::from))); + } + + Ok(OwnedReleaseKeys { + keys, + has_any_volume_metadata, + volumes_owned_count, + }) + } + /// Delete a series pub async fn delete(db: &DatabaseConnection, id: Uuid) -> Result<()> { Series::delete_by_id(id) @@ -3603,4 +3659,129 @@ mod tests { assert_eq!(none.local_max_volume, None); assert_eq!(none.volumes_owned, None); } + + #[tokio::test] + async fn test_owned_release_keys_with_metadata() { + let (db, _temp_dir) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let library = LibraryRepository::create( + conn, + "Test Library", + "/test/path", + ScanningStrategy::Default, + ) + .await + .unwrap(); + + let series = SeriesRepository::create(conn, library.id, "Mixed", None) + .await + .unwrap(); + + // Whole vol 1, whole vol 3, ch 12 of vol 2, pure ch 99.5, untyped book. + insert_book_with_classification(conn, series.id, library.id, "/v1.cbz", Some(1), None) + .await; + insert_book_with_classification(conn, series.id, library.id, "/v3.cbz", Some(3), None) + .await; + insert_book_with_classification( + conn, + series.id, + library.id, + "/v2c12.cbz", + Some(2), + Some(12.0), + ) + .await; + insert_book_with_classification( + conn, + series.id, + library.id, + "/c99-5.cbz", + None, + Some(99.5), + ) + .await; + insert_book_with_classification(conn, series.id, library.id, "/untyped.cbz", None, None) + .await; + + let owned = SeriesRepository::get_owned_release_keys_for_series(conn, series.id) + .await + .unwrap(); + + assert!(owned.has_any_volume_metadata); + assert_eq!(owned.volumes_owned_count, 2); + // Untyped book is filtered out; the other four contribute keys. 
+ assert_eq!(owned.keys.len(), 4); + assert!(owned.keys.contains(&(Some(1), None))); + assert!(owned.keys.contains(&(Some(3), None))); + assert!(owned.keys.contains(&(Some(2), Some(12.0)))); + assert!(owned.keys.contains(&(None, Some(99.5)))); + } + + #[tokio::test] + async fn test_owned_release_keys_pure_count_world() { + let (db, _temp_dir) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let library = LibraryRepository::create( + conn, + "Test Library", + "/test/path", + ScanningStrategy::Default, + ) + .await + .unwrap(); + + let series = SeriesRepository::create(conn, library.id, "NoMeta", None) + .await + .unwrap(); + + // Three untyped books — count world. + for i in 1..=3 { + insert_book_with_classification( + conn, + series.id, + library.id, + &format!("/u{}.cbz", i), + None, + None, + ) + .await; + } + + let owned = SeriesRepository::get_owned_release_keys_for_series(conn, series.id) + .await + .unwrap(); + + assert!(!owned.has_any_volume_metadata); + assert_eq!(owned.volumes_owned_count, 0); + assert!(owned.keys.is_empty()); + } + + #[tokio::test] + async fn test_owned_release_keys_empty_series() { + let (db, _temp_dir) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let library = LibraryRepository::create( + conn, + "Test Library", + "/test/path", + ScanningStrategy::Default, + ) + .await + .unwrap(); + + let empty = SeriesRepository::create(conn, library.id, "Empty", None) + .await + .unwrap(); + + let owned = SeriesRepository::get_owned_release_keys_for_series(conn, empty.id) + .await + .unwrap(); + + assert!(!owned.has_any_volume_metadata); + assert_eq!(owned.volumes_owned_count, 0); + assert!(owned.keys.is_empty()); + } } diff --git a/src/db/repositories/series_aliases.rs b/src/db/repositories/series_aliases.rs new file mode 100644 index 00000000..fdf595de --- /dev/null +++ b/src/db/repositories/series_aliases.rs @@ -0,0 +1,408 @@ +//! Repository for the `series_aliases` table. +//! +//! 
Title aliases used by release-source plugins to match incoming release +//! titles against tracked series when an external ID isn't available (e.g. +//! Nyaa). Distinct from `alternate_title.rs` which manages localized titles +//! with labels. + +#![allow(dead_code)] + +use anyhow::Result; +use chrono::Utc; +use sea_orm::{ + ActiveModelTrait, ColumnTrait, DatabaseConnection, EntityTrait, PaginatorTrait, QueryFilter, + Set, +}; +use std::collections::HashMap; +use uuid::Uuid; + +use crate::db::entities::series_aliases::{ + self, Entity as SeriesAliases, Model as SeriesAlias, alias_source, normalize_alias, +}; + +pub struct SeriesAliasRepository; + +impl SeriesAliasRepository { + /// Get an alias row by id. + pub async fn get_by_id(db: &DatabaseConnection, id: Uuid) -> Result<Option<SeriesAlias>> { + Ok(SeriesAliases::find_by_id(id).one(db).await?) + } + + /// Get all aliases for a series, ordered by alias for stable display. + pub async fn get_for_series( + db: &DatabaseConnection, + series_id: Uuid, + ) -> Result<Vec<SeriesAlias>> { + use sea_orm::QueryOrder; + let results = SeriesAliases::find() + .filter(series_aliases::Column::SeriesId.eq(series_id)) + .order_by_asc(series_aliases::Column::Alias) + .all(db) + .await?; + Ok(results) + } + + /// Bulk-fetch aliases for many series, returned as a HashMap keyed by series_id. + pub async fn get_for_series_ids( + db: &DatabaseConnection, + series_ids: &[Uuid], + ) -> Result<HashMap<Uuid, Vec<SeriesAlias>>> { + if series_ids.is_empty() { + return Ok(HashMap::new()); + } + let results = SeriesAliases::find() + .filter(series_aliases::Column::SeriesId.is_in(series_ids.to_vec())) + .all(db) + .await?; + let mut map: HashMap<Uuid, Vec<SeriesAlias>> = HashMap::new(); + for row in results { + map.entry(row.series_id).or_default().push(row); + } + Ok(map) + } + + /// Find every series whose normalized alias equals `normalized`. + /// Returns rows so the caller can reach `series_id` and the original alias. 
+ pub async fn find_by_normalized( + db: &DatabaseConnection, + normalized: &str, + ) -> Result<Vec<SeriesAlias>> { + Ok(SeriesAliases::find() + .filter(series_aliases::Column::Normalized.eq(normalized)) + .all(db) + .await?) + } + + /// Create an alias. Returns the existing row if `(series_id, alias)` + /// already exists - aliases are idempotent on add. + pub async fn create( + db: &DatabaseConnection, + series_id: Uuid, + alias: &str, + source: &str, + ) -> Result<SeriesAlias> { + if !alias_source::is_valid(source) { + anyhow::bail!("invalid alias source: {}", source); + } + let trimmed = alias.trim(); + if trimmed.is_empty() { + anyhow::bail!("alias cannot be empty"); + } + + // Idempotent on (series_id, alias). + if let Some(existing) = SeriesAliases::find() + .filter(series_aliases::Column::SeriesId.eq(series_id)) + .filter(series_aliases::Column::Alias.eq(trimmed)) + .one(db) + .await? + { + return Ok(existing); + } + + let normalized = normalize_alias(trimmed); + if normalized.is_empty() { + anyhow::bail!("alias normalizes to empty string"); + } + + let active = series_aliases::ActiveModel { + id: Set(Uuid::new_v4()), + series_id: Set(series_id), + alias: Set(trimmed.to_string()), + normalized: Set(normalized), + source: Set(source.to_string()), + created_at: Set(Utc::now()), + }; + Ok(active.insert(db).await?) + } + + /// Bulk-insert aliases for a series. Existing aliases (by normalized text) + /// are skipped. Returns the number of newly inserted rows. + pub async fn bulk_create( + db: &DatabaseConnection, + series_id: Uuid, + aliases: &[&str], + source: &str, + ) -> Result<usize> { + if !alias_source::is_valid(source) { + anyhow::bail!("invalid alias source: {}", source); + } + let mut inserted = 0; + for alias in aliases { + // Skip blanks defensively; create() also checks but a noisy upstream + // shouldn't cause a hard error here. 
+ if alias.trim().is_empty() { + continue; + } + // create() is idempotent; we count only true inserts by checking before/after. + let before = Self::count_for_series_with_alias(db, series_id, alias.trim()).await?; + Self::create(db, series_id, alias, source).await?; + let after = Self::count_for_series_with_alias(db, series_id, alias.trim()).await?; + if after > before { + inserted += 1; + } + } + Ok(inserted) + } + + async fn count_for_series_with_alias( + db: &DatabaseConnection, + series_id: Uuid, + alias: &str, + ) -> Result<u64> { + let count = SeriesAliases::find() + .filter(series_aliases::Column::SeriesId.eq(series_id)) + .filter(series_aliases::Column::Alias.eq(alias)) + .count(db) + .await?; + Ok(count) + } + + /// Delete an alias by id. Returns true if a row was removed. + pub async fn delete(db: &DatabaseConnection, id: Uuid) -> Result<bool> { + let result = SeriesAliases::delete_by_id(id).exec(db).await?; + Ok(result.rows_affected > 0) + } + + /// Delete all aliases from a given source for a series. Useful for + /// "refresh metadata-sourced aliases" without touching manual aliases. + pub async fn delete_by_source_for_series( + db: &DatabaseConnection, + series_id: Uuid, + source: &str, + ) -> Result<u64> { + let result = SeriesAliases::delete_many() + .filter(series_aliases::Column::SeriesId.eq(series_id)) + .filter(series_aliases::Column::Source.eq(source)) + .exec(db) + .await?; + Ok(result.rows_affected) + } + + /// Delete all aliases for a series (independent of cascade). + pub async fn delete_all_for_series(db: &DatabaseConnection, series_id: Uuid) -> Result<u64> { + let result = SeriesAliases::delete_many() + .filter(series_aliases::Column::SeriesId.eq(series_id)) + .exec(db) + .await?; + Ok(result.rows_affected) + } + + /// Count aliases for a series. 
+ pub async fn count_for_series(db: &DatabaseConnection, series_id: Uuid) -> Result<u64> { + let count = SeriesAliases::find() + .filter(series_aliases::Column::SeriesId.eq(series_id)) + .count(db) + .await?; + Ok(count) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db::ScanningStrategy; + use crate::db::repositories::{LibraryRepository, SeriesRepository}; + use crate::db::test_helpers::create_test_db; + + async fn make_two_series(db: &DatabaseConnection) -> (Uuid, Uuid) { + let library = LibraryRepository::create(db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let s1 = SeriesRepository::create(db, library.id, "Series 1", None) + .await + .unwrap(); + let s2 = SeriesRepository::create(db, library.id, "Series 2", None) + .await + .unwrap(); + (s1.id, s2.id) + } + + #[tokio::test] + async fn create_inserts_with_normalized_form() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (s1, _) = make_two_series(conn).await; + + let row = SeriesAliasRepository::create(conn, s1, "My Hero Academia!", "manual") + .await + .unwrap(); + assert_eq!(row.alias, "My Hero Academia!"); + assert_eq!(row.normalized, "my hero academia"); + assert_eq!(row.source, "manual"); + } + + #[tokio::test] + async fn create_is_idempotent_per_series() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (s1, _) = make_two_series(conn).await; + + let r1 = SeriesAliasRepository::create(conn, s1, "Boku no Hero", "manual") + .await + .unwrap(); + let r2 = SeriesAliasRepository::create(conn, s1, "Boku no Hero", "manual") + .await + .unwrap(); + assert_eq!(r1.id, r2.id, "same alias on same series returns same row"); + + let count = SeriesAliasRepository::count_for_series(conn, s1) + .await + .unwrap(); + assert_eq!(count, 1); + } + + #[tokio::test] + async fn same_alias_allowed_on_different_series() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let 
(s1, s2) = make_two_series(conn).await; + + let a = SeriesAliasRepository::create(conn, s1, "Common Title", "metadata") + .await + .unwrap(); + let b = SeriesAliasRepository::create(conn, s2, "Common Title", "metadata") + .await + .unwrap(); + assert_ne!(a.id, b.id); + assert_eq!(a.normalized, b.normalized); + } + + #[tokio::test] + async fn create_rejects_blank_or_punctuation_only() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (s1, _) = make_two_series(conn).await; + + let err = SeriesAliasRepository::create(conn, s1, " ", "manual") + .await + .unwrap_err(); + assert!(err.to_string().contains("empty")); + + let err = SeriesAliasRepository::create(conn, s1, "!!!---!!!", "manual") + .await + .unwrap_err(); + assert!(err.to_string().contains("normalize")); + } + + #[tokio::test] + async fn create_rejects_invalid_source() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (s1, _) = make_two_series(conn).await; + + let err = SeriesAliasRepository::create(conn, s1, "X", "auto") + .await + .unwrap_err(); + assert!(err.to_string().contains("invalid alias source")); + } + + #[tokio::test] + async fn find_by_normalized_returns_all_matches() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (s1, s2) = make_two_series(conn).await; + + SeriesAliasRepository::create(conn, s1, "My Series", "manual") + .await + .unwrap(); + SeriesAliasRepository::create(conn, s2, "MY SERIES!", "metadata") + .await + .unwrap(); + SeriesAliasRepository::create(conn, s1, "Other Title", "manual") + .await + .unwrap(); + + let matches = SeriesAliasRepository::find_by_normalized(conn, "my series") + .await + .unwrap(); + assert_eq!(matches.len(), 2, "both series share normalized 'my series'"); + let mut series_ids: Vec<Uuid> = matches.into_iter().map(|m| m.series_id).collect(); + series_ids.sort(); + let mut expected = [s1, s2]; + expected.sort(); + assert_eq!(series_ids, 
expected); + } + + #[tokio::test] + async fn bulk_create_dedups_and_counts_inserts() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (s1, _) = make_two_series(conn).await; + + let inserted = SeriesAliasRepository::bulk_create( + conn, + s1, + &["Title A", "Title B", "Title A", ""], + "metadata", + ) + .await + .unwrap(); + assert_eq!(inserted, 2, "blank skipped, duplicate dedup'd"); + + let again = + SeriesAliasRepository::bulk_create(conn, s1, &["Title A", "Title C"], "metadata") + .await + .unwrap(); + assert_eq!(again, 1, "Title A already present, only Title C is new"); + + let count = SeriesAliasRepository::count_for_series(conn, s1) + .await + .unwrap(); + assert_eq!(count, 3); + } + + #[tokio::test] + async fn delete_by_source_only_touches_that_source() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (s1, _) = make_two_series(conn).await; + + SeriesAliasRepository::create(conn, s1, "Manual One", "manual") + .await + .unwrap(); + SeriesAliasRepository::create(conn, s1, "Meta One", "metadata") + .await + .unwrap(); + SeriesAliasRepository::create(conn, s1, "Meta Two", "metadata") + .await + .unwrap(); + + let removed = SeriesAliasRepository::delete_by_source_for_series(conn, s1, "metadata") + .await + .unwrap(); + assert_eq!(removed, 2); + + let remaining = SeriesAliasRepository::get_for_series(conn, s1) + .await + .unwrap(); + assert_eq!(remaining.len(), 1); + assert_eq!(remaining[0].source, "manual"); + } + + #[tokio::test] + async fn cascade_deletes_aliases_when_series_deleted() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let (s1, _) = make_two_series(conn).await; + + SeriesAliasRepository::create(conn, s1, "Will Be Cascaded", "manual") + .await + .unwrap(); + SeriesRepository::delete(conn, s1).await.unwrap(); + + let after = SeriesAliasRepository::get_for_series(conn, s1) + .await + .unwrap(); + assert!(after.is_empty()); + } + + 
#[tokio::test] + async fn get_for_series_ids_handles_empty_input() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let map = SeriesAliasRepository::get_for_series_ids(conn, &[]) + .await + .unwrap(); + assert!(map.is_empty()); + } +} diff --git a/src/db/repositories/series_tracking.rs b/src/db/repositories/series_tracking.rs new file mode 100644 index 00000000..450c03e1 --- /dev/null +++ b/src/db/repositories/series_tracking.rs @@ -0,0 +1,510 @@ +//! Repository for the `series_tracking` sidecar table. +//! +//! Provides 1:1 read/write access to release-tracking metadata for a series +//! (whether it's tracked, current external chapter/volume, per-series overrides, +//! etc.). This repository is intentionally narrow - it doesn't reach into +//! `series_external_ids` (already its own repo) or `series_aliases` (sibling +//! repo); the release-tracking service composes them. + +#![allow(dead_code)] + +use anyhow::Result; +use chrono::Utc; +use sea_orm::{ActiveModelTrait, ColumnTrait, DatabaseConnection, EntityTrait, QueryFilter, Set}; +use uuid::Uuid; + +use crate::db::entities::series_tracking::{ + self, Entity as SeriesTracking, Model as SeriesTrackingRow, +}; + +/// Parameters for upserting a tracking row. Each `Option<Option<T>>` distinguishes +/// "leave alone" (`None`) from "explicitly clear" (`Some(None)`). +#[derive(Debug, Default, Clone)] +pub struct TrackingUpdate { + pub tracked: Option<bool>, + pub track_chapters: Option<bool>, + pub track_volumes: Option<bool>, + /// Outer `None` = leave alone; inner `None` = clear. + pub latest_known_chapter: Option<Option<f64>>, + pub latest_known_volume: Option<Option<i32>>, + pub volume_chapter_map: Option<Option<serde_json::Value>>, + pub poll_interval_override_s: Option<Option<i32>>, + pub confidence_threshold_override: Option<Option<f64>>, + /// Per-series language preference. Outer `None` = leave alone; inner `None` = + /// clear (revert to server-wide default). 
+ pub languages: Option<Option<serde_json::Value>>, +} + +pub struct SeriesTrackingRepository; + +impl SeriesTrackingRepository { + /// Get the tracking row for a series, if one exists. + pub async fn get( + db: &DatabaseConnection, + series_id: Uuid, + ) -> Result<Option<SeriesTrackingRow>> { + let result = SeriesTracking::find_by_id(series_id).one(db).await?; + Ok(result) + } + + /// Get the tracking row, defaulting to a virtual untracked row if none exists. + /// The returned row is NOT persisted unless explicitly upserted. + pub async fn get_or_default( + db: &DatabaseConnection, + series_id: Uuid, + ) -> Result<SeriesTrackingRow> { + if let Some(row) = Self::get(db, series_id).await? { + return Ok(row); + } + let now = Utc::now(); + Ok(SeriesTrackingRow { + series_id, + tracked: false, + track_chapters: true, + track_volumes: true, + latest_known_chapter: None, + latest_known_volume: None, + volume_chapter_map: None, + poll_interval_override_s: None, + confidence_threshold_override: None, + languages: None, + created_at: now, + updated_at: now, + }) + } + + /// Upsert: insert if missing, otherwise apply the update fields. Fields with + /// `None` in `update` are left untouched. 
+ pub async fn upsert( + db: &DatabaseConnection, + series_id: Uuid, + update: TrackingUpdate, + ) -> Result<SeriesTrackingRow> { + let now = Utc::now(); + let existing = SeriesTracking::find_by_id(series_id).one(db).await?; + + match existing { + Some(existing) => { + let mut active: series_tracking::ActiveModel = existing.into(); + if let Some(v) = update.tracked { + active.tracked = Set(v); + } + if let Some(v) = update.track_chapters { + active.track_chapters = Set(v); + } + if let Some(v) = update.track_volumes { + active.track_volumes = Set(v); + } + if let Some(v) = update.latest_known_chapter { + active.latest_known_chapter = Set(v); + } + if let Some(v) = update.latest_known_volume { + active.latest_known_volume = Set(v); + } + if let Some(v) = update.volume_chapter_map { + active.volume_chapter_map = Set(v); + } + if let Some(v) = update.poll_interval_override_s { + active.poll_interval_override_s = Set(v); + } + if let Some(v) = update.confidence_threshold_override { + active.confidence_threshold_override = Set(v); + } + if let Some(v) = update.languages { + active.languages = Set(v); + } + active.updated_at = Set(now); + let model = active.update(db).await?; + Ok(model) + } + None => { + let active = series_tracking::ActiveModel { + series_id: Set(series_id), + tracked: Set(update.tracked.unwrap_or(false)), + track_chapters: Set(update.track_chapters.unwrap_or(true)), + track_volumes: Set(update.track_volumes.unwrap_or(true)), + latest_known_chapter: Set(update.latest_known_chapter.unwrap_or(None)), + latest_known_volume: Set(update.latest_known_volume.unwrap_or(None)), + volume_chapter_map: Set(update.volume_chapter_map.unwrap_or(None)), + poll_interval_override_s: Set(update.poll_interval_override_s.unwrap_or(None)), + confidence_threshold_override: Set(update + .confidence_threshold_override + .unwrap_or(None)), + languages: Set(update.languages.unwrap_or(None)), + created_at: Set(now), + updated_at: Set(now), + }; + let model = 
active.insert(db).await?; + Ok(model) + } + } + } + + /// Convenience: toggle `tracked` on an existing or virtual row. + pub async fn set_tracked( + db: &DatabaseConnection, + series_id: Uuid, + tracked: bool, + ) -> Result<SeriesTrackingRow> { + Self::upsert( + db, + series_id, + TrackingUpdate { + tracked: Some(tracked), + ..Default::default() + }, + ) + .await + } + + /// List all tracked series IDs. Used by the polling service to enumerate + /// what to ask plugins for. Paginated to keep memory bounded for large + /// libraries; pass `limit = 0` for no limit (callers should normally page). + pub async fn list_tracked_ids( + db: &DatabaseConnection, + limit: u64, + offset: u64, + ) -> Result<Vec<Uuid>> { + use sea_orm::QuerySelect; + let mut query = SeriesTracking::find().filter(series_tracking::Column::Tracked.eq(true)); + if limit > 0 { + query = query.limit(limit); + } + if offset > 0 { + query = query.offset(offset); + } + let results = query.all(db).await?; + Ok(results.into_iter().map(|m| m.series_id).collect()) + } + + /// Batched lookup: fetch tracking rows for many series in one query and + /// return them keyed by `series_id`. Series without a tracking row are + /// absent from the map (callers should treat this as untracked). + pub async fn get_for_series_ids( + db: &DatabaseConnection, + series_ids: &[Uuid], + ) -> Result<std::collections::HashMap<Uuid, SeriesTrackingRow>> { + if series_ids.is_empty() { + return Ok(std::collections::HashMap::new()); + } + let rows = SeriesTracking::find() + .filter(series_tracking::Column::SeriesId.is_in(series_ids.to_vec())) + .all(db) + .await?; + Ok(rows.into_iter().map(|r| (r.series_id, r)).collect()) + } + + /// Count tracked series. + pub async fn count_tracked(db: &DatabaseConnection) -> Result<u64> { + use sea_orm::PaginatorTrait; + let count = SeriesTracking::find() + .filter(series_tracking::Column::Tracked.eq(true)) + .count(db) + .await?; + Ok(count) + } + + /// Delete the tracking row for a series. 
Cascade from series delete handles + /// the normal case; this is for explicit user-initiated "stop tracking and + /// forget overrides." + pub async fn delete(db: &DatabaseConnection, series_id: Uuid) -> Result<bool> { + let result = SeriesTracking::delete_by_id(series_id).exec(db).await?; + Ok(result.rows_affected > 0) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db::ScanningStrategy; + use crate::db::repositories::{LibraryRepository, SeriesRepository}; + use crate::db::test_helpers::create_test_db; + + async fn make_series(db: &DatabaseConnection) -> Uuid { + let library = + LibraryRepository::create(db, "Test Library", "/test/path", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(db, library.id, "Test Series", None) + .await + .unwrap(); + series.id + } + + #[tokio::test] + async fn get_returns_none_when_no_row() { + let (db, _temp) = create_test_db().await; + let series_id = make_series(db.sea_orm_connection()).await; + + let row = SeriesTrackingRepository::get(db.sea_orm_connection(), series_id) + .await + .unwrap(); + assert!(row.is_none()); + } + + #[tokio::test] + async fn get_or_default_returns_untracked_row() { + let (db, _temp) = create_test_db().await; + let series_id = make_series(db.sea_orm_connection()).await; + + let row = SeriesTrackingRepository::get_or_default(db.sea_orm_connection(), series_id) + .await + .unwrap(); + assert_eq!(row.series_id, series_id); + assert!(!row.tracked); + assert!(row.track_chapters); + assert!(row.track_volumes); + } + + #[tokio::test] + async fn upsert_inserts_then_updates() { + let (db, _temp) = create_test_db().await; + let series_id = make_series(db.sea_orm_connection()).await; + + // First upsert inserts. 
+ let row = SeriesTrackingRepository::upsert( + db.sea_orm_connection(), + series_id, + TrackingUpdate { + tracked: Some(true), + latest_known_chapter: Some(Some(142.0)), + ..Default::default() + }, + ) + .await + .unwrap(); + assert!(row.tracked); + assert_eq!(row.latest_known_chapter, Some(142.0)); + + // Second upsert updates only specified fields. + let row2 = SeriesTrackingRepository::upsert( + db.sea_orm_connection(), + series_id, + TrackingUpdate { + latest_known_chapter: Some(Some(143.0)), + ..Default::default() + }, + ) + .await + .unwrap(); + assert!(row2.tracked, "tracked should be preserved"); + assert_eq!(row2.latest_known_chapter, Some(143.0)); + } + + #[tokio::test] + async fn upsert_can_clear_optional_fields() { + let (db, _temp) = create_test_db().await; + let series_id = make_series(db.sea_orm_connection()).await; + + SeriesTrackingRepository::upsert( + db.sea_orm_connection(), + series_id, + TrackingUpdate { + latest_known_chapter: Some(Some(50.0)), + ..Default::default() + }, + ) + .await + .unwrap(); + + // Explicit clear via Some(None). 
+ let cleared = SeriesTrackingRepository::upsert( + db.sea_orm_connection(), + series_id, + TrackingUpdate { + latest_known_chapter: Some(None), + ..Default::default() + }, + ) + .await + .unwrap(); + assert_eq!(cleared.latest_known_chapter, None); + } + + #[tokio::test] + async fn set_tracked_toggles_flag() { + let (db, _temp) = create_test_db().await; + let series_id = make_series(db.sea_orm_connection()).await; + + let row = SeriesTrackingRepository::set_tracked(db.sea_orm_connection(), series_id, true) + .await + .unwrap(); + assert!(row.tracked); + + let row = SeriesTrackingRepository::set_tracked(db.sea_orm_connection(), series_id, false) + .await + .unwrap(); + assert!(!row.tracked); + } + + #[tokio::test] + async fn list_tracked_ids_filters_to_tracked() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let library = LibraryRepository::create(conn, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let s1 = SeriesRepository::create(conn, library.id, "A", None) + .await + .unwrap(); + let s2 = SeriesRepository::create(conn, library.id, "B", None) + .await + .unwrap(); + let _s3 = SeriesRepository::create(conn, library.id, "C", None) + .await + .unwrap(); + + SeriesTrackingRepository::set_tracked(conn, s1.id, true) + .await + .unwrap(); + SeriesTrackingRepository::set_tracked(conn, s2.id, false) + .await + .unwrap(); + // s3 has no tracking row at all. 
+ + let ids = SeriesTrackingRepository::list_tracked_ids(conn, 0, 0) + .await + .unwrap(); + assert_eq!(ids.len(), 1); + assert_eq!(ids[0], s1.id); + + let count = SeriesTrackingRepository::count_tracked(conn).await.unwrap(); + assert_eq!(count, 1); + } + + #[tokio::test] + async fn get_for_series_ids_returns_only_existing_rows() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + + let library = LibraryRepository::create(conn, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let s1 = SeriesRepository::create(conn, library.id, "A", None) + .await + .unwrap(); + let s2 = SeriesRepository::create(conn, library.id, "B", None) + .await + .unwrap(); + let s3 = SeriesRepository::create(conn, library.id, "C", None) + .await + .unwrap(); + + SeriesTrackingRepository::set_tracked(conn, s1.id, true) + .await + .unwrap(); + SeriesTrackingRepository::set_tracked(conn, s2.id, false) + .await + .unwrap(); + // s3 has no tracking row. + + let map = SeriesTrackingRepository::get_for_series_ids(conn, &[s1.id, s2.id, s3.id]) + .await + .unwrap(); + assert_eq!(map.len(), 2); + assert!(map.get(&s1.id).map(|r| r.tracked).unwrap_or(false)); + assert_eq!(map.get(&s2.id).map(|r| r.tracked), Some(false)); + assert!(!map.contains_key(&s3.id)); + + let empty = SeriesTrackingRepository::get_for_series_ids(conn, &[]) + .await + .unwrap(); + assert!(empty.is_empty()); + } + + #[tokio::test] + async fn upsert_round_trips_languages_field() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let series_id = make_series(conn).await; + + // Insert with languages set. 
+ let row = SeriesTrackingRepository::upsert( + conn, + series_id, + TrackingUpdate { + tracked: Some(true), + languages: Some(Some(serde_json::json!(["en", "es"]))), + ..Default::default() + }, + ) + .await + .unwrap(); + assert_eq!(row.languages, Some(serde_json::json!(["en", "es"]))); + + // Update to a different language list; other fields preserved. + let row = SeriesTrackingRepository::upsert( + conn, + series_id, + TrackingUpdate { + languages: Some(Some(serde_json::json!(["en"]))), + ..Default::default() + }, + ) + .await + .unwrap(); + assert!(row.tracked, "tracked preserved"); + assert_eq!(row.languages, Some(serde_json::json!(["en"]))); + + // Clear: Some(None) drops back to server-wide default. + let cleared = SeriesTrackingRepository::upsert( + conn, + series_id, + TrackingUpdate { + languages: Some(None), + ..Default::default() + }, + ) + .await + .unwrap(); + assert_eq!(cleared.languages, None); + + // Outer None leaves languages untouched. + let again = SeriesTrackingRepository::upsert( + conn, + series_id, + TrackingUpdate { + languages: Some(Some(serde_json::json!(["en", "fr"]))), + ..Default::default() + }, + ) + .await + .unwrap(); + assert_eq!(again.languages, Some(serde_json::json!(["en", "fr"]))); + let untouched = SeriesTrackingRepository::upsert( + conn, + series_id, + TrackingUpdate { + tracked: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + assert_eq!( + untouched.languages, + Some(serde_json::json!(["en", "fr"])), + "languages preserved when not in update" + ); + } + + #[tokio::test] + async fn cascade_deletes_tracking_when_series_deleted() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let series_id = make_series(conn).await; + + SeriesTrackingRepository::set_tracked(conn, series_id, true) + .await + .unwrap(); + + // Delete the series; tracking should follow via FK cascade. 
+ SeriesRepository::delete(conn, series_id).await.unwrap(); + + let row = SeriesTrackingRepository::get(conn, series_id) + .await + .unwrap(); + assert!(row.is_none(), "tracking row should be cascaded away"); + } +} diff --git a/src/db/repositories/task.rs b/src/db/repositories/task.rs index 65e3ab8e..21b8b1a5 100644 --- a/src/db/repositories/task.rs +++ b/src/db/repositories/task.rs @@ -118,9 +118,7 @@ impl TaskRepository { let params = task_type.params(); // Check if a task already exists for this entity - if let Some(existing_task) = - Self::find_existing_task(db, type_str, library_id, series_id, book_id).await? - { + if let Some(existing_task) = Self::find_existing_task(db, &task_type).await? { info!( "Task already exists: {} ({}) - skipping duplicate", existing_task.id, type_str @@ -166,10 +164,7 @@ impl TaskRepository { if err_str.contains("unique") || err_str.contains("duplicate") { // Race condition: another task was inserted between our check and insert // Find and return the existing task - if let Some(existing_task) = - Self::find_existing_task(db, type_str, library_id, series_id, book_id) - .await? - { + if let Some(existing_task) = Self::find_existing_task(db, &task_type).await? { info!( "Task was created concurrently: {} ({}) - using existing task", existing_task.id, type_str @@ -202,9 +197,7 @@ impl TaskRepository { let params = task_type.params(); // Check if a task already exists for this entity - if let Some(existing_task) = - Self::find_existing_task(db, type_str, library_id, series_id, book_id).await? - { + if let Some(existing_task) = Self::find_existing_task(db, &task_type).await? { info!( "Task already exists: {} ({}) - skipping duplicate", existing_task.id, type_str @@ -249,10 +242,7 @@ impl TaskRepository { Err(e) => { let err_str = e.to_string().to_lowercase(); if err_str.contains("unique") || err_str.contains("duplicate") { - if let Some(existing_task) = - Self::find_existing_task(db, type_str, library_id, series_id, book_id) - .await? 
- { + if let Some(existing_task) = Self::find_existing_task(db, &task_type).await? { info!( "Task was created concurrently: {} ({}) - using existing task", existing_task.id, type_str @@ -382,14 +372,26 @@ impl TaskRepository { Ok(enqueued) } - /// Find an existing pending/processing task for the given entity + /// Find an existing pending/processing task for the given task. + /// + /// Dedup key, in order of preference: + /// 1. The most specific FK column set on the task (`book_id` > + /// `series_id` > `library_id`). + /// 2. The JSON-param pair returned by `TaskType::dedup_params()`, for + /// task types whose identity lives in `params` (e.g. + /// `PollReleaseSource`). Without this, two such tasks differing only + /// in `params` would falsely collide on `task_type` alone. + /// 3. None — only `task_type` and status are matched. This is the + /// desired behavior for singleton task types like `FindDuplicates`. async fn find_existing_task( db: &DatabaseConnection, - task_type: &str, - library_id: Option<Uuid>, - series_id: Option<Uuid>, - book_id: Option<Uuid>, + task: &TaskType, ) -> Result<Option<tasks::Model>> { + let task_type = task.type_string(); + let library_id = task.library_id(); + let series_id = task.series_id(); + let book_id = task.book_id(); + let mut query = Tasks::find() .filter(tasks::Column::TaskType.eq(task_type)) .filter(tasks::Column::Status.is_in(["pending", "processing"])); @@ -401,6 +403,18 @@ impl TaskRepository { query = query.filter(tasks::Column::SeriesId.eq(ser_id)); } else if let Some(lib_id) = library_id { query = query.filter(tasks::Column::LibraryId.eq(lib_id)); + } else if let Some((key, value)) = task.dedup_params() { + // Params-based dedup: route through the helper that knows how + // to query JSON params portably across SQLite and Postgres. + return match Self::find_pending_or_processing_by_param(db, task_type, key, &value) + .await? 
+ { + Some(id) => Tasks::find_by_id(id) + .one(db) + .await + .context("Failed to load existing task by id"), + None => Ok(None), + }; } query.one(db).await.context("Failed to find existing task") @@ -452,6 +466,75 @@ impl TaskRepository { Ok(result.is_some()) } + /// Find a pending or processing task by `task_type` and a single + /// JSON-param key/value match. Returns the first matching task ID, if + /// any. Used by enqueue paths that want to coalesce concurrent + /// requests onto an in-flight task instead of stacking duplicates. + /// + /// `param_value` is matched as a string against `params->>key`. UUIDs + /// should be passed as their canonical hyphenated form. + pub async fn find_pending_or_processing_by_param( + db: &DatabaseConnection, + task_type: &str, + param_key: &str, + param_value: &str, + ) -> Result<Option<Uuid>> { + let backend = db.get_database_backend(); + let stmt = match backend { + DbBackend::Postgres => Statement::from_sql_and_values( + DbBackend::Postgres, + r#"SELECT id FROM tasks + WHERE task_type = $1 + AND status IN ('pending', 'processing') + AND params->>$2 = $3 + ORDER BY created_at ASC + LIMIT 1"#, + vec![task_type.into(), param_key.into(), param_value.into()], + ), + _ => { + // SQLite's json_extract path needs a string literal, not a + // bind parameter, so we splice the key into the JSON path. + // Reject anything that isn't a simple identifier to avoid + // injection — callers pass static keys (`source_id`, etc.). + if !param_key + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '_') + { + anyhow::bail!("invalid param_key: {}", param_key); + } + let path = format!("$.{}", param_key); + Statement::from_sql_and_values( + DbBackend::Sqlite, + format!( + r#"SELECT id FROM tasks + WHERE task_type = ? + AND status IN ('pending', 'processing') + AND json_extract(params, '{}') = ? 
+ ORDER BY created_at ASC + LIMIT 1"#, + path + ), + vec![task_type.into(), param_value.into()], + ) + } + }; + + let result = db + .query_one(stmt) + .await + .context("Failed to query for in-flight task")?; + match result { + Some(row) => { + let task_id: Uuid = row.try_get::<Uuid>("", "id").or_else(|_| { + let id_str: String = row.try_get("", "id")?; + Uuid::parse_str(&id_str).map_err(|e| sea_orm::DbErr::Type(e.to_string())) + })?; + Ok(Some(task_id)) + } + None => Ok(None), + } + } + /// Find a pending or processing task with matching params, returning its ID and status. /// /// Like `has_pending_or_processing` but returns the task ID and status string diff --git a/src/events/broadcaster.rs b/src/events/broadcaster.rs index 17ebc266..e876af5c 100644 --- a/src/events/broadcaster.rs +++ b/src/events/broadcaster.rs @@ -92,6 +92,10 @@ impl EventBroadcaster { /// retrieval via `take_recorded_events()`. /// /// Returns the number of receivers that received the event. + // The SendError carries the original event back to the caller; that is + // tokio's contract, not something we control. The event payload is + // by-value already and doesn't justify boxing the error variant. + #[allow(clippy::result_large_err)] pub fn emit( &self, event: EntityChangeEvent, diff --git a/src/events/mod.rs b/src/events/mod.rs index ee05e6a1..dc3e9a52 100644 --- a/src/events/mod.rs +++ b/src/events/mod.rs @@ -8,9 +8,11 @@ //! them on the web server when tasks complete. mod broadcaster; +mod task_context; mod types; pub use broadcaster::{EventBroadcaster, RecordedEvent}; +pub use task_context::{current_recording_broadcaster, with_recording_broadcaster}; // TaskProgress is part of the public API for task progress reporting #[allow(unused_imports)] pub use types::{ diff --git a/src/events/task_context.rs b/src/events/task_context.rs new file mode 100644 index 00000000..df5aad83 --- /dev/null +++ b/src/events/task_context.rs @@ -0,0 +1,109 @@ +//! 
Tokio task-local that exposes the "current task's recording broadcaster" +//! to code that runs inside a `TaskHandler::handle` call (and to any +//! reverse-RPC dispatch the handler triggers, since the dispatcher runs on +//! the caller's task — see `services::plugin::rpc`). +//! +//! Why this exists: when a worker runs a task in distributed mode (PostgreSQL +//! deployments), it creates a per-task recording broadcaster so every +//! `EntityChangeEvent` emitted during the task is captured into +//! `tasks.result.emitted_events` and replayed by the web server's +//! `TaskListener`. Code that emits events inside the task call stack receives +//! the broadcaster as a parameter — but plugin reverse-RPC handlers +//! (`releases/record` etc.) sit behind a JSON-RPC dispatcher that only +//! receives the request, not the broadcaster. Threading the broadcaster +//! through every layer of the dispatcher is invasive; the task-local is the +//! seam. +//! +//! The reverse-RPC dispatcher in [`crate::services::plugin::rpc`] runs the +//! dispatch on the *caller's* tokio task (the one that issued the forward +//! call), so the task-local set up by [`crate::tasks::worker`] is in scope. + +use std::sync::Arc; + +use super::EventBroadcaster; + +tokio::task_local! { + /// Recording broadcaster for the currently-executing task. Set by the + /// worker around `handler.handle(...)`. Read by reverse-RPC handlers via + /// [`current_recording_broadcaster`]. + static CURRENT_RECORDING_BROADCASTER: Arc<EventBroadcaster>; +} + +/// Run `fut` with `broadcaster` as the current task's recording broadcaster. +/// +/// Anything inside `fut` that calls [`current_recording_broadcaster`] sees +/// `Some(broadcaster)`. Outside this scope, callers see `None` and should +/// fall back to whatever they would have done previously (typically: skip +/// the emit, since out-of-task emits have nowhere to be replayed to). 
+pub async fn with_recording_broadcaster<F, T>(broadcaster: Arc<EventBroadcaster>, fut: F) -> T +where + F: std::future::Future<Output = T>, +{ + CURRENT_RECORDING_BROADCASTER.scope(broadcaster, fut).await +} + +/// Snapshot the current task's recording broadcaster, if any. +/// +/// Returns `None` when called outside of a `with_recording_broadcaster` +/// scope (e.g. on the web server's request-handling tasks, where emits go +/// through the long-lived broadcaster directly). +pub fn current_recording_broadcaster() -> Option<Arc<EventBroadcaster>> { + CURRENT_RECORDING_BROADCASTER.try_with(|b| b.clone()).ok() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn returns_none_outside_scope() { + assert!(current_recording_broadcaster().is_none()); + } + + #[tokio::test] + async fn returns_broadcaster_inside_scope() { + let b = Arc::new(EventBroadcaster::new(8)); + let b_for_check = b.clone(); + with_recording_broadcaster(b, async move { + let inside = current_recording_broadcaster().expect("should be set"); + assert!(Arc::ptr_eq(&inside, &b_for_check)); + }) + .await; + assert!(current_recording_broadcaster().is_none()); + } + + #[tokio::test] + async fn nested_scope_overrides_outer() { + let outer = Arc::new(EventBroadcaster::new(8)); + let inner = Arc::new(EventBroadcaster::new(8)); + let inner_for_check = inner.clone(); + with_recording_broadcaster(outer.clone(), async move { + with_recording_broadcaster(inner, async move { + let seen = current_recording_broadcaster().expect("should be set"); + assert!(Arc::ptr_eq(&seen, &inner_for_check)); + }) + .await; + // Outer still in scope. + let seen = current_recording_broadcaster().expect("should be set"); + assert!(Arc::ptr_eq(&seen, &outer)); + }) + .await; + } + + /// task-locals propagate across `await` (same tokio task), which is what + /// we rely on when the reverse-RPC dispatcher runs on the caller's task. 
+ #[tokio::test] + async fn propagates_across_await_chain() { + let b = Arc::new(EventBroadcaster::new(8)); + let b_for_check = b.clone(); + with_recording_broadcaster(b, async move { + // Yield then check — task-local survives across await boundaries + // on the same task. + tokio::task::yield_now().await; + tokio::task::yield_now().await; + let seen = current_recording_broadcaster().expect("should be set"); + assert!(Arc::ptr_eq(&seen, &b_for_check)); + }) + .await; + } +} diff --git a/src/events/types.rs b/src/events/types.rs index 4c10002d..9b7e547a 100644 --- a/src/events/types.rs +++ b/src/events/types.rs @@ -168,6 +168,53 @@ pub enum EntityEvent { #[serde(rename = "pluginId")] plugin_id: Uuid, }, + /// A new release was recorded in the ledger. + /// + /// Emitted once per accepted, non-deduped ledger insert by the polling + /// task and the `releases/record` reverse-RPC handler. The frontend uses + /// this to bump the Releases nav badge, surface a toast on the inbox + /// page, and refresh the per-series Releases tab. + ReleaseAnnounced { + #[serde(rename = "ledgerId")] + ledger_id: Uuid, + #[serde(rename = "seriesId")] + series_id: Uuid, + #[serde(rename = "sourceId")] + source_id: Uuid, + /// Plugin name that owns the source (`release_sources.plugin_id`). + /// Helps the frontend filter without an extra lookup. + #[serde(rename = "pluginId")] + plugin_id: String, + /// Chapter announced (if the source emits chapters). + #[serde(skip_serializing_if = "Option::is_none")] + chapter: Option<f64>, + /// Volume announced (if the source emits volumes). + #[serde(skip_serializing_if = "Option::is_none")] + volume: Option<i32>, + /// Language code (e.g. `"en"`); used by client-side notification + /// preference filters. + language: String, + }, + /// A release source's poll task completed. + /// + /// Emitted at the end of every `poll_release_source` task run, after + /// `release_sources.last_summary` / `last_polled_at` / `etag` have been + /// persisted. 
The frontend uses this to refresh the Release tracking + /// settings page in real time so users don't have to reload to see a + /// "Poll now" finish. Carries no diff details — receivers should + /// invalidate the source query and re-read the row. + ReleaseSourcePolled { + #[serde(rename = "sourceId")] + source_id: Uuid, + /// Plugin that owns the source (`release_sources.plugin_id`). + /// Cheap filter for clients only watching certain plugins. + #[serde(rename = "pluginId")] + plugin_id: String, + /// `true` if the poll wrote a `last_error`. Cheap "did it fail" + /// hint without forcing the client to refetch. + #[serde(rename = "hadError")] + had_error: bool, + }, /// Internal signal to indicate shutdown (not sent to clients) #[serde(skip)] Shutdown, @@ -216,6 +263,8 @@ impl EntityChangeEvent { | EntityEvent::PluginEnabled { .. } | EntityEvent::PluginDisabled { .. } | EntityEvent::PluginDeleted { .. } + | EntityEvent::ReleaseAnnounced { .. } + | EntityEvent::ReleaseSourcePolled { .. } | EntityEvent::Shutdown => None, } } @@ -233,6 +282,44 @@ impl EntityChangeEvent { pub fn is_shutdown(&self) -> bool { matches!(self.event, EntityEvent::Shutdown) } + + /// Build a `ReleaseAnnounced` event from a freshly-inserted ledger row. + /// + /// Wraps the variant construction so callers in the polling task and the + /// reverse-RPC handler share one source of truth for the event shape. + pub fn release_announced( + row: &crate::db::entities::release_ledger::Model, + plugin_id: &str, + ) -> Self { + Self::new( + EntityEvent::ReleaseAnnounced { + ledger_id: row.id, + series_id: row.series_id, + source_id: row.source_id, + plugin_id: plugin_id.to_string(), + chapter: row.chapter, + volume: row.volume, + language: row.language.clone().unwrap_or_default(), + }, + None, + ) + } + + /// Build a `ReleaseSourcePolled` event for the end of a poll task run. 
+ /// + /// Carries only IDs and a single boolean error hint; receivers should + /// invalidate any cached `release_sources` query and re-read the row + /// for fresh `last_summary` / `last_polled_at` / etc. + pub fn release_source_polled(source_id: Uuid, plugin_id: &str, had_error: bool) -> Self { + Self::new( + EntityEvent::ReleaseSourcePolled { + source_id, + plugin_id: plugin_id.to_string(), + had_error, + }, + None, + ) + } } /// Task progress event for background operations diff --git a/src/scheduler/mod.rs b/src/scheduler/mod.rs index 674ecddc..133da74b 100644 --- a/src/scheduler/mod.rs +++ b/src/scheduler/mod.rs @@ -1,3 +1,5 @@ +pub mod release_sources; + use anyhow::{Context, Result}; use chrono_tz::Tz; use sea_orm::DatabaseConnection; @@ -19,6 +21,8 @@ pub struct Scheduler { db: DatabaseConnection, /// Server-level default timezone for all cron schedules default_tz: Tz, + /// Reconcile state for the per-source release-polling jobs. + release_sources: release_sources::ReleaseSourceSchedule, } impl Scheduler { @@ -49,9 +53,25 @@ impl Scheduler { scheduler, db, default_tz, + release_sources: release_sources::ReleaseSourceSchedule::new(), }) } + /// Trigger a release-source reconcile. Call after writes to the + /// `release_sources` table so the scheduler picks up enable/disable + /// changes without a full restart. + pub async fn reconcile_release_sources(&mut self) -> Result<()> { + let settings = SettingsService::new(self.db.clone()).await?; + let server_default = release_sources::read_server_default_cron(&settings).await; + release_sources::reconcile( + &mut self.scheduler, + &mut self.release_sources, + &self.db, + server_default, + ) + .await + } + /// Start the scheduler and load all scheduled jobs pub async fn start(&mut self) -> Result<()> { info!("Starting job scheduler"); @@ -78,6 +98,11 @@ impl Scheduler { self.load_book_thumbnail_schedule().await?; self.load_series_thumbnail_schedule().await?; + // Load release-source polling schedules. 
+        if let Err(e) = self.reconcile_release_sources().await {
+            warn!("Failed to load release-source schedules: {}", e);
+        }
+
         // Start the scheduler
         self.scheduler
             .start()
diff --git a/src/scheduler/release_sources.rs b/src/scheduler/release_sources.rs
new file mode 100644
index 00000000..8563b31f
--- /dev/null
+++ b/src/scheduler/release_sources.rs
@@ -0,0 +1,243 @@
+//! Release-source polling scheduler integration.
+//!
+//! Each enabled `release_sources` row is registered as a tokio-cron-scheduler
+//! job whose schedule is the row's effective cron expression:
+//!
+//! 1. `release_sources.cron_schedule` (per-source override) when non-NULL.
+//! 2. Otherwise the server-wide `release_tracking.default_cron_schedule`
+//!    setting.
+//! 3. Otherwise the compile-time fallback (`"0 0 * * *"`, daily).
+//!
+//! When the cron fires, the job enqueues a `PollReleaseSource` task. The
+//! task itself maintains per-host backoff via
+//! [`crate::services::release::backoff::HostBackoff`] (recording 429/503
+//! from upstream and resetting on success), so the scheduler does not need
+//! to skip cron ticks based on backoff state. A cron firing during a
+//! throttled window returns a 429 quickly without doing real work, and the
+//! task's recorded error feeds the backoff state for the next tick.
+
+use anyhow::{Context, Result};
+use sea_orm::DatabaseConnection;
+use std::collections::HashMap;
+use tokio_cron_scheduler::{Job, JobScheduler};
+use tracing::{debug, error, info, warn};
+use uuid::Uuid;
+
+use crate::db::repositories::{ReleaseSourceRepository, TaskRepository};
+use crate::services::release::schedule::{read_default_cron_schedule, resolve_cron_schedule};
+use crate::services::settings::SettingsService;
+use crate::tasks::types::TaskType;
+use crate::utils::cron::normalize_cron_expression;
+
+/// Tracks scheduler-registered jobs per source row so we can reconcile.
+#[derive(Debug, Default)] +pub struct ReleaseSourceSchedule { + /// Map of `release_sources.id` → tokio-cron-scheduler job UUID. + jobs: HashMap<Uuid, Uuid>, + /// Map of `release_sources.id` → effective cron expression currently + /// registered (post-resolution, pre-normalization). Lets `reconcile` + /// detect schedule changes without rebuilding every job on every pass. + last_cron: HashMap<Uuid, String>, +} + +impl ReleaseSourceSchedule { + pub fn new() -> Self { + Self::default() + } + + pub fn registered_count(&self) -> usize { + self.jobs.len() + } + + pub fn contains(&self, source_id: Uuid) -> bool { + self.jobs.contains_key(&source_id) + } +} + +/// Reconcile the scheduler's release-source jobs against the current set of +/// enabled rows. Adds new sources, removes disabled/deleted ones, and +/// re-registers any whose `cron_schedule` (or the inherited default) changed. +/// +/// Idempotent: safe to call repeatedly (e.g. after a `release_sources` write). +pub async fn reconcile( + scheduler: &mut JobScheduler, + state: &mut ReleaseSourceSchedule, + db: &DatabaseConnection, + server_default: String, +) -> Result<()> { + let enabled = ReleaseSourceRepository::list_enabled(db) + .await + .context("Failed to load enabled release sources")?; + + let mut seen: std::collections::HashSet<Uuid> = std::collections::HashSet::new(); + for source in &enabled { + seen.insert(source.id); + let effective_cron = + resolve_cron_schedule(source.cron_schedule.as_deref(), &server_default); + + if let Some(prev) = state.last_cron.get(&source.id) + && prev == &effective_cron + && state.contains(source.id) + { + // Same schedule, already registered — nothing to do. + continue; + } + + // Schedule changed (or first time we see this source) — drop any + // existing job and register fresh. 
+ if let Some(job_id) = state.jobs.remove(&source.id) + && let Err(e) = scheduler.remove(&job_id).await + { + warn!( + "Failed to remove stale schedule for source {}: {}", + source.id, e + ); + } + + if let Err(e) = register_one(scheduler, state, db, source, &effective_cron).await { + warn!( + "Failed to register schedule for source {} ({}): {}", + source.id, source.display_name, e + ); + } + } + + // Remove jobs whose source row is no longer enabled. + let stale: Vec<Uuid> = state + .jobs + .keys() + .copied() + .filter(|id| !seen.contains(id)) + .collect(); + for source_id in stale { + if let Some(job_id) = state.jobs.remove(&source_id) { + state.last_cron.remove(&source_id); + if let Err(e) = scheduler.remove(&job_id).await { + warn!( + "Failed to remove stale schedule for source {}: {}", + source_id, e + ); + } else { + debug!("Removed stale schedule for source {}", source_id); + } + } + } + + info!( + "Reconciled release-source schedules: {} active", + state.registered_count() + ); + Ok(()) +} + +async fn register_one( + scheduler: &mut JobScheduler, + state: &mut ReleaseSourceSchedule, + db: &DatabaseConnection, + source: &crate::db::entities::release_sources::Model, + effective_cron: &str, +) -> Result<()> { + // Normalize 5-field POSIX cron to the 6-field form tokio-cron-scheduler + // expects (or accept 6-field expressions as-is). 
+ let cron = normalize_cron_expression(effective_cron).with_context(|| { + format!( + "Invalid cron expression for source {} ({}): {}", + source.id, source.display_name, effective_cron + ) + })?; + + let db_clone = db.clone(); + let source_id = source.id; + let display_name = source.display_name.clone(); + let job = Job::new_async(cron.as_str(), move |_uuid, _lock| { + let db = db_clone.clone(); + let display_name = display_name.clone(); + Box::pin(async move { + debug!( + "Triggering scheduled poll for source {} ({})", + source_id, display_name + ); + let task_type = TaskType::PollReleaseSource { source_id }; + if let Err(e) = TaskRepository::enqueue(&db, task_type, None).await { + error!( + "Failed to enqueue PollReleaseSource for source {}: {}", + source_id, e + ); + } + }) + }) + .with_context(|| format!("Failed to build cron job for source {}", source.id))?; + + let job_uuid = scheduler + .add(job) + .await + .with_context(|| format!("Failed to add cron job for source {}", source.id))?; + state.jobs.insert(source.id, job_uuid); + state + .last_cron + .insert(source.id, effective_cron.to_string()); + + info!( + "Scheduled poll for source {} ({}) with cron `{}`", + source.id, source.display_name, effective_cron + ); + Ok(()) +} + +/// Outcome of an `enqueue_poll_now` call. +#[derive(Debug, Clone, Copy)] +pub struct EnqueuePollOutcome { + /// The ID of the task — either the freshly enqueued one or the + /// in-flight task we coalesced onto. + pub task_id: Uuid, + /// `true` when a pending/processing task already existed for this + /// source and we returned its ID instead of enqueuing a new one. + pub coalesced: bool, +} + +/// Wrapper for callers (e.g., HTTP handlers) that want to enqueue a poll +/// directly instead of waiting for the scheduler tick. +/// +/// **Dedup**: if a `poll_release_source` task for the same `source_id` is +/// already pending or processing, returns that task's ID instead of +/// enqueuing another one. 
This guards against the "click Poll now twice
+/// and race two overlapping polls" footgun: with a worker pool size > 1, two
+/// independent tasks for the same source would race on `last_summary` /
+/// `last_polled_at` writes and overlap upstream fetches. Coalescing onto
+/// the in-flight task gives the user the same UX (their click acks) and
+/// keeps the source's state coherent.
+pub async fn enqueue_poll_now(
+    db: &DatabaseConnection,
+    source_id: Uuid,
+) -> Result<EnqueuePollOutcome> {
+    if let Some(existing) = TaskRepository::find_pending_or_processing_by_param(
+        db,
+        "poll_release_source",
+        "source_id",
+        &source_id.to_string(),
+    )
+    .await
+    .context("Failed to check for in-flight poll task")?
+    {
+        return Ok(EnqueuePollOutcome {
+            task_id: existing,
+            coalesced: true,
+        });
+    }
+
+    let task_type = TaskType::PollReleaseSource { source_id };
+    let task_id = TaskRepository::enqueue(db, task_type, None)
+        .await
+        .context("Failed to enqueue PollReleaseSource task")?;
+    Ok(EnqueuePollOutcome {
+        task_id,
+        coalesced: false,
+    })
+}
+
+/// Read the resolved server-wide default cron schedule. Convenience for
+/// callers (HTTP handlers, scheduler reconcile) that need it without
+/// pulling in the schedule module directly.
+pub async fn read_server_default_cron(settings: &SettingsService) -> String { + read_default_cron_schedule(settings).await +} diff --git a/src/services/mod.rs b/src/services/mod.rs index 385ab3a6..9c40e801 100644 --- a/src/services/mod.rs +++ b/src/services/mod.rs @@ -15,6 +15,7 @@ pub mod plugin_file_storage; pub mod plugin_metrics; pub mod rate_limiter; pub mod read_progress; +pub mod release; pub mod series_export_collector; pub mod series_export_writer; pub mod settings; diff --git a/src/services/plugin/handle.rs b/src/services/plugin/handle.rs index 8af98615..f93419f6 100644 --- a/src/services/plugin/handle.rs +++ b/src/services/plugin/handle.rs @@ -6,6 +6,7 @@ use std::sync::Arc; use std::time::Duration; +use sea_orm::DatabaseConnection; use serde::Serialize; use serde::de::DeserializeOwned; use serde_json::Value; @@ -19,6 +20,7 @@ use super::protocol::{ MetadataSearchResponse, PluginBookMetadata, PluginManifest, PluginSeriesMetadata, SearchResult, methods, }; +use super::releases_handler::ReleasesRequestHandler; use super::rpc::{RpcClient, RpcError}; use super::secrets::SecretValue; use super::storage_handler::StorageRequestHandler; @@ -143,6 +145,12 @@ pub struct PluginHandle { health: Arc<HealthTracker>, /// Optional storage handler for user plugin reverse RPC storage_handler: Option<StorageRequestHandler>, + /// Optional database connection for handlers that need DB access + /// post-initialization (releases handler, etc.). + release_db: Option<DatabaseConnection>, + /// Optional scheduler reference so the releases handler can reconcile + /// release-source schedules immediately after `releases/register_sources`. 
+ scheduler: Option<Arc<tokio::sync::Mutex<crate::scheduler::Scheduler>>>, } impl PluginHandle { @@ -155,6 +163,8 @@ impl PluginHandle { client: Arc::new(RwLock::new(None)), manifest: Arc::new(RwLock::new(None)), storage_handler: None, + release_db: None, + scheduler: None, } } @@ -170,9 +180,30 @@ impl PluginHandle { client: Arc::new(RwLock::new(None)), manifest: Arc::new(RwLock::new(None)), storage_handler: Some(storage_handler), + release_db: None, + scheduler: None, } } + /// Attach a database connection so the handle can install the releases + /// reverse-RPC handler post-initialization when the plugin declares the + /// `release_source` capability. Builder-style, returns `self`. + pub fn with_release_db(mut self, db: DatabaseConnection) -> Self { + self.release_db = Some(db); + self + } + + /// Attach a scheduler reference so the releases reverse-RPC handler can + /// trigger a release-source reconcile when the plugin calls + /// `releases/register_sources`. Builder-style. + pub fn with_scheduler( + mut self, + scheduler: Arc<tokio::sync::Mutex<crate::scheduler::Scheduler>>, + ) -> Self { + self.scheduler = Some(scheduler); + self + } + /// Get the current plugin state pub async fn state(&self) -> PluginState { self.state.read().await.clone() @@ -294,6 +325,35 @@ impl PluginHandle { "Plugin initialized successfully" ); + // Push the capability snapshot into the reverse-RPC context. If the + // plugin declared `release_source` and we have a database + // connection, install the releases handler too. Both happen under + // the same write lock so the dispatcher sees them together. + // + // The releases handler emits `ReleaseAnnounced` through the + // task-local recording broadcaster set by `crate::tasks::worker` + // around the running task — no broadcaster injection needed here. + // See [`crate::events::with_recording_broadcaster`]. 
+ let manifest_for_ctx = manifest.clone(); + let plugin_name = manifest.name.clone(); + let release_db = self.release_db.clone(); + let scheduler = self.scheduler.clone(); + client + .update_reverse_ctx(move |ctx| { + ctx.set_capabilities(manifest_for_ctx.capabilities.clone()); + if let (Some(cap), Some(db)) = ( + manifest_for_ctx.capabilities.release_source.clone(), + release_db, + ) { + let mut handler = ReleasesRequestHandler::new(db, plugin_name, cap); + if let Some(s) = scheduler { + handler = handler.with_scheduler(s); + } + ctx.set_releases_handler(handler); + } + }) + .await; + // Store the client and manifest { let mut client_lock = self.client.write().await; diff --git a/src/services/plugin/manager.rs b/src/services/plugin/manager.rs index fd2e3597..edd8bfd3 100644 --- a/src/services/plugin/manager.rs +++ b/src/services/plugin/manager.rs @@ -332,6 +332,10 @@ pub struct PluginManager { metrics_service: Option<Arc<PluginMetricsService>>, /// Optional plugin file storage for resolving plugin data directories plugin_file_storage: Option<Arc<crate::services::PluginFileStorage>>, + /// Optional scheduler handle so the releases reverse-RPC handler can + /// trigger a release-source reconcile when a plugin calls + /// `releases/register_sources`. + scheduler: Option<Arc<tokio::sync::Mutex<crate::scheduler::Scheduler>>>, } impl PluginManager { @@ -346,6 +350,7 @@ impl PluginManager { health_check_handle: RwLock::new(None), metrics_service: None, plugin_file_storage: None, + scheduler: None, } } @@ -369,6 +374,17 @@ impl PluginManager { self } + /// Hand the scheduler to per-plugin handles so the releases reverse-RPC + /// handler can reconcile release-source schedules when a plugin calls + /// `releases/register_sources`. Builder-style. 
+ pub fn with_scheduler( + mut self, + scheduler: Arc<tokio::sync::Mutex<crate::scheduler::Scheduler>>, + ) -> Self { + self.scheduler = Some(scheduler); + self + } + /// Load all enabled plugins from database pub async fn load_all(&self) -> Result<usize, PluginManagerError> { debug!("Loading enabled plugins from database..."); @@ -376,36 +392,59 @@ impl PluginManager { let count = enabled_plugins.len(); debug!("Found {} enabled plugins in database", count); - let mut plugins = self.plugins.write().await; + // Identify release-source plugins so we can eager-spawn them after the + // write lock is released; this lets their `onInitialize` run and call + // `releases/register_sources` to materialize source rows on startup. + let eager_spawn_ids: Vec<Uuid> = enabled_plugins + .iter() + .filter(|p| Self::is_release_source(p)) + .map(|p| p.id) + .collect(); - // Preserve existing handles - we don't want to kill running plugin processes - // Just update the db_config for existing entries and add new ones - let mut existing_handles: HashMap<Uuid, Option<Arc<PluginHandle>>> = HashMap::new(); - for (id, entry) in plugins.drain() { - existing_handles.insert(id, entry.handle); - } + { + let mut plugins = self.plugins.write().await; - for plugin in enabled_plugins { - let id = plugin.id; - debug!("Loading plugin: {} ({})", plugin.name, id); - let mut entry = PluginEntry::new(plugin); - // Restore handle if we had one - if let Some(handle) = existing_handles.remove(&id) { - entry.handle = handle; + // Preserve existing handles - we don't want to kill running plugin processes + // Just update the db_config for existing entries and add new ones + let mut existing_handles: HashMap<Uuid, Option<Arc<PluginHandle>>> = HashMap::new(); + for (id, entry) in plugins.drain() { + existing_handles.insert(id, entry.handle); + } + + for plugin in enabled_plugins { + let id = plugin.id; + debug!("Loading plugin: {} ({})", plugin.name, id); + let mut entry = PluginEntry::new(plugin); + // 
Restore handle if we had one + if let Some(handle) = existing_handles.remove(&id) { + entry.handle = handle; + } + plugins.insert(id, entry); } - plugins.insert(id, entry); - } - // Stop any handles for plugins that are no longer enabled - for (_id, handle) in existing_handles { - if let Some(h) = handle { - let _ = h.stop().await; + // Stop any handles for plugins that are no longer enabled + for (_id, handle) in existing_handles { + if let Some(h) = handle { + let _ = h.stop().await; + } } } // Update cache timestamp *self.cache_loaded_at.write().await = Some(Instant::now()); + // Eager-start release-source plugins so they can register their + // sources on boot. Best-effort: a single plugin failing must not + // block the rest of startup. + for id in eager_spawn_ids { + if let Err(e) = self.get_or_spawn(id).await { + warn!( + "Eager start of release-source plugin {} failed on load_all: {}", + id, e + ); + } + } + info!("Loaded {} enabled plugins from database", count); Ok(count) } @@ -462,28 +501,54 @@ impl PluginManager { plugin_id, plugin.name, plugin.enabled, plugin.scopes ); - let mut plugins = self.plugins.write().await; + // Note whether this plugin should be eagerly spawned after the + // reload completes (release-source plugins need their onInitialize + // to run so they can call `releases/register_sources` — nothing + // else would trigger a spawn). 
+        let eager_spawn = plugin.enabled && Self::is_release_source(&plugin);
 
-        if plugin.enabled {
-            // If plugin exists and has a handle, stop it first
-            if let Some(entry) = plugins.get_mut(&plugin_id) {
-                debug!("Updating existing plugin entry for {}", plugin_id);
-                if let Some(handle) = entry.handle.take() {
-                    let _ = handle.stop().await;
+        {
+            let mut plugins = self.plugins.write().await;
+
+            if plugin.enabled {
+                // If plugin exists and has a handle, stop it first
+                if let Some(entry) = plugins.get_mut(&plugin_id) {
+                    debug!("Updating existing plugin entry for {}", plugin_id);
+                    if let Some(handle) = entry.handle.take() {
+                        let _ = handle.stop().await;
+                    }
+                    entry.update_config(plugin);
+                } else {
+                    debug!("Inserting new plugin entry for {}", plugin_id);
+                    plugins.insert(plugin_id, PluginEntry::new(plugin));
                 }
-                entry.update_config(plugin);
+                debug!("Plugin manager now has {} plugins loaded", plugins.len());
             } else {
-                debug!("Inserting new plugin entry for {}", plugin_id);
-                plugins.insert(plugin_id, PluginEntry::new(plugin));
+                // Plugin is disabled, remove it from managed plugins
+                debug!("Plugin {} is disabled, removing from memory", plugin_id);
+                if let Some(entry) = plugins.remove(&plugin_id)
+                    && let Some(handle) = entry.handle
+                {
+                    let _ = handle.stop().await;
+                }
             }
-            debug!("Plugin manager now has {} plugins loaded", plugins.len());
-        } else {
-            // Plugin is disabled, remove it from managed plugins
-            debug!("Plugin {} is disabled, removing from memory", plugin_id);
-            if let Some(entry) = plugins.remove(&plugin_id)
-                && let Some(handle) = entry.handle
-            {
-                let _ = handle.stop().await;
+        }
+
+        if eager_spawn {
+            // NOTE(review): this awaits `get_or_spawn` inline (not out-of-
+            // band), so a slow onInitialize delays reload; wrap in a spawned
+            // task if that matters. Safe here: it takes its own locks.
+ if let Err(e) = self.get_or_spawn(plugin_id).await { + warn!( + "Eager start of release-source plugin {} failed: {}", + plugin_id, e + ); + } else { + debug!( + "Eager-started release-source plugin {} so onInitialize \ + can register its sources", + plugin_id + ); } } @@ -584,7 +649,10 @@ impl PluginManager { // Need to spawn/initialize the plugin let handle_config = self.create_plugin_config(&entry.db_config).await?; - let handle = PluginHandle::new(handle_config); + let mut handle = PluginHandle::new(handle_config).with_release_db(self.db.as_ref().clone()); + if let Some(ref s) = self.scheduler { + handle = handle.with_scheduler(s.clone()); + } // Start the plugin match handle.start().await { @@ -720,7 +788,11 @@ impl PluginManager { // Create handle with storage support for user plugins let storage_handler = StorageRequestHandler::new(self.db.as_ref().clone(), user_plugin.id); - let handle = PluginHandle::new_with_storage(handle_config, storage_handler); + let mut handle = PluginHandle::new_with_storage(handle_config, storage_handler) + .with_release_db(self.db.as_ref().clone()); + if let Some(ref s) = self.scheduler { + handle = handle.with_scheduler(s.clone()); + } // Start the plugin match handle.start().await { @@ -910,6 +982,22 @@ impl PluginManager { manifest.oauth } + /// Whether this plugin's cached manifest declares the `release_source` + /// capability. Release-source plugins must be eagerly spawned (rather + /// than lazy on first call) so their `onInitialize` runs and the plugin + /// can call `releases/register_sources` to materialize its source rows + /// — otherwise the scheduler has nothing to poll, and nothing else + /// would ever trigger a spawn. 
+ fn is_release_source(plugin: &plugins::Model) -> bool { + let Some(manifest_json) = plugin.manifest.as_ref() else { + return false; + }; + let Ok(manifest) = serde_json::from_value::<PluginManifest>(manifest_json.clone()) else { + return false; + }; + manifest.capabilities.release_source.is_some() + } + /// Get the OAuth client_id for a plugin (config override > manifest default) fn get_oauth_client_id(plugin: &plugins::Model) -> Option<String> { // Check plugin config for client_id override diff --git a/src/services/plugin/mod.rs b/src/services/plugin/mod.rs index 10835a1f..40c8d437 100644 --- a/src/services/plugin/mod.rs +++ b/src/services/plugin/mod.rs @@ -73,9 +73,11 @@ pub mod handle; pub mod health; pub mod library; pub mod manager; +pub mod permissions; pub mod process; pub mod protocol; pub mod recommendations; +pub mod releases_handler; pub mod rpc; pub mod secrets; pub mod storage; diff --git a/src/services/plugin/permissions.rs b/src/services/plugin/permissions.rs new file mode 100644 index 00000000..695f5a17 --- /dev/null +++ b/src/services/plugin/permissions.rs @@ -0,0 +1,219 @@ +//! Permission enforcement for reverse-RPC method dispatch. +//! +//! Plugins declare their capabilities in their manifest. When a plugin makes +//! a reverse-RPC call (e.g. `storage/get`, `releases/record`), the host +//! resolves the method namespace to a [`RequiredCapability`] and checks the +//! manifest before dispatching. Calls without the right capability are +//! rejected with [`PermissionError`]. +//! +//! ## Why this exists +//! +//! The plugin survey identified that capabilities were declared but not +//! actually enforced at dispatch — a metadata-only plugin could still call +//! `sync/*` methods if the host happened to wire them up. Adding the +//! `release_source` capability is the forcing function for closing that gap +//! uniformly across every reverse-RPC namespace. +//! +//! ## Mapping +//! +//! 
The mapping table here is the single source of truth for "which capability +//! is needed to call this method." Adding a new namespace means adding a +//! mapping here AND wiring the handler — the dispatcher won't route a method +//! that has no mapping. + +use super::protocol::{PluginCapabilities, methods}; + +/// Capability required to call a particular reverse-RPC namespace. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[allow(dead_code)] // Variants below are part of the mapping vocabulary; not all +// namespaces (sync/recommendations) have reverse-RPC methods yet. +pub enum RequiredCapability { + /// `metadata_provider` (any non-empty content type list). + MetadataProvider, + /// `user_read_sync = true`. + UserReadSync, + /// `user_recommendation_provider = true`. + UserRecommendationProvider, + /// `release_source` capability declared. + ReleaseSource, + /// Always allowed; e.g. `storage/*` is scoped per user-plugin instance, + /// so any plugin that's been spawned has implicit storage access. + AlwaysAllowed, +} + +/// Why a permission check failed. +#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)] +pub enum PermissionError { + #[error( + "method `{method}` requires the `{required:?}` capability, which the plugin did not declare" + )] + Denied { + method: String, + required: RequiredCapability, + }, + #[error("unknown method `{method}` (no permission mapping)")] + UnknownMethod { method: String }, +} + +/// Resolve the method name to its required capability. +/// +/// Returns `None` if the method has no permission mapping. The dispatcher +/// treats `None` as "method not found" — adding new methods requires +/// updating this mapping. +pub fn required_capability(method: &str) -> Option<RequiredCapability> { + match method { + // Storage is scoped per (user, plugin) at handler-construction time; + // any plugin that has been started can call storage. The mapping is + // explicit so it doesn't fall through to UnknownMethod. 
+ methods::STORAGE_GET + | methods::STORAGE_SET + | methods::STORAGE_DELETE + | methods::STORAGE_LIST + | methods::STORAGE_CLEAR => Some(RequiredCapability::AlwaysAllowed), + + // Releases — gated on the `release_source` capability. + methods::RELEASES_LIST_TRACKED + | methods::RELEASES_RECORD + | methods::RELEASES_SOURCE_STATE_GET + | methods::RELEASES_SOURCE_STATE_SET + | methods::RELEASES_REGISTER_SOURCES => Some(RequiredCapability::ReleaseSource), + + _ => None, + } +} + +/// Check whether `caps` satisfies `required`. +pub fn capability_satisfied(caps: &PluginCapabilities, required: RequiredCapability) -> bool { + match required { + RequiredCapability::MetadataProvider => !caps.metadata_provider.is_empty(), + RequiredCapability::UserReadSync => caps.user_read_sync, + RequiredCapability::UserRecommendationProvider => caps.user_recommendation_provider, + RequiredCapability::ReleaseSource => caps.is_release_source(), + RequiredCapability::AlwaysAllowed => true, + } +} + +/// Convenience: enforce a method against a manifest's capabilities. +/// +/// Returns `Ok(())` if the call should proceed, or [`PermissionError`] if +/// the dispatcher should refuse it. 
+pub fn enforce(method: &str, caps: &PluginCapabilities) -> Result<(), PermissionError> { + let Some(required) = required_capability(method) else { + return Err(PermissionError::UnknownMethod { + method: method.to_string(), + }); + }; + if !capability_satisfied(caps, required) { + return Err(PermissionError::Denied { + method: method.to_string(), + required, + }); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::super::protocol::{ + MetadataContentType, PluginCapabilities, ReleaseSourceCapability, ReleaseSourceKind, + }; + use super::*; + + fn metadata_caps() -> PluginCapabilities { + PluginCapabilities { + metadata_provider: vec![MetadataContentType::Series], + ..Default::default() + } + } + + fn release_caps() -> PluginCapabilities { + PluginCapabilities { + release_source: Some(ReleaseSourceCapability { + kinds: vec![ReleaseSourceKind::RssUploader], + ..Default::default() + }), + ..Default::default() + } + } + + fn sync_caps() -> PluginCapabilities { + PluginCapabilities { + user_read_sync: true, + external_id_source: Some("api:anilist".to_string()), + ..Default::default() + } + } + + #[test] + fn storage_methods_always_allowed() { + for m in [ + methods::STORAGE_GET, + methods::STORAGE_SET, + methods::STORAGE_DELETE, + methods::STORAGE_LIST, + methods::STORAGE_CLEAR, + ] { + assert!(enforce(m, &PluginCapabilities::default()).is_ok()); + assert!(enforce(m, &metadata_caps()).is_ok()); + } + } + + #[test] + fn releases_methods_require_release_source_capability() { + for m in [ + methods::RELEASES_LIST_TRACKED, + methods::RELEASES_RECORD, + methods::RELEASES_SOURCE_STATE_GET, + methods::RELEASES_SOURCE_STATE_SET, + methods::RELEASES_REGISTER_SOURCES, + ] { + // Release-source plugin: allowed. + assert!(enforce(m, &release_caps()).is_ok(), "{m} should be allowed"); + // Other capability set: denied. 
+ let err = enforce(m, &metadata_caps()).unwrap_err(); + assert!( + matches!( + err, + PermissionError::Denied { + required: RequiredCapability::ReleaseSource, + .. + } + ), + "{m}: expected Denied(ReleaseSource), got {err:?}" + ); + let err = enforce(m, &sync_caps()).unwrap_err(); + assert!(matches!(err, PermissionError::Denied { .. })); + } + } + + #[test] + fn unmapped_method_is_unknown() { + let err = enforce("frobnicate/zap", &release_caps()).unwrap_err(); + assert!(matches!(err, PermissionError::UnknownMethod { .. })); + } + + #[test] + fn capability_satisfied_for_metadata_provider() { + assert!(capability_satisfied( + &metadata_caps(), + RequiredCapability::MetadataProvider + )); + assert!(!capability_satisfied( + &PluginCapabilities::default(), + RequiredCapability::MetadataProvider + )); + } + + #[test] + fn required_capability_returns_some_for_known_methods() { + assert_eq!( + required_capability(methods::RELEASES_RECORD), + Some(RequiredCapability::ReleaseSource) + ); + assert_eq!( + required_capability(methods::STORAGE_GET), + Some(RequiredCapability::AlwaysAllowed) + ); + assert_eq!(required_capability("not/a/method"), None); + } +} diff --git a/src/services/plugin/protocol.rs b/src/services/plugin/protocol.rs index fcaa6715..cf08858d 100644 --- a/src/services/plugin/protocol.rs +++ b/src/services/plugin/protocol.rs @@ -62,6 +62,17 @@ pub struct JsonRpcRequest { pub method: String, #[serde(default, skip_serializing_if = "Option::is_none")] pub params: Option<Value>, + /// Reverse-RPC only: id of the forward call the plugin is currently + /// servicing. Lets the host route the reverse-RPC back to the originating + /// caller's task so emitted events land in that caller's recording + /// broadcaster. Absent for forward calls and for plugins that predate the + /// field. 
+ #[serde( + default, + rename = "parentRequestId", + skip_serializing_if = "Option::is_none" + )] + pub parent_request_id: Option<RequestId>, } impl JsonRpcRequest { @@ -73,6 +84,7 @@ impl JsonRpcRequest { id: id.into(), method: method.into(), params, + parent_request_id: None, } } @@ -245,6 +257,24 @@ pub mod methods { pub const RECOMMENDATIONS_CLEAR: &str = "recommendations/clear"; /// Dismiss a recommendation (user not interested) pub const RECOMMENDATIONS_DISMISS: &str = "recommendations/dismiss"; + + // Release-source methods (host -> plugin) + /// Ask the plugin to poll its source for new releases. + pub const RELEASES_POLL: &str = "releases/poll"; + + // Release-source reverse-RPC methods (plugin -> host) + /// List tracked series scoped to what the source needs. + pub const RELEASES_LIST_TRACKED: &str = "releases/list_tracked"; + /// Record a release candidate in the ledger. + pub const RELEASES_RECORD: &str = "releases/record"; + /// Get the persisted state for a release source (etag, cursor, etc.). + pub const RELEASES_SOURCE_STATE_GET: &str = "releases/source_state/get"; + /// Set persisted state for a release source. + pub const RELEASES_SOURCE_STATE_SET: &str = "releases/source_state/set"; + /// Replace the set of release-source rows owned by this plugin. + /// The host upserts each entry by `(plugin_id, source_key)` and prunes + /// rows whose `source_key` is no longer in the input list. + pub const RELEASES_REGISTER_SOURCES: &str = "releases/register_sources"; } // ============================================================================= @@ -351,6 +381,84 @@ pub struct PluginCapabilities { /// Can provide personalized recommendations (v2) #[serde(default)] pub user_recommendation_provider: bool, + /// Can announce new releases (chapters/volumes) for tracked series. + /// When present, the plugin may invoke the `releases/*` reverse-RPC + /// methods. 
The capability struct declares the data the plugin needs + /// (aliases, external IDs) so the host can scope its responses. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub release_source: Option<ReleaseSourceCapability>, +} + +/// Release-source capability declaration. +/// +/// Plugins that want to announce releases declare this capability in their +/// manifest. The struct describes both *what* the plugin can announce and +/// *what* it needs from the host. The host uses these fields when filling +/// `releases/list_tracked` responses so plugins only see data they asked for. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseSourceCapability { + /// Source kinds this plugin exposes (e.g. `["rss-uploader"]`). + #[serde(default)] + pub kinds: Vec<ReleaseSourceKind>, + /// Whether the plugin needs title aliases (set when the plugin matches + /// by title rather than by external ID, e.g. Nyaa). + #[serde(default)] + pub requires_aliases: bool, + /// External-ID sources the plugin needs, e.g. `["mangaupdates"]` or + /// `["mangadex"]`. The host filters `series_external_ids` to these + /// sources when responding to `releases/list_tracked`. + #[serde(default)] + pub requires_external_ids: Vec<String>, + /// Whether the plugin announces chapter-level releases. + #[serde(default)] + pub can_announce_chapters: bool, + /// Whether the plugin announces volume-level releases. + #[serde(default)] + pub can_announce_volumes: bool, +} + +impl Default for ReleaseSourceCapability { + fn default() -> Self { + Self { + kinds: Vec::new(), + requires_aliases: false, + requires_external_ids: Vec::new(), + can_announce_chapters: true, + can_announce_volumes: true, + } + } +} + +/// Kind of release source. Mirrors the `release_sources.kind` column on the +/// host side, but lives here so plugins can declare it without depending on +/// the database schema. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum ReleaseSourceKind { + /// Per-uploader feed (e.g., a Nyaa user RSS feed). + RssUploader, + /// Per-series feed (e.g., MangaUpdates RSS for a single series). + RssSeries, + /// Generic API-driven feed. + ApiFeed, + /// Metadata-derived signal (informational; usually doesn't write the + /// ledger - see Phase 5). + MetadataFeed, +} + +impl ReleaseSourceKind { + /// Canonical kebab-case string matching `release_sources.kind` and the + /// serde representation. Used when comparing against string-typed + /// `kind` fields parsed from RPC requests. + pub fn as_str(&self) -> &'static str { + match self { + Self::RssUploader => "rss-uploader", + Self::RssSeries => "rss-series", + Self::ApiFeed => "api-feed", + Self::MetadataFeed => "metadata-feed", + } + } } impl PluginCapabilities { @@ -365,15 +473,21 @@ impl PluginCapabilities { self.metadata_provider.contains(&MetadataContentType::Book) } + /// Whether this plugin declares the `release_source` capability. + pub fn is_release_source(&self) -> bool { + self.release_source.is_some() + } + /// Infer the plugin type from capabilities. /// /// User-facing capabilities (`user_read_sync`, `user_recommendation_provider`) - /// indicate a "user" plugin. Metadata provider capabilities indicate a - /// "system" plugin. Returns `None` when capabilities are empty. + /// indicate a "user" plugin. Metadata-provider and release-source + /// capabilities indicate a "system" plugin. Returns `None` when + /// capabilities are empty. 
pub fn inferred_plugin_type(&self) -> Option<PluginManifestType> { if self.user_read_sync || self.user_recommendation_provider { Some(PluginManifestType::User) - } else if !self.metadata_provider.is_empty() { + } else if !self.metadata_provider.is_empty() || self.release_source.is_some() { Some(PluginManifestType::System) } else { None @@ -1299,6 +1413,98 @@ pub struct InitializeParams { pub data_dir: Option<String>, } +// ============================================================================= +// Releases Poll (host -> plugin) +// ============================================================================= + +/// Parameters for `releases/poll` (host → plugin call). +/// +/// The host invokes this once per scheduled poll for a single +/// `release_sources` row. The plugin uses the `source_id` to scope its work +/// (which feed/uploader/series to query) and may consult the supplied +/// `etag` for conditional GETs. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReleasePollRequest { + /// Source row the plugin should poll. The plugin can call back into + /// `releases/source_state/get` for richer state (etag, last_polled_at) + /// or `releases/list_tracked` to harvest the tracked-series scope. + pub source_id: uuid::Uuid, + /// Plugin-defined stable key for this source row (the same value the + /// plugin originally passed to `releases/register_sources`). Carried in + /// the poll request so the plugin can dispatch directly without a + /// reverse-RPC roundtrip — useful when one plugin process owns multiple + /// source rows (e.g., one per Nyaa uploader). + #[serde(default, skip_serializing_if = "Option::is_none")] + pub source_key: Option<String>, + /// Snapshot of `release_sources.config` at poll time, if any. Plugins + /// that store per-source config on register can read it back here to + /// avoid keeping their own `(sourceKey, config)` map in memory. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub config: Option<serde_json::Value>, + /// Etag value from the previous successful poll, if any. Plugins doing + /// HTTP conditional GETs (`If-None-Match`) can use it directly. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub etag: Option<String>, +} + +/// Response from `releases/poll`. +/// +/// Plugins MAY also call `releases/record` directly during polling (the +/// reverse-RPC channel is open). The `candidates` field is convenience for +/// plugins that prefer to return everything at once; both styles are +/// supported and the host treats them identically. +/// +/// Plugins that stream via `releases/record` should also populate the +/// counter fields (`parsed`, `matched`, `recorded`, `deduped`) so the host +/// can build an accurate `last_summary` for the source. Without those, the +/// host can only see what came back in `candidates` and a streaming +/// plugin's status badge will read "Fetched 0 items" no matter what. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReleasePollResponse { + /// Optional batch of candidates the host should evaluate and ledger + /// (in addition to anything the plugin already streamed via + /// `releases/record`). + #[serde(default)] + pub candidates: Vec<crate::services::release::candidate::ReleaseCandidate>, + /// New etag observed by the plugin (e.g. from the upstream feed's + /// `ETag` header). The host stores this on the source row for the + /// next poll's conditional-GET. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub etag: Option<String>, + /// Whether the upstream returned `304 Not Modified` (or equivalent + /// "no work" signal). Purely informational; the host doesn't act on it + /// beyond logging. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub not_modified: Option<bool>, + /// HTTP status code observed from the upstream feed, if any. 
Used by + /// the host's per-host backoff layer to detect 429 / 503. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub upstream_status: Option<u16>, + /// Items the plugin parsed from the upstream feed before any matching + /// or threshold filtering. Streaming plugins should populate this so + /// the host's `last_summary` reflects upstream activity, not just the + /// shape of the response payload. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub parsed: Option<u32>, + /// Of those parsed, the count that matched a tracked series alias + /// (i.e. that became candidates the plugin then evaluated/streamed). + #[serde(default, skip_serializing_if = "Option::is_none")] + pub matched: Option<u32>, + /// Of those matched, the count actually inserted into the ledger + /// (excludes dedupes). For plugins that stream via `releases/record`, + /// this is the count of non-deduped record outcomes. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub recorded: Option<u32>, + /// Of those matched, the count that the host deduped onto an existing + /// ledger row. Optional; when omitted the host infers `matched - + /// recorded`. Provided explicitly by streaming plugins that already + /// know. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub deduped: Option<u32>, +} + // ============================================================================= // Rate Limit Error Data // ============================================================================= @@ -2053,6 +2259,74 @@ mod tests { assert_eq!(caps.inferred_plugin_type(), None); } + #[test] + fn test_release_source_capability_serializes_camel_case() { + let cap = ReleaseSourceCapability { + kinds: vec![ReleaseSourceKind::RssUploader], + requires_aliases: true, + requires_external_ids: vec!["mangaupdates".to_string()], + can_announce_chapters: true, + can_announce_volumes: false, + }; + let json = serde_json::to_value(&cap).unwrap(); + assert_eq!(json["kinds"], json!(["rss-uploader"])); + assert!(json["requiresAliases"].as_bool().unwrap()); + assert_eq!(json["requiresExternalIds"], json!(["mangaupdates"])); + assert!(json["canAnnounceChapters"].as_bool().unwrap()); + assert!(!json["canAnnounceVolumes"].as_bool().unwrap()); + } + + #[test] + fn test_release_source_capability_kind_round_trip() { + for kind in [ + ReleaseSourceKind::RssUploader, + ReleaseSourceKind::RssSeries, + ReleaseSourceKind::ApiFeed, + ReleaseSourceKind::MetadataFeed, + ] { + let json = serde_json::to_value(kind).unwrap(); + let back: ReleaseSourceKind = serde_json::from_value(json).unwrap(); + assert_eq!(kind, back); + } + } + + #[test] + fn test_plugin_capabilities_release_source_inferred_type() { + let caps = PluginCapabilities { + release_source: Some(ReleaseSourceCapability::default()), + ..Default::default() + }; + assert!(caps.is_release_source()); + assert_eq!( + caps.inferred_plugin_type(), + Some(PluginManifestType::System) + ); + } + + #[test] + fn test_plugin_capabilities_manifest_parses_release_source() { + let manifest_json = json!({ + "name": "release-nyaa", + "displayName": "Nyaa Releases", + "version": "0.1.0", + "protocolVersion": "1.2", + "capabilities": { + "releaseSource": { + "kinds": 
["rss-uploader"], + "requiresAliases": true, + "requiresExternalIds": [], + "canAnnounceChapters": true, + "canAnnounceVolumes": true + } + } + }); + let manifest: PluginManifest = serde_json::from_value(manifest_json).unwrap(); + assert!(manifest.capabilities.is_release_source()); + let cap = manifest.capabilities.release_source.unwrap(); + assert_eq!(cap.kinds, vec![ReleaseSourceKind::RssUploader]); + assert!(cap.requires_aliases); + } + #[test] fn test_oauth_config_serialization() { let config = OAuthConfig { diff --git a/src/services/plugin/releases_handler.rs b/src/services/plugin/releases_handler.rs new file mode 100644 index 00000000..ac8e70ce --- /dev/null +++ b/src/services/plugin/releases_handler.rs @@ -0,0 +1,2155 @@ +//! Release-source reverse-RPC handler. +//! +//! Plugins that declare the `release_source` capability call these methods +//! to read tracked-series rows scoped to their declared needs (aliases / +//! external IDs), record release candidates in the host-side ledger, and +//! persist per-source state (etag, cursor, etc.) across polls. +//! +//! The dispatcher in [`super::rpc`] checks the plugin's manifest before +//! routing here (see [`super::permissions`]); this handler trusts that the +//! caller has the `release_source` capability and focuses on data scoping +//! and validation. 
+ +use std::collections::HashMap; +use std::sync::Arc; + +use chrono::{DateTime, Utc}; +use sea_orm::DatabaseConnection; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use tokio::sync::Mutex; +use tracing::{debug, error, info, warn}; +use uuid::Uuid; + +use super::protocol::{ + JsonRpcError, JsonRpcRequest, JsonRpcResponse, ReleaseSourceCapability, RequestId, error_codes, + methods, +}; +use crate::db::entities::release_ledger::state as ledger_state; +use crate::db::entities::release_sources::kind as source_kind; +use crate::db::repositories::{ + NewReleaseSource, ReleaseLedgerRepository, ReleaseSourceRepository, SeriesAliasRepository, + SeriesExternalIdRepository, SeriesRepository, SeriesTrackingRepository, TrackingUpdate, +}; +use crate::scheduler::Scheduler; +use crate::services::release::auto_ignore::should_auto_ignore; +use crate::services::release::candidate::ReleaseCandidate; +use crate::services::release::languages::{includes, resolve_for_series}; +use crate::services::release::matcher::{evaluate, resolve_threshold}; + +/// Default page size for `releases/list_tracked` when the caller doesn't +/// specify one. Matches the Phase 3 risk-mitigation note. +const DEFAULT_TRACKED_PAGE_SIZE: u64 = 200; +/// Hard cap on `limit` to keep a single page bounded. +const MAX_TRACKED_PAGE_SIZE: u64 = 1_000; + +/// Reverse-RPC handler for the `releases/*` namespace. +/// +/// Like [`super::storage_handler::StorageRequestHandler`], one instance is +/// created per plugin connection so the handler captures the plugin's +/// identity and capability declaration without re-querying on every call. +#[derive(Clone)] +pub struct ReleasesRequestHandler { + db: DatabaseConnection, + /// Plugin name (`manifest.name`). Must match `release_sources.plugin_id` + /// for any source the plugin operates on. + plugin_name: String, + /// Snapshot of the plugin's `release_source` capability declaration. 
Used + /// to scope `releases/list_tracked` responses to what the plugin asked + /// for. + capability: ReleaseSourceCapability, + /// Optional scheduler reference used by `releases/register_sources` to + /// reconcile schedules immediately after the source set changes. + scheduler: Option<Arc<Mutex<Scheduler>>>, +} + +impl ReleasesRequestHandler { + pub fn new( + db: DatabaseConnection, + plugin_name: String, + capability: ReleaseSourceCapability, + ) -> Self { + Self { + db, + plugin_name, + capability, + scheduler: None, + } + } + + /// Attach a scheduler reference so `releases/register_sources` reconciles + /// schedules without waiting for a server restart. Builder-style. + pub fn with_scheduler(mut self, scheduler: Arc<Mutex<Scheduler>>) -> Self { + self.scheduler = Some(scheduler); + self + } + + /// Handle a `releases/*` JSON-RPC request and return a response. + pub async fn handle_request(&self, request: &JsonRpcRequest) -> JsonRpcResponse { + let id = request.id.clone(); + let method = request.method.as_str(); + + debug!( + method = method, + plugin_name = %self.plugin_name, + "Handling releases request" + ); + + match method { + methods::RELEASES_LIST_TRACKED => self.handle_list_tracked(request).await, + methods::RELEASES_RECORD => self.handle_record(request).await, + methods::RELEASES_SOURCE_STATE_GET => self.handle_state_get(request).await, + methods::RELEASES_SOURCE_STATE_SET => self.handle_state_set(request).await, + methods::RELEASES_REGISTER_SOURCES => self.handle_register_sources(request).await, + _ => JsonRpcResponse::error( + Some(id), + JsonRpcError::new( + error_codes::METHOD_NOT_FOUND, + format!("Unknown releases method: {}", method), + ), + ), + } + } + + async fn handle_list_tracked(&self, request: &JsonRpcRequest) -> JsonRpcResponse { + let id = request.id.clone(); + let params: ListTrackedRequest = match parse_params(&request.params) { + Ok(p) => p, + Err(resp) => return resp.with_id(id), + }; + + if let Err(resp) = 
self.assert_source_belongs(&params.source_id, &id).await {
+            return resp;
+        }
+
+        let limit = params
+            .limit
+            .map(|n| n.min(MAX_TRACKED_PAGE_SIZE))
+            .unwrap_or(DEFAULT_TRACKED_PAGE_SIZE);
+        let offset = params.offset.unwrap_or(0);
+
+        // 1. List tracked series IDs.
+        let series_ids =
+            match SeriesTrackingRepository::list_tracked_ids(&self.db, limit, offset).await {
+                Ok(ids) => ids,
+                Err(e) => {
+                    error!(error = %e, "tracked-series listing failed");
+                    return JsonRpcResponse::error(
+                        Some(id),
+                        JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)),
+                    );
+                }
+            };
+
+        // 2. Fetch the tracking rows for those series (so we can return
+        //    latest_known_chapter / latest_known_volume).
+        let mut entries: Vec<TrackedSeriesEntry> = Vec::with_capacity(series_ids.len());
+        for sid in &series_ids {
+            let tracking = match SeriesTrackingRepository::get(&self.db, *sid).await {
+                Ok(Some(row)) => row,
+                Ok(None) => continue, // Race: entry vanished between list and read.
+                Err(e) => {
+                    error!(error = %e, "series_tracking lookup failed");
+                    return JsonRpcResponse::error(
+                        Some(id),
+                        JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)),
+                    );
+                }
+            };
+            entries.push(TrackedSeriesEntry {
+                series_id: *sid,
+                aliases: None,
+                external_ids: None,
+                latest_known_chapter: tracking.latest_known_chapter,
+                latest_known_volume: tracking.latest_known_volume,
+            });
+        }
+
+        // 3. Scope the response based on what the plugin asked for in its
+        //    manifest. Plugins that didn't declare `requires_aliases` don't
+        //    get aliases; same for external IDs.
+ if self.capability.requires_aliases { + for entry in &mut entries { + match SeriesAliasRepository::get_for_series(&self.db, entry.series_id).await { + Ok(rows) => { + entry.aliases = Some(rows.into_iter().map(|r| r.alias).collect::<Vec<_>>()); + } + Err(e) => { + warn!(error = %e, series_id = %entry.series_id, "alias lookup failed"); + } + } + } + } + + if !self.capability.requires_external_ids.is_empty() { + for entry in &mut entries { + match SeriesExternalIdRepository::get_for_series(&self.db, entry.series_id).await { + Ok(rows) => { + // Filter: only sources the plugin asked for. + // + // Two namespace conventions exist in stored + // `series_external_ids.source` strings: + // + // - `api:<service>` (used by metadata plugins + // like MangaBaka, OpenLibrary, AniList — this is + // the dominant convention and the SDK docs). + // - `plugin:<name>` (legacy / plugin-private). + // + // Plugin manifests declare `requiresExternalIds` + // with the bare service name (e.g. "mangaupdates"), + // so we strip both prefixes before matching. The + // returned map is keyed by the bare name so plugins + // can read `externalIds["mangaupdates"]` regardless + // of how the row was stored. 
+                        let mut by_source: HashMap<String, String> = HashMap::new();
+                        for row in rows {
+                            let normalized = strip_external_id_namespace(&row.source);
+                            if self
+                                .capability
+                                .requires_external_ids
+                                .iter()
+                                .any(|req| req == normalized)
+                            {
+                                by_source.insert(normalized.to_string(), row.external_id);
+                            }
+                        }
+                        if !by_source.is_empty() {
+                            entry.external_ids = Some(by_source);
+                        }
+                    }
+                    Err(e) => {
+                        warn!(error = %e, series_id = %entry.series_id, "external_id lookup failed");
+                    }
+                }
+            }
+        }
+
+        let next_offset = if (entries.len() as u64) < limit {
+            None
+        } else {
+            Some(offset + entries.len() as u64)
+        };
+
+        let response = ListTrackedResponse {
+            tracked: entries,
+            next_offset,
+        };
+        JsonRpcResponse::success(id, serde_json::to_value(response).unwrap())
+    }
+
+    async fn handle_record(&self, request: &JsonRpcRequest) -> JsonRpcResponse {
+        let id = request.id.clone();
+        let params: RecordRequest = match parse_params(&request.params) {
+            Ok(p) => p,
+            Err(resp) => return resp.with_id(id),
+        };
+
+        // 1. Verify the source belongs to this plugin.
+        if let Err(resp) = self.assert_source_belongs(&params.source_id, &id).await {
+            return resp;
+        }
+
+        // 2. Look up the tracking row up front. We need it both for the
+        //    threshold and (post-insert) for the latest_known_* gate.
+        let series_id = params.candidate.series_match.codex_series_id;
+        let tracking_row = match SeriesTrackingRepository::get(&self.db, series_id).await {
+            Ok(row) => row,
+            Err(e) => {
+                error!(error = %e, "tracking lookup failed during record");
+                return JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)),
+                );
+            }
+        };
+        let threshold = resolve_threshold(
+            tracking_row
+                .as_ref()
+                .and_then(|r| r.confidence_threshold_override),
+        );
+
+        // 3. Validate + threshold-gate the candidate.
+ let accepted = match evaluate(params.candidate, threshold) { + Ok(a) => a, + Err(reason) => { + debug!( + plugin = %self.plugin_name, + reject = %reason, + "candidate rejected" + ); + return JsonRpcResponse::error( + Some(id), + JsonRpcError::new(error_codes::INVALID_PARAMS, reason.to_string()), + ); + } + }; + + // Snapshot the candidate fields needed for the latest_known_* gate + // before the move into the ledger entry. + let candidate_chapter = accepted.candidate.chapter; + let candidate_volume = accepted.candidate.volume; + let candidate_language = accepted.candidate.language.clone(); + + // Auto-ignore: if the user already owns this volume/chapter, insert + // the row directly as `ignored` so it skips the inbox + notify path. + // Best-effort; on failure we fall back to the default state. + let initial_state = if candidate_volume.is_some() || candidate_chapter.is_some() { + match SeriesRepository::get_owned_release_keys_for_series(&self.db, series_id).await { + Ok(owned) => { + if should_auto_ignore(candidate_volume, candidate_chapter, &owned) { + Some(ledger_state::IGNORED.to_string()) + } else { + None + } + } + Err(e) => { + warn!(error = %e, %series_id, "owned-keys lookup failed; defaulting to announced"); + None + } + } + } else { + None + }; + + // 4. Hand off to the ledger (which is itself idempotent). + let mut entry = accepted.into_ledger_entry(params.source_id); + entry.initial_state = initial_state; + let outcome = match ReleaseLedgerRepository::record(&self.db, entry).await { + Ok(o) => o, + Err(e) => { + error!(error = %e, "ledger record failed"); + return JsonRpcResponse::error( + Some(id), + JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)), + ); + } + }; + + // 5. Advance series_tracking.latest_known_* to the high-water mark. + // + // Skipped on dedup (the ledger already saw this release; we don't + // re-tick the high-water mark). 
Gated on the per-axis track_* + // flag — a series tracked only for volumes shouldn't have its + // chapter mark moved by chapter announcements. Also gated on the + // candidate's language being in the effective list, so that a + // plugin which forgets to filter by language can't pollute + // `latest_known_*` with out-of-language releases. + if !outcome.deduped { + if let Err(e) = self + .advance_latest_known( + series_id, + tracking_row.as_ref(), + candidate_chapter, + candidate_volume, + &candidate_language, + ) + .await + { + // The ledger row is already persisted; a follow-up tracking + // failure is logged but does not fail the call. The next + // successful record will catch up. + warn!(error = %e, %series_id, "latest_known advance failed; ledger insert preserved"); + } + + // Emit through the task-local recording broadcaster set up by + // `crate::tasks::worker` around the running task. This routes + // the event into `tasks.result.emitted_events` so the web + // server's `TaskListener` replays it to live SSE subscribers in + // distributed deployments. In single-process mode the same + // task-local points at the live broadcaster, so subscribers see + // the event directly. + // + // No task-local set means we're handling a reverse-RPC outside + // any task context (today: shouldn't happen for releases since + // every record path runs inside a poll task). We log and skip + // rather than silently emitting into a void. + // Auto-ignored rows skip the announce event: the row is on the + // ledger for audit/recovery, but the user already owns the + // matching volume/chapter so there's nothing to notify about. 
+ if outcome.row.state != ledger_state::ANNOUNCED { + debug!( + series_id = %outcome.row.series_id, + plugin = %self.plugin_name, + state = %outcome.row.state, + "Skipping release_announced emit for non-announced state" + ); + } else if let Some(broadcaster) = crate::events::current_recording_broadcaster() { + let _ = broadcaster.emit(crate::events::EntityChangeEvent::release_announced( + &outcome.row, + &self.plugin_name, + )); + } else { + debug!( + series_id = %outcome.row.series_id, + plugin = %self.plugin_name, + "No recording broadcaster in scope; skipping release_announced emit" + ); + } + } + + let resp = RecordResponse { + ledger_id: outcome.row.id, + deduped: outcome.deduped, + }; + JsonRpcResponse::success(id, serde_json::to_value(resp).unwrap()) + } + + /// Move `series_tracking.latest_known_chapter` and `latest_known_volume` + /// forward to the candidate's values, gated on the per-axis `track_*` flag + /// and the per-series effective language list. Stale candidates (smaller + /// than current) and out-of-language candidates are silently no-ops on + /// their respective axes. Out-of-language candidates skip *both* axes + /// because the language gate sits above per-axis tracking. + async fn advance_latest_known( + &self, + series_id: Uuid, + tracking_row: Option<&crate::db::entities::series_tracking::Model>, + candidate_chapter: Option<f64>, + candidate_volume: Option<i32>, + candidate_language: &str, + ) -> Result<(), anyhow::Error> { + // No tracking row → series isn't being tracked. Don't auto-create one + // just because a stray candidate came in; the user explicitly opts in + // via the tracking panel. + let Some(row) = tracking_row else { + return Ok(()); + }; + if !row.tracked { + return Ok(()); + } + + // Language gate: out-of-language candidates do not advance the + // high-water mark even if a buggy plugin records them. 
+        let effective = resolve_for_series(&self.db, row.languages.as_ref()).await?;
+        if !includes(&effective, candidate_language) {
+            return Ok(());
+        }
+
+        let mut update = TrackingUpdate::default();
+        let mut dirty = false;
+
+        if let Some(ch) = candidate_chapter
+            && row.track_chapters
+            && ch.is_finite()
+        {
+            let current = row.latest_known_chapter.unwrap_or(f64::NEG_INFINITY);
+            if ch > current {
+                update.latest_known_chapter = Some(Some(ch));
+                dirty = true;
+            }
+        }
+
+        if let Some(vol) = candidate_volume
+            && row.track_volumes
+        {
+            let current = row.latest_known_volume.unwrap_or(i32::MIN);
+            if vol > current {
+                update.latest_known_volume = Some(Some(vol));
+                dirty = true;
+            }
+        }
+
+        if !dirty {
+            return Ok(());
+        }
+        SeriesTrackingRepository::upsert(&self.db, series_id, update).await?;
+        Ok(())
+    }
+
+    async fn handle_state_get(&self, request: &JsonRpcRequest) -> JsonRpcResponse {
+        let id = request.id.clone();
+        let params: SourceStateGetRequest = match parse_params(&request.params) {
+            Ok(p) => p,
+            Err(resp) => return resp.with_id(id),
+        };
+
+        if let Err(resp) = self.assert_source_belongs(&params.source_id, &id).await {
+            return resp;
+        }
+
+        match ReleaseSourceRepository::get_by_id(&self.db, params.source_id).await {
+            Ok(Some(row)) => {
+                let resp = SourceStateView {
+                    etag: row.etag,
+                    last_polled_at: row.last_polled_at,
+                    last_error: row.last_error,
+                    last_error_at: row.last_error_at,
+                };
+                JsonRpcResponse::success(id, serde_json::to_value(resp).unwrap())
+            }
+            Ok(None) => JsonRpcResponse::error(
+                Some(id),
+                JsonRpcError::new(error_codes::NOT_FOUND, "source not found"),
+            ),
+            Err(e) => {
+                error!(error = %e, "source state read failed");
+                JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)),
+                )
+            }
+        }
+    }
+
+    async fn handle_state_set(&self, request: &JsonRpcRequest) -> JsonRpcResponse {
+        let id = request.id.clone();
+        let params: SourceStateSetRequest = match parse_params(&request.params) {
+            Ok(p) => p,
+            Err(resp) => return resp.with_id(id),
+        };
+
+        if let Err(resp) = self.assert_source_belongs(&params.source_id, &id).await {
+            return resp;
+        }
+
+        // Only `etag` is plugin-writable here. `last_polled_at` is set by the
+        // host's poll task; status fields (`last_error`) are owned by the
+        // host. If a plugin needs richer per-source state, it should use
+        // `storage/*` against its own KV bucket.
+        if params.etag.is_none() {
+            return JsonRpcResponse::error(
+                Some(id),
+                JsonRpcError::new(error_codes::INVALID_PARAMS, "no writable fields supplied"),
+            );
+        }
+
+        // record_poll_success has the side effect of clearing `last_error` —
+        // that's not what plugins want here. Instead update etag in-place via
+        // a small read-modify-write.
+        let row = match ReleaseSourceRepository::get_by_id(&self.db, params.source_id).await {
+            Ok(Some(r)) => r,
+            Ok(None) => {
+                return JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(error_codes::NOT_FOUND, "source not found"),
+                );
+            }
+            Err(e) => {
+                error!(error = %e, "source state lookup failed");
+                return JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)),
+                );
+            }
+        };
+
+        use sea_orm::{ActiveModelTrait, Set};
+        let mut active: crate::db::entities::release_sources::ActiveModel = row.into();
+        active.etag = Set(params.etag.clone());
+        active.updated_at = Set(Utc::now());
+        match active.update(&self.db).await {
+            Ok(_) => JsonRpcResponse::success(
+                id,
+                serde_json::json!({"success": true, "etag": params.etag}),
+            ),
+            Err(e) => {
+                error!(error = %e, "source state write failed");
+                JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)),
+                )
+            }
+        }
+    }
+
+    /// Replace the set of `release_sources` rows owned by this plugin.
+ /// + /// This is the materialization endpoint plugins call from `onInitialize` + /// (and on any subsequent config change, which is delivered via plugin + /// process restart). Each call carries the plugin's full desired-state + /// list: + /// + /// - **Upsert** every entry on `(plugin_id, source_key)`. New rows are + /// inserted; existing rows have only the plugin-owned descriptive + /// fields refreshed. User-managed fields (`enabled`, `cron_schedule`) + /// survive across re-registrations so an admin's schedule override or + /// disable toggle isn't trampled when the plugin restarts. + /// - **Prune** rows owned by this plugin whose `source_key` is not in the + /// request. Deletes cascade to `release_ledger`. An empty `sources` + /// list wipes the plugin's row set, which is the correct behavior when + /// an admin clears the plugin's config. + /// - **Reconcile** the scheduler so newly-registered sources start polling + /// on their next cron tick (and pruned ones stop). Best-effort: if the + /// reconcile fails (or no scheduler is wired), the call still succeeds + /// because the row writes are persisted. + /// + /// `kind` is validated against the `release_source` capability the plugin + /// declared in its manifest, so a plugin can't register sources of a + /// `kind` outside its declared capability surface. New rows always start + /// with `cron_schedule = NULL` (inherit the server-wide default); admins + /// override per-row in the settings UI. + async fn handle_register_sources(&self, request: &JsonRpcRequest) -> JsonRpcResponse { + let id = request.id.clone(); + let params: RegisterSourcesRequest = match parse_params(&request.params) { + Ok(p) => p, + Err(resp) => return resp.with_id(id), + }; + + // Validate every source up front so we don't write partial state on a + // bad request. 
+        for src in &params.sources {
+            if src.source_key.trim().is_empty() {
+                return JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(error_codes::INVALID_PARAMS, "source_key cannot be empty"),
+                );
+            }
+            if src.display_name.trim().is_empty() {
+                return JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(error_codes::INVALID_PARAMS, "display_name cannot be empty"),
+                );
+            }
+            if !source_kind::is_valid(&src.kind) {
+                return JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(
+                        error_codes::INVALID_PARAMS,
+                        format!("invalid kind: {}", src.kind),
+                    ),
+                );
+            }
+            if !self.capability.kinds.iter().any(|k| k.as_str() == src.kind) {
+                return JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(
+                        error_codes::INVALID_PARAMS,
+                        format!(
+                            "kind {} not declared in plugin's release_source capability",
+                            src.kind
+                        ),
+                    ),
+                );
+            }
+        }
+        // Reject duplicate source_keys in the same request — they would
+        // collapse to one row at upsert time and silently drop the second
+        // entry's display_name/config, which is almost always a plugin bug.
+        let mut seen: std::collections::HashSet<&str> = std::collections::HashSet::new();
+        for src in &params.sources {
+            if !seen.insert(src.source_key.as_str()) {
+                return JsonRpcResponse::error(
+                    Some(id),
+                    JsonRpcError::new(
+                        error_codes::INVALID_PARAMS,
+                        format!("duplicate source_key in request: {}", src.source_key),
+                    ),
+                );
+            }
+        }
+
+        let keep_keys: Vec<String> = params
+            .sources
+            .iter()
+            .map(|s| s.source_key.clone())
+            .collect();
+
+        // Upsert each source. New rows start with `cron_schedule = NULL`,
+        // i.e. they inherit the server-wide
+        // `release_tracking.default_cron_schedule`. Admins override per-row
+        // via the settings UI; existing rows preserve their override on
+        // re-register.
+ let mut registered = 0u32; + for src in params.sources { + let new = NewReleaseSource { + plugin_id: self.plugin_name.clone(), + source_key: src.source_key, + display_name: src.display_name, + kind: src.kind, + enabled: None, + config: src.config, + }; + if let Err(e) = ReleaseSourceRepository::upsert(&self.db, new).await { + error!(error = %e, "release source upsert failed"); + return JsonRpcResponse::error( + Some(id), + JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)), + ); + } + registered += 1; + } + + // Prune sources the plugin no longer declares. + let pruned = match ReleaseSourceRepository::delete_by_plugin_excluding( + &self.db, + &self.plugin_name, + &keep_keys, + ) + .await + { + Ok(n) => n, + Err(e) => { + error!(error = %e, "release source prune failed"); + return JsonRpcResponse::error( + Some(id), + JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)), + ); + } + }; + + info!( + plugin = %self.plugin_name, + registered, + pruned, + "release sources registered" + ); + + // Reconcile schedules. Best-effort — log failures but don't fail the + // RPC, since the rows are already persisted and the next scheduler + // start (or HTTP-driven reconcile) will catch up. + if let Some(ref scheduler) = self.scheduler { + let mut guard = scheduler.lock().await; + if let Err(e) = guard.reconcile_release_sources().await { + warn!(error = %e, "scheduler reconcile after register_sources failed"); + } + } + + let response = RegisterSourcesResponse { + registered, + pruned: pruned as u32, + }; + JsonRpcResponse::success(id, serde_json::to_value(response).unwrap()) + } + + /// Confirm `source_id` exists and belongs to the calling plugin. Returns + /// an error response if either check fails. 
+ async fn assert_source_belongs( + &self, + source_id: &Uuid, + request_id: &RequestId, + ) -> Result<(), JsonRpcResponse> { + let row = match ReleaseSourceRepository::get_by_id(&self.db, *source_id).await { + Ok(Some(r)) => r, + Ok(None) => { + return Err(JsonRpcResponse::error( + Some(request_id.clone()), + JsonRpcError::new(error_codes::NOT_FOUND, "source not found"), + )); + } + Err(e) => { + error!(error = %e, "source lookup failed"); + return Err(JsonRpcResponse::error( + Some(request_id.clone()), + JsonRpcError::new(error_codes::INTERNAL_ERROR, format!("db error: {}", e)), + )); + } + }; + if row.plugin_id != self.plugin_name { + warn!( + source_id = %source_id, + source_plugin = %row.plugin_id, + caller = %self.plugin_name, + "plugin tried to operate on a source it does not own" + ); + return Err(JsonRpcResponse::error( + Some(request_id.clone()), + JsonRpcError::new( + error_codes::AUTH_FAILED, + "source does not belong to calling plugin", + ), + )); + } + Ok(()) + } +} + +// ============================================================================= +// Wire-format request/response types +// ============================================================================= + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +struct ListTrackedRequest { + source_id: Uuid, + #[serde(default)] + limit: Option<u64>, + /// Offset-based pagination is the simplest fit for SeaORM's + /// `list_tracked_ids` helper. Plugins call with `next_offset` from the + /// previous response. 
+ #[serde(default)] + offset: Option<u64>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct ListTrackedResponse { + tracked: Vec<TrackedSeriesEntry>, + #[serde(skip_serializing_if = "Option::is_none")] + next_offset: Option<u64>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct TrackedSeriesEntry { + series_id: Uuid, + #[serde(default, skip_serializing_if = "Option::is_none")] + aliases: Option<Vec<String>>, + #[serde(default, skip_serializing_if = "Option::is_none")] + external_ids: Option<HashMap<String, String>>, + #[serde(default, skip_serializing_if = "Option::is_none")] + latest_known_chapter: Option<f64>, + #[serde(default, skip_serializing_if = "Option::is_none")] + latest_known_volume: Option<i32>, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +struct RecordRequest { + source_id: Uuid, + candidate: ReleaseCandidate, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct RecordResponse { + ledger_id: Uuid, + deduped: bool, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +struct SourceStateGetRequest { + source_id: Uuid, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +struct SourceStateSetRequest { + source_id: Uuid, + /// Only `etag` is plugin-writable. Future plugin-controlled fields can + /// be added here. + #[serde(default)] + etag: Option<String>, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +struct RegisterSourcesRequest { + sources: Vec<RegisteredSourceInput>, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +struct RegisteredSourceInput { + /// Stable per-plugin identifier for the source. Opaque to the host. + source_key: String, + /// Human-readable label shown in the Release tracking settings table. 
+    display_name: String,
+    /// One of the canonical `release_sources.kind` values; must also be
+    /// declared in the plugin's `release_source` capability.
+    kind: String,
+    /// Optional opaque per-source config snapshot. Stored on the row for
+    /// the host's reference; the plugin reads its own admin config directly.
+    #[serde(default)]
+    config: Option<Value>,
+}
+
+/// Result of `releases/register_sources`.
+#[derive(Debug, Clone, Serialize)]
+#[serde(rename_all = "camelCase")]
+struct RegisterSourcesResponse {
+    /// Number of sources upserted (created or refreshed).
+    registered: u32,
+    /// Number of sources removed because they were not in the request.
+    pruned: u32,
+}
+
+/// Read-only polling-state view returned by `releases/source_state/get`.
+/// All fields are optional; absent fields are omitted from the JSON body.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct SourceStateView {
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    etag: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    last_polled_at: Option<DateTime<Utc>>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    last_error: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    last_error_at: Option<DateTime<Utc>>,
+}
+
+/// Strip a leading namespace prefix (`api:`, `plugin:`) from an external-ID
+/// `source` string and return the bare service name.
+///
+/// Stored `series_external_ids.source` values use one of:
+/// - `api:<service>` (dominant; written by metadata plugins like
+///   MangaBaka, OpenLibrary, AniList).
+/// - `plugin:<name>` (legacy plugin-private form).
+/// - `<service>` (bare; older rows).
+///
+/// Plugin manifests declare `requiresExternalIds` with the bare service
+/// name, so we normalize on read. Anything else (`urn:...`, `mal:`, etc.)
+/// passes through unchanged.
+pub(crate) fn strip_external_id_namespace(source: &str) -> &str {
+    // Order matters only for readability; the two prefixes cannot overlap.
+    if let Some(rest) = source.strip_prefix("api:") {
+        return rest;
+    }
+    if let Some(rest) = source.strip_prefix("plugin:") {
+        return rest;
+    }
+    source
+}
+
+// =============================================================================
+// Param parsing helpers
+// =============================================================================
+
+/// Deserialize a JSON-RPC `params` value into `T`.
+///
+/// Returns an `INVALID_PARAMS` error response when `params` is absent or
+/// fails to deserialize. The response is built with `id: None`; callers
+/// re-attach the request id via [`WithId::with_id`].
+#[allow(clippy::result_large_err)]
+fn parse_params<T: serde::de::DeserializeOwned>(
+    params: &Option<Value>,
+) -> Result<T, JsonRpcResponse> {
+    let params = params.as_ref().ok_or_else(|| {
+        JsonRpcResponse::error(
+            None,
+            JsonRpcError::new(error_codes::INVALID_PARAMS, "params is required"),
+        )
+    })?;
+    serde_json::from_value::<T>(params.clone()).map_err(|e| {
+        JsonRpcResponse::error(
+            None,
+            JsonRpcError::new(
+                error_codes::INVALID_PARAMS,
+                format!("Invalid params: {}", e),
+            ),
+        )
+    })
+}
+
+/// Attach a request id to an error response built before the id was known
+/// (e.g. by [`parse_params`]).
+trait WithId {
+    fn with_id(self, id: RequestId) -> Self;
+}
+
+impl WithId for JsonRpcResponse {
+    fn with_id(mut self, id: RequestId) -> Self {
+        self.id = Some(id);
+        self
+    }
+}
+
+/// Whether a method belongs to the `releases/*` namespace.
+pub fn is_releases_method(method: &str) -> bool {
+    matches!(
+        method,
+        "releases/list_tracked"
+            | "releases/record"
+            | "releases/source_state/get"
+            | "releases/source_state/set"
+            | "releases/register_sources"
+    )
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::db::ScanningStrategy;
+    use crate::db::entities::release_sources::{self, kind};
+    use crate::db::repositories::{
+        LibraryRepository, NewReleaseSource, ReleaseSourceRepository, ReleaseSourceUpdate,
+        SeriesAliasRepository, SeriesExternalIdRepository, SeriesRepository,
+        SeriesTrackingRepository, TrackingUpdate,
+    };
+    use crate::db::test_helpers::create_test_db;
+    use crate::services::plugin::protocol::ReleaseSourceKind;
+    use crate::services::release::candidate::SeriesMatch;
+    use serde_json::json;
+
+    /// Build an rss-uploader capability with the given alias/external-id
+    /// requirements; chapter and volume announcements are always allowed.
+    fn make_capability(
+        requires_aliases: bool,
+        requires_external_ids: Vec<&str>,
+    ) -> ReleaseSourceCapability {
+        ReleaseSourceCapability {
+            kinds: vec![ReleaseSourceKind::RssUploader],
+            requires_aliases,
+            requires_external_ids: requires_external_ids
+                .into_iter()
+                .map(|s| s.to_string())
+                .collect(),
+            can_announce_chapters: true,
+            can_announce_volumes: true,
+        }
+    }
+
+    /// Seed one library, one tracked series, and one release source owned
+    /// by `plugin_name`. Returns `(series_id, source_id)`.
+    async fn setup(db: &DatabaseConnection, plugin_name: &str) -> (Uuid, Uuid) {
+        let library = LibraryRepository::create(db, "Lib", "/lib", ScanningStrategy::Default)
+            .await
+            .unwrap();
+        let series = SeriesRepository::create(db, library.id, "Series", None)
+            .await
+            .unwrap();
+        SeriesTrackingRepository::upsert(
+            db,
+            series.id,
+            TrackingUpdate {
+                tracked: Some(true),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+        let source = ReleaseSourceRepository::create(
+            db,
+            NewReleaseSource {
+                plugin_id: plugin_name.to_string(),
+                source_key: "feed:1".to_string(),
+                display_name: "Feed 1".to_string(),
+                kind: kind::RSS_UPLOADER.to_string(),
+                enabled: None,
+                config: None,
+            },
+        )
+        .await
+        .unwrap();
+        (series.id, source.id)
+    }
+
+    /// Wrap `method`/`params` in a JSON-RPC request with a fixed id of 1.
+    fn make_request(method: &str, params: Value) -> JsonRpcRequest {
+        JsonRpcRequest::new(1i64, method, Some(params))
+    }
+
+    /// A candidate comfortably above the default confidence threshold.
+    fn good_candidate(series_id: Uuid) -> ReleaseCandidate {
+        ReleaseCandidate {
+            series_match: SeriesMatch {
+                codex_series_id: series_id,
+                confidence: 0.95,
+                reason: "alias-exact".to_string(),
+            },
+            external_release_id: "rel-1".to_string(),
+            chapter: Some(143.0),
+            volume: None,
+            language: "en".to_string(),
+            format_hints: None,
+            group_or_uploader: Some("tsuna69".to_string()),
+            payload_url: "https://example.com/r/1".to_string(),
+            media_url: None,
+            media_url_kind: None,
+            info_hash: None,
+            metadata: None,
+            observed_at: Utc::now(),
+        }
+    }
+
+    #[tokio::test]
+    async fn list_tracked_returns_tracked_series_only() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (_series, source_id) = setup(conn, "release-nyaa").await;
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+        let req = make_request(
+            methods::RELEASES_LIST_TRACKED,
+            json!({"sourceId": source_id}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(!resp.is_error(), "unexpected error: {:?}", resp.error);
+        let body: ListTrackedResponse = serde_json::from_value(resp.result.unwrap()).unwrap();
+        assert_eq!(body.tracked.len(), 1);
+        // No aliases/external_ids requested.
+        assert!(body.tracked[0].aliases.is_none());
+        assert!(body.tracked[0].external_ids.is_none());
+    }
+
+    #[tokio::test]
+    async fn list_tracked_includes_aliases_when_requested() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-nyaa").await;
+        SeriesAliasRepository::create(conn, series_id, "Punpun", "manual")
+            .await
+            .unwrap();
+
+        // requires_aliases=true opts this plugin into alias payloads.
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(true, vec![]),
+        );
+        let req = make_request(
+            methods::RELEASES_LIST_TRACKED,
+            json!({"sourceId": source_id}),
+        );
+        let resp = handler.handle_request(&req).await;
+        let body: ListTrackedResponse = serde_json::from_value(resp.result.unwrap()).unwrap();
+        let entry = &body.tracked[0];
+        let aliases = entry.aliases.as_ref().unwrap();
+        assert_eq!(aliases, &vec!["Punpun".to_string()]);
+    }
+
+    #[tokio::test]
+    async fn list_tracked_filters_external_ids_to_declared_sources() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+        // Two external IDs - one matching the manifest, one not.
+        SeriesExternalIdRepository::upsert(
+            conn,
+            series_id,
+            "plugin:mangaupdates",
+            "12345",
+            None,
+            None,
+        )
+        .await
+        .unwrap();
+        SeriesExternalIdRepository::upsert(conn, series_id, "plugin:anilist", "999", None, None)
+            .await
+            .unwrap();
+
+        // Capability declares only `mangaupdates`; `anilist` must be filtered.
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec!["mangaupdates"]),
+        );
+        let req = make_request(
+            methods::RELEASES_LIST_TRACKED,
+            json!({"sourceId": source_id}),
+        );
+        let resp = handler.handle_request(&req).await;
+        let body: ListTrackedResponse = serde_json::from_value(resp.result.unwrap()).unwrap();
+        let ext = body.tracked[0].external_ids.as_ref().unwrap();
+        assert_eq!(ext.len(), 1, "only requested source should leak");
+        assert_eq!(ext.get("mangaupdates").map(String::as_str), Some("12345"));
+        assert!(ext.get("anilist").is_none());
+    }
+
+    #[tokio::test]
+    async fn list_tracked_accepts_api_prefixed_external_ids() {
+        // Regression: MangaBaka writes external IDs as `api:mangaupdates`
+        // (the dominant convention per the SDK docs). The host used to
+        // strip only `plugin:`, so MangaUpdates plugins received zero IDs
+        // and reported "Fetched 0 items" forever. Strip both prefixes.
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+
+        // Stored with the `api:` prefix, requested by bare name below.
+        SeriesExternalIdRepository::upsert(
+            conn,
+            series_id,
+            "api:mangaupdates",
+            "12345",
+            None,
+            None,
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec!["mangaupdates"]),
+        );
+        let req = make_request(
+            methods::RELEASES_LIST_TRACKED,
+            json!({"sourceId": source_id}),
+        );
+        let resp = handler.handle_request(&req).await;
+        let body: ListTrackedResponse = serde_json::from_value(resp.result.unwrap()).unwrap();
+        let ext = body.tracked[0].external_ids.as_ref().unwrap();
+        assert_eq!(
+            ext.get("mangaupdates").map(String::as_str),
+            Some("12345"),
+            "api: prefix should be stripped to match bare-name manifest declaration"
+        );
+    }
+
+    #[test]
+    fn strip_external_id_namespace_handles_known_prefixes() {
+        assert_eq!(
+            strip_external_id_namespace("api:mangaupdates"),
+            "mangaupdates"
+        );
+        assert_eq!(strip_external_id_namespace("plugin:anilist"), "anilist");
+        assert_eq!(strip_external_id_namespace("mangadex"), "mangadex");
+        // Unknown prefixes pass through — we'd rather fail closed than guess.
+        assert_eq!(
+            strip_external_id_namespace("urn:isbn:1234"),
+            "urn:isbn:1234"
+        );
+        assert_eq!(strip_external_id_namespace(""), "");
+    }
+
+    #[tokio::test]
+    async fn list_tracked_rejects_source_owned_by_other_plugin() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (_series, source_id) = setup(conn, "release-nyaa").await;
+
+        // Handler identifies as a different plugin than the source owner.
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-other".to_string(),
+            make_capability(false, vec![]),
+        );
+        let req = make_request(
+            methods::RELEASES_LIST_TRACKED,
+            json!({"sourceId": source_id}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(resp.is_error());
+        assert_eq!(resp.error.unwrap().code, error_codes::AUTH_FAILED);
+    }
+
+    /// `releases/record` emits a `ReleaseAnnounced` event on insert (via the
+    /// task-local recording broadcaster set up by the worker) and suppresses
+    /// it on dedup.
+    #[tokio::test]
+    async fn record_emits_release_announced_on_insert_only() {
+        use crate::events::{EntityEvent, EventBroadcaster, with_recording_broadcaster};
+
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-nyaa").await;
+
+        let broadcaster = std::sync::Arc::new(EventBroadcaster::new(8));
+        let mut rx = broadcaster.subscribe();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        let cand = good_candidate(series_id);
+        let req = make_request(
+            methods::RELEASES_RECORD,
+            json!({"sourceId": source_id, "candidate": cand}),
+        );
+
+        // First record runs inside a recording-broadcaster scope.
+        let req_clone = req.clone();
+        let handler_clone = handler.clone();
+        let first = with_recording_broadcaster(broadcaster.clone(), async move {
+            handler_clone.handle_request(&req_clone).await
+        })
+        .await;
+        assert!(!first.is_error(), "unexpected error: {:?}", first.error);
+        let body: RecordResponse = serde_json::from_value(first.result.unwrap()).unwrap();
+        assert!(!body.deduped);
+
+        let event = rx.try_recv().expect("expected ReleaseAnnounced");
+        match event.event {
+            EntityEvent::ReleaseAnnounced {
+                series_id: ev_series,
+                source_id: ev_source,
+                plugin_id,
+                chapter,
+                language,
+                ..
+            } => {
+                assert_eq!(ev_series, series_id);
+                assert_eq!(ev_source, source_id);
+                assert_eq!(plugin_id, "release-nyaa");
+                assert_eq!(chapter, Some(143.0));
+                assert_eq!(language, "en");
+            }
+            other => panic!("unexpected event: {:?}", other),
+        }
+
+        // Re-recording the same release dedups; no new event should fire.
+        let req_clone = req.clone();
+        let handler_clone = handler.clone();
+        let second = with_recording_broadcaster(broadcaster.clone(), async move {
+            handler_clone.handle_request(&req_clone).await
+        })
+        .await;
+        let body: RecordResponse = serde_json::from_value(second.result.unwrap()).unwrap();
+        assert!(body.deduped);
+        assert!(
+            rx.try_recv().is_err(),
+            "dedup must not emit a new ReleaseAnnounced event"
+        );
+    }
+
+    /// Without a task-local recording broadcaster in scope, `releases/record`
+    /// completes successfully but emits no event (the operation is logged
+    /// at debug; we don't surface a fake "live" emit anywhere).
+    #[tokio::test]
+    async fn record_skips_emit_when_no_broadcaster_in_scope() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-nyaa").await;
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        let cand = good_candidate(series_id);
+        let req = make_request(
+            methods::RELEASES_RECORD,
+            json!({"sourceId": source_id, "candidate": cand}),
+        );
+
+        // No with_recording_broadcaster wrapper here — the call must still succeed.
+        let resp = handler.handle_request(&req).await;
+        assert!(!resp.is_error(), "unexpected error: {:?}", resp.error);
+        let body: RecordResponse = serde_json::from_value(resp.result.unwrap()).unwrap();
+        assert!(!body.deduped, "ledger row still inserted");
+    }
+
+    #[tokio::test]
+    async fn record_inserts_then_dedups() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-nyaa").await;
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+        let cand = good_candidate(series_id);
+        let req = make_request(
+            methods::RELEASES_RECORD,
+            json!({"sourceId": source_id, "candidate": cand}),
+        );
+
+        let first = handler.handle_request(&req).await;
+        assert!(!first.is_error(), "unexpected error: {:?}", first.error);
+        let body: RecordResponse = serde_json::from_value(first.result.unwrap()).unwrap();
+        assert!(!body.deduped);
+
+        let second = handler.handle_request(&req).await;
+        let body: RecordResponse = serde_json::from_value(second.result.unwrap()).unwrap();
+        assert!(body.deduped, "second insert should dedup");
+    }
+
+    #[tokio::test]
+    async fn record_drops_below_threshold_candidate() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-nyaa").await;
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+        // 0.5 sits below the default confidence threshold.
+        let mut cand = good_candidate(series_id);
+        cand.series_match.confidence = 0.5;
+        let req = make_request(
+            methods::RELEASES_RECORD,
+            json!({"sourceId": source_id, "candidate": cand}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(resp.is_error());
+        let err = resp.error.unwrap();
+        assert_eq!(err.code, error_codes::INVALID_PARAMS);
+        assert!(err.message.contains("below threshold"));
+    }
+
+    #[tokio::test]
+    async fn record_honors_per_series_threshold_override() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-nyaa").await;
+        // Lower threshold for this series only.
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                confidence_threshold_override: Some(Some(0.4)),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+        let mut cand = good_candidate(series_id);
+        cand.series_match.confidence = 0.5;
+        let req = make_request(
+            methods::RELEASES_RECORD,
+            json!({"sourceId": source_id, "candidate": cand}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(!resp.is_error(), "override should accept 0.5 candidate");
+    }
+
+    #[tokio::test]
+    async fn record_rejects_source_owned_by_other_plugin() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-nyaa").await;
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-other".to_string(),
+            make_capability(false, vec![]),
+        );
+        let cand = good_candidate(series_id);
+        let req = make_request(
+            methods::RELEASES_RECORD,
+            json!({"sourceId": source_id, "candidate": cand}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(resp.is_error());
+        assert_eq!(resp.error.unwrap().code, error_codes::AUTH_FAILED);
+    }
+
+    #[tokio::test]
+    async fn source_state_get_returns_view() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (_series, source_id) = setup(conn, "release-nyaa").await;
+        // Simulate a prior successful poll so the view has data to return.
+        ReleaseSourceRepository::record_poll_success(
+            conn,
+            source_id,
+            Utc::now(),
+            Some("etag-123".to_string()),
+            None,
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+        let req = make_request(
+            methods::RELEASES_SOURCE_STATE_GET,
+            json!({"sourceId": source_id}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(!resp.is_error());
+        let body: SourceStateView = serde_json::from_value(resp.result.unwrap()).unwrap();
+        assert_eq!(body.etag.as_deref(), Some("etag-123"));
+        assert!(body.last_polled_at.is_some());
+    }
+
+    #[tokio::test]
+    async fn source_state_set_writes_etag() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (_series, source_id) = setup(conn, "release-nyaa").await;
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+        let req = make_request(
+            methods::RELEASES_SOURCE_STATE_SET,
+            json!({"sourceId": source_id, "etag": "\"abc\""}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(!resp.is_error());
+
+        let row = ReleaseSourceRepository::get_by_id(conn, source_id)
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(row.etag.as_deref(), Some("\"abc\""));
+    }
+
+    #[tokio::test]
+    async fn source_state_set_rejects_when_no_writable_field() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (_series, source_id) = setup(conn, "release-nyaa").await;
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+        let req = make_request(
+            methods::RELEASES_SOURCE_STATE_SET,
+            json!({"sourceId": source_id}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(resp.is_error());
+        assert_eq!(resp.error.unwrap().code, error_codes::INVALID_PARAMS);
+    }
+
+    #[tokio::test]
+    async fn unknown_method_returns_method_not_found() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+        let req = make_request("releases/unknown", json!({}));
+        let resp = handler.handle_request(&req).await;
+        assert!(resp.is_error());
+        assert_eq!(resp.error.unwrap().code, error_codes::METHOD_NOT_FOUND);
+    }
+
+    #[test]
+    fn is_releases_method_detects_namespace() {
+        assert!(is_releases_method(methods::RELEASES_LIST_TRACKED));
+        assert!(is_releases_method(methods::RELEASES_RECORD));
+        assert!(is_releases_method(methods::RELEASES_SOURCE_STATE_GET));
+        assert!(is_releases_method(methods::RELEASES_SOURCE_STATE_SET));
+        assert!(is_releases_method(methods::RELEASES_REGISTER_SOURCES));
+        assert!(!is_releases_method("releases/poll"));
+        assert!(!is_releases_method("storage/get"));
+    }
+
+    // -------------------------------------------------------------------------
+    // latest_known_* advancement tests (Phase 6)
+    // -------------------------------------------------------------------------
+
+    /// Send a `releases/record` request for `cand` and return the parsed
+    /// response, failing the test on any RPC error.
+    async fn record_candidate(
+        handler: &ReleasesRequestHandler,
+        source_id: Uuid,
+        cand: ReleaseCandidate,
+    ) -> RecordResponse {
+        let req = make_request(
+            methods::RELEASES_RECORD,
+            json!({"sourceId": source_id, "candidate": cand}),
+        );
+        let resp = handler.handle_request(&req).await;
+        assert!(!resp.is_error(), "unexpected error: {:?}", resp.error);
+        serde_json::from_value(resp.result.unwrap()).unwrap()
+    }
+
+    /// Build a 0.95-confidence candidate with the given release id,
+    /// chapter/volume numbers, and language.
+    fn candidate_with(
+        series_id: Uuid,
+        external_release_id: &str,
+        chapter: Option<f64>,
+        volume: Option<i32>,
+        language: &str,
+    ) -> ReleaseCandidate {
+        ReleaseCandidate {
+            series_match: SeriesMatch {
+                codex_series_id: series_id,
+                confidence: 0.95,
+                reason: "test".to_string(),
+            },
+            external_release_id: external_release_id.to_string(),
+            chapter,
+            volume,
+            language: language.to_string(),
+            format_hints: None,
+            group_or_uploader: Some("group-x".to_string()),
+            payload_url: format!("https://example.com/{}", external_release_id),
+            media_url: None,
+            media_url_kind: None,
+            info_hash: None,
+            metadata: None,
+            observed_at: Utc::now(),
+        }
+    }
+
+    #[tokio::test]
+    async fn record_advances_latest_known_chapter() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+
+        // Seed tracking with chapter=142.
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                tracked: Some(true),
+                latest_known_chapter: Some(Some(142.0)),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        record_candidate(
+            &handler,
+            source_id,
+            candidate_with(series_id, "rel-143", Some(143.0), None, "en"),
+        )
+        .await;
+
+        let row = SeriesTrackingRepository::get(conn, series_id)
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(row.latest_known_chapter, Some(143.0));
+    }
+
+    #[tokio::test]
+    async fn record_does_not_advance_for_stale_chapter() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                tracked: Some(true),
+                latest_known_chapter: Some(Some(143.0)),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        // Candidate chapter 140 is behind the stored 143 high-water mark.
+        record_candidate(
+            &handler,
+            source_id,
+            candidate_with(series_id, "rel-140", Some(140.0), None, "en"),
+        )
+        .await;
+
+        let row = SeriesTrackingRepository::get(conn, series_id)
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(
+            row.latest_known_chapter,
+            Some(143.0),
+            "stale candidate must not move the high-water mark backwards"
+        );
+    }
+
+    #[tokio::test]
+    async fn record_skips_chapter_advance_when_track_chapters_false() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+
+        // Chapters untracked, volumes tracked, chapter mark seeded at 140.
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                tracked: Some(true),
+                track_chapters: Some(false),
+                track_volumes: Some(true),
+                latest_known_chapter: Some(Some(140.0)),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        record_candidate(
+            &handler,
+            source_id,
+            candidate_with(series_id, "rel-143", Some(143.0), None, "en"),
+        )
+        .await;
+
+        let row = SeriesTrackingRepository::get(conn, series_id)
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(
+            row.latest_known_chapter,
+            Some(140.0),
+            "track_chapters=false must suppress chapter advance"
+        );
+    }
+
+    #[tokio::test]
+    async fn record_advances_volume_independently_of_chapter() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                tracked: Some(true),
+                latest_known_volume: Some(Some(14)),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        // Volume-only candidate (chapter is None).
+        record_candidate(
+            &handler,
+            source_id,
+            candidate_with(series_id, "rel-vol-15", None, Some(15), "en"),
+        )
+        .await;
+
+        let row = SeriesTrackingRepository::get(conn, series_id)
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(row.latest_known_volume, Some(15));
+    }
+
+    #[tokio::test]
+    async fn record_skips_advance_when_language_outside_effective_list() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+
+        // Per-series languages = ["en"]; candidate is "id" (Indonesian).
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                tracked: Some(true),
+                languages: Some(Some(serde_json::json!(["en"]))),
+                latest_known_chapter: Some(Some(142.0)),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        let resp = record_candidate(
+            &handler,
+            source_id,
+            candidate_with(series_id, "rel-id-145", Some(145.0), None, "id"),
+        )
+        .await;
+        // Ledger row is still created — language filtering is the plugin's
+        // job. The handler only enforces that out-of-language records don't
+        // move the high-water mark.
+        assert!(!resp.deduped);
+
+        let row = SeriesTrackingRepository::get(conn, series_id)
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(
+            row.latest_known_chapter,
+            Some(142.0),
+            "out-of-language candidate must not move latest_known_chapter"
+        );
+    }
+
+    #[tokio::test]
+    async fn record_dedup_does_not_re_advance_latest_known() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                tracked: Some(true),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        let cand = candidate_with(series_id, "rel-143", Some(143.0), None, "en");
+        let first = record_candidate(&handler, source_id, cand.clone()).await;
+        assert!(!first.deduped);
+
+        // Manually wind back latest_known_chapter to detect a spurious advance
+        // on the dedup path.
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                latest_known_chapter: Some(Some(100.0)),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let second = record_candidate(&handler, source_id, cand).await;
+        assert!(second.deduped);
+
+        let row = SeriesTrackingRepository::get(conn, series_id)
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(
+            row.latest_known_chapter,
+            Some(100.0),
+            "dedup path must not re-tick latest_known_chapter"
+        );
+    }
+
+    #[tokio::test]
+    async fn record_does_not_create_tracking_row_for_untracked_series() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let (series_id, source_id) = setup(conn, "release-mu").await;
+
+        // Flip the tracking row off so the series is not being tracked.
+        SeriesTrackingRepository::upsert(
+            conn,
+            series_id,
+            TrackingUpdate {
+                tracked: Some(false),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-mu".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        record_candidate(
+            &handler,
+            source_id,
+            candidate_with(series_id, "rel-143", Some(143.0), None, "en"),
+        )
+        .await;
+
+        let row = SeriesTrackingRepository::get(conn, series_id)
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(
+            row.latest_known_chapter, None,
+            "untracked series must not have its high-water mark moved"
+        );
+    }
+
+    // -------------------------------------------------------------------------
+    // register_sources
+    // -------------------------------------------------------------------------
+
+    /// Wrap a JSON array of source declarations in a
+    /// `releases/register_sources` request.
+    fn register_request(sources: Value) -> JsonRpcRequest {
+        make_request(
+            methods::RELEASES_REGISTER_SOURCES,
+            json!({ "sources": sources }),
+        )
+    }
+
+    #[tokio::test]
+    async fn register_sources_creates_rows_for_a_fresh_plugin() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        let req = register_request(json!([
+            {
+                "sourceKey": "user:tsuna69",
+                "displayName": "Nyaa: tsuna69",
+                "kind": "rss-uploader",
+                "config": { "subscription": { "kind": "user", "identifier": "tsuna69" } }
+            },
+            {
+                "sourceKey": "query:LuminousScans",
+                "displayName": "Nyaa search: LuminousScans",
+                "kind": "rss-uploader"
+            }
+        ]));
+        let resp = handler.handle_request(&req).await;
+        assert!(!resp.is_error(), "unexpected error: {:?}", resp.error);
+        let body: Value = resp.result.unwrap();
+        assert_eq!(body["registered"], 2);
+        assert_eq!(body["pruned"], 0);
+
+        let rows = ReleaseSourceRepository::list_by_plugin(conn, "release-nyaa")
+            .await
+            .unwrap();
+        assert_eq!(rows.len(), 2);
+        let by_key: HashMap<&str, &release_sources::Model> =
+            rows.iter().map(|r| (r.source_key.as_str(), r)).collect();
+        assert!(by_key.contains_key("user:tsuna69"));
+        assert!(by_key.contains_key("query:LuminousScans"));
+        assert!(
+            by_key["user:tsuna69"].enabled,
+            "new rows default to enabled"
+        );
+    }
+
+    #[tokio::test]
+    async fn register_sources_prunes_rows_no_longer_declared() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        // First call creates two rows.
+        let _ = handler
+            .handle_request(&register_request(json!([
+                { "sourceKey": "user:a", "displayName": "A", "kind": "rss-uploader" },
+                { "sourceKey": "user:b", "displayName": "B", "kind": "rss-uploader" }
+            ])))
+            .await;
+
+        // Second call drops `user:b` and adds `user:c`.
+        let resp = handler
+            .handle_request(&register_request(json!([
+                { "sourceKey": "user:a", "displayName": "A", "kind": "rss-uploader" },
+                { "sourceKey": "user:c", "displayName": "C", "kind": "rss-uploader" }
+            ])))
+            .await;
+        assert!(!resp.is_error());
+        let body: Value = resp.result.unwrap();
+        assert_eq!(body["registered"], 2);
+        assert_eq!(body["pruned"], 1);
+
+        let rows = ReleaseSourceRepository::list_by_plugin(conn, "release-nyaa")
+            .await
+            .unwrap();
+        let keys: Vec<&str> = rows.iter().map(|r| r.source_key.as_str()).collect();
+        assert!(keys.contains(&"user:a"));
+        assert!(keys.contains(&"user:c"));
+        assert!(!keys.contains(&"user:b"), "stale source must be pruned");
+    }
+
+    #[tokio::test]
+    async fn register_sources_with_empty_list_wipes_plugins_rows() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        let _ = handler
+            .handle_request(&register_request(json!([
+                { "sourceKey": "user:a", "displayName": "A", "kind": "rss-uploader" }
+            ])))
+            .await;
+
+        let resp = handler.handle_request(&register_request(json!([]))).await;
+        assert!(!resp.is_error());
+        let body: Value = resp.result.unwrap();
+        assert_eq!(body["registered"], 0);
+        assert_eq!(body["pruned"], 1);
+
+        let rows = ReleaseSourceRepository::list_by_plugin(conn, "release-nyaa")
+            .await
+            .unwrap();
+        assert!(rows.is_empty());
+    }
+
+    #[tokio::test]
+    async fn register_sources_preserves_user_managed_fields_on_re_register() {
+        let (db, _t) = create_test_db().await;
+        let conn = db.sea_orm_connection();
+        let handler = ReleasesRequestHandler::new(
+            conn.clone(),
+            "release-nyaa".to_string(),
+            make_capability(false, vec![]),
+        );
+
+        // Initial register.
+        let _ = handler
+            .handle_request(&register_request(json!([
+                { "sourceKey": "user:tsuna69", "displayName": "Nyaa: tsuna69", "kind": "rss-uploader" }
+            ])))
+            .await;
+
+        // Admin disables it and pins a custom interval.
+        let row = ReleaseSourceRepository::find_by_key(conn, "release-nyaa", "user:tsuna69")
+            .await
+            .unwrap()
+            .unwrap();
+        ReleaseSourceRepository::update(
+            conn,
+            row.id,
+            ReleaseSourceUpdate {
+                enabled: Some(false),
+                cron_schedule: Some(Some("0 */6 * * *".to_string())),
+                ..Default::default()
+            },
+        )
+        .await
+        .unwrap();
+
+        // Plugin re-registers (e.g., after restart) with a refreshed display name + new config.
+ let _ = handler + .handle_request(®ister_request(json!([ + { + "sourceKey": "user:tsuna69", + "displayName": "Nyaa: tsuna69 (refreshed)", + "kind": "rss-uploader", + "config": { "subscription": "fresh" } + } + ]))) + .await; + + let after = ReleaseSourceRepository::find_by_key(conn, "release-nyaa", "user:tsuna69") + .await + .unwrap() + .unwrap(); + assert_eq!(after.display_name, "Nyaa: tsuna69 (refreshed)"); + assert_eq!(after.config, Some(json!({ "subscription": "fresh" }))); + assert!(!after.enabled, "user-set disabled must survive re-register"); + assert_eq!( + after.cron_schedule.as_deref(), + Some("0 */6 * * *"), + "user-set cron_schedule must survive re-register" + ); + } + + #[tokio::test] + async fn register_sources_does_not_touch_other_plugins_rows() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection(); + + // Pre-existing source from a different plugin. + ReleaseSourceRepository::create( + conn, + NewReleaseSource { + plugin_id: "release-mangaupdates".to_string(), + source_key: "default".to_string(), + display_name: "MangaUpdates".to_string(), + kind: kind::RSS_SERIES.to_string(), + enabled: None, + config: None, + }, + ) + .await + .unwrap(); + + let handler = ReleasesRequestHandler::new( + conn.clone(), + "release-nyaa".to_string(), + make_capability(false, vec![]), + ); + // Empty register from nyaa — must not nuke mangaupdates' row. + let _ = handler.handle_request(®ister_request(json!([]))).await; + + let mu_rows = ReleaseSourceRepository::list_by_plugin(conn, "release-mangaupdates") + .await + .unwrap(); + assert_eq!(mu_rows.len(), 1); + } + + #[tokio::test] + async fn register_sources_rejects_kind_outside_capability() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection(); + let handler = ReleasesRequestHandler::new( + conn.clone(), + "release-nyaa".to_string(), + // Only declares rss-uploader. 
+ make_capability(false, vec![]), + ); + + let resp = handler + .handle_request(®ister_request(json!([ + { "sourceKey": "x", "displayName": "X", "kind": "rss-series" } + ]))) + .await; + assert!(resp.is_error()); + assert!(resp.error.unwrap().message.contains("not declared")); + + // Nothing was written. + let rows = ReleaseSourceRepository::list_by_plugin(conn, "release-nyaa") + .await + .unwrap(); + assert!(rows.is_empty()); + } + + #[tokio::test] + async fn register_sources_rejects_invalid_kind_string() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection(); + let handler = ReleasesRequestHandler::new( + conn.clone(), + "release-nyaa".to_string(), + make_capability(false, vec![]), + ); + + let resp = handler + .handle_request(®ister_request(json!([ + { "sourceKey": "x", "displayName": "X", "kind": "frobnicate" } + ]))) + .await; + assert!(resp.is_error()); + assert!(resp.error.unwrap().message.contains("invalid kind")); + } + + #[tokio::test] + async fn register_sources_rejects_duplicate_keys_in_request() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection(); + let handler = ReleasesRequestHandler::new( + conn.clone(), + "release-nyaa".to_string(), + make_capability(false, vec![]), + ); + + let resp = handler + .handle_request(®ister_request(json!([ + { "sourceKey": "dup", "displayName": "A", "kind": "rss-uploader" }, + { "sourceKey": "dup", "displayName": "B", "kind": "rss-uploader" } + ]))) + .await; + assert!(resp.is_error()); + assert!(resp.error.unwrap().message.contains("duplicate")); + let rows = ReleaseSourceRepository::list_by_plugin(conn, "release-nyaa") + .await + .unwrap(); + assert!(rows.is_empty(), "validation must run before any write"); + } + + #[tokio::test] + async fn register_sources_rejects_empty_source_key_or_display_name() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection(); + let handler = ReleasesRequestHandler::new( + conn.clone(), + 
"release-nyaa".to_string(), + make_capability(false, vec![]), + ); + + let resp1 = handler + .handle_request(®ister_request(json!([ + { "sourceKey": " ", "displayName": "X", "kind": "rss-uploader" } + ]))) + .await; + assert!(resp1.is_error()); + + let resp2 = handler + .handle_request(®ister_request(json!([ + { "sourceKey": "x", "displayName": " ", "kind": "rss-uploader" } + ]))) + .await; + assert!(resp2.is_error()); + } +} diff --git a/src/services/plugin/rpc.rs b/src/services/plugin/rpc.rs index 05d6a39a..fccfe664 100644 --- a/src/services/plugin/rpc.rs +++ b/src/services/plugin/rpc.rs @@ -10,17 +10,69 @@ use std::time::Duration; use serde::Serialize; use serde::de::DeserializeOwned; use serde_json::Value; -use tokio::sync::{Mutex, oneshot}; +use tokio::sync::{Mutex, RwLock, mpsc}; use tokio::time::timeout; use tracing::{debug, error, warn}; +use super::permissions::{self, PermissionError}; use super::process::{PluginProcess, ProcessError}; use super::protocol::{ - JSONRPC_VERSION, JsonRpcError, JsonRpcRequest, JsonRpcResponse, RequestId, error_codes, + JSONRPC_VERSION, JsonRpcError, JsonRpcRequest, JsonRpcResponse, PluginCapabilities, RequestId, + error_codes, }; +use super::releases_handler::{ReleasesRequestHandler, is_releases_method}; use super::storage::is_storage_method; use super::storage_handler::StorageRequestHandler; +/// Bag of handlers + capabilities that mediate plugin reverse-RPC calls. +/// +/// Constructed before the plugin starts, but the capability snapshot and the +/// release-source handler are filled in once `initialize` returns and the +/// host knows what the plugin can do. The reader task holds an `Arc<RwLock>` +/// to this struct so updates land without restarting the task. +pub struct ReverseRpcContext { + storage_handler: Option<StorageRequestHandler>, + releases_handler: Option<ReleasesRequestHandler>, + /// `None` until the plugin has been initialized. 
+ capabilities: Option<PluginCapabilities>, +} + +impl ReverseRpcContext { + pub fn new() -> Self { + Self { + storage_handler: None, + releases_handler: None, + capabilities: None, + } + } + + pub fn with_storage(storage_handler: StorageRequestHandler) -> Self { + Self { + storage_handler: Some(storage_handler), + releases_handler: None, + capabilities: None, + } + } + + /// Replace the plugin's capability snapshot, used by [`super::handle::PluginHandle`] + /// once `initialize` returns. + pub fn set_capabilities(&mut self, caps: PluginCapabilities) { + self.capabilities = Some(caps); + } + + /// Install the releases handler. Called after capabilities are known + /// and the plugin declared `release_source`. + pub fn set_releases_handler(&mut self, handler: ReleasesRequestHandler) { + self.releases_handler = Some(handler); + } +} + +impl Default for ReverseRpcContext { + fn default() -> Self { + Self::new() + } +} + /// Error type for RPC operations #[derive(Debug, thiserror::Error)] pub enum RpcError { @@ -89,9 +141,32 @@ impl From<JsonRpcError> for RpcError { } } +/// Frame delivered from the response reader to a pending forward call. +/// +/// Forward calls await an `mpsc::Receiver<PendingFrame>` instead of a single +/// `oneshot::Receiver`. The reader pushes either: +/// - one `Response` (terminal — the receiver loop stops), or +/// - zero or more `ReverseRpc` frames (mid-flight — the caller dispatches +/// each one on its own tokio task and writes the response back to the +/// plugin), followed eventually by exactly one `Response`. +/// +/// Routing reverse-RPCs back to the caller (instead of dispatching them on +/// the reader task) is what lets task-local context — most importantly the +/// recording broadcaster set up by [`crate::tasks::worker`] — propagate into +/// the dispatcher. 
Without this, events emitted by reverse-RPC handlers +/// (like `releases/record`) would have no recording context and would never +/// reach the web server's SSE stream in distributed deployments. +enum PendingFrame { + /// The plugin returned a response for this forward call. Terminal. + Response(Result<Value, RpcError>), + /// The plugin made a reverse-RPC call while servicing this forward + /// call. The caller must dispatch and write the response back. + ReverseRpc(JsonRpcRequest), +} + /// Pending request waiting for a response struct PendingRequest { - tx: oneshot::Sender<Result<Value, RpcError>>, + tx: mpsc::UnboundedSender<PendingFrame>, } /// JSON-RPC client for communicating with a plugin process @@ -110,12 +185,14 @@ pub struct RpcClient { /// Set to false when the response reader task detects process termination. /// This prevents writing to a dead process, which would cause EPIPE errors. process_alive: Arc<AtomicBool>, + /// Reverse-RPC handlers + capability snapshot, mutable after init. + reverse_ctx: Arc<RwLock<ReverseRpcContext>>, } impl RpcClient { /// Create a new RPC client wrapping a plugin process pub fn new(process: PluginProcess, default_timeout: Duration) -> Self { - Self::new_internal(process, default_timeout, None) + Self::new_internal(process, default_timeout, ReverseRpcContext::new()) } /// Create a new RPC client with storage request handling support. 
@@ -129,26 +206,32 @@ impl RpcClient { default_timeout: Duration, storage_handler: StorageRequestHandler, ) -> Self { - Self::new_internal(process, default_timeout, Some(storage_handler)) + Self::new_internal( + process, + default_timeout, + ReverseRpcContext::with_storage(storage_handler), + ) } fn new_internal( process: PluginProcess, default_timeout: Duration, - storage_handler: Option<StorageRequestHandler>, + ctx: ReverseRpcContext, ) -> Self { let process = Arc::new(Mutex::new(process)); let pending: Arc<Mutex<HashMap<i64, PendingRequest>>> = Arc::new(Mutex::new(HashMap::new())); let process_alive = Arc::new(AtomicBool::new(true)); + let reverse_ctx = Arc::new(RwLock::new(ctx)); // Start the response reader task let reader_handle = { let process = Arc::clone(&process); let pending = Arc::clone(&pending); let process_alive = Arc::clone(&process_alive); + let reverse_ctx = Arc::clone(&reverse_ctx); tokio::spawn(async move { - response_reader_task(process, pending, process_alive, storage_handler).await; + response_reader_task(process, pending, process_alive, reverse_ctx).await; }) }; @@ -159,9 +242,21 @@ impl RpcClient { default_timeout, reader_handle: Some(reader_handle), process_alive, + reverse_ctx, } } + /// Update the reverse-RPC context after initialization. Used by + /// [`super::handle::PluginHandle`] to inject the capability snapshot and + /// install the releases handler once the manifest is known. + pub async fn update_reverse_ctx<F>(&self, f: F) + where + F: FnOnce(&mut ReverseRpcContext), + { + let mut ctx = self.reverse_ctx.write().await; + f(&mut ctx); + } + /// Send a request and wait for a response pub async fn call<P, R>(&self, method: &str, params: P) -> Result<R, RpcError> where @@ -172,7 +267,18 @@ impl RpcClient { .await } - /// Send a request and wait for a response with custom timeout + /// Send a request and wait for a response with custom timeout. 
+ /// + /// While awaiting the response, this also services any reverse-RPC + /// requests the plugin makes that are tagged with `parent_request_id = + /// id` of this call. Dispatching here (rather than on the reader task) + /// keeps the dispatch on the caller's tokio task, so task-local state + /// (notably the recording broadcaster set by the worker) propagates into + /// the reverse-RPC handlers — see [`PendingFrame`] for context. + /// + /// The `request_timeout` bounds *the entire forward call*, including + /// any reverse-RPC servicing in between. That matches the previous + /// semantics from the caller's point of view. pub async fn call_with_timeout<P, R>( &self, method: &str, @@ -205,6 +311,7 @@ impl RpcClient { } else { Some(params_value) }, + parent_request_id: None, }; let request_json = serde_json::to_string(&request)?; @@ -215,8 +322,11 @@ impl RpcClient { "Sending RPC request" ); - // Create response channel - let (tx, rx) = oneshot::channel(); + // Create response channel. Unbounded because reverse-RPCs are + // dispatched inline and the queue depth is naturally bounded by the + // plugin's behavior; bounding it would risk deadlock if the plugin + // bursts reverse-RPCs faster than the caller drains them. + let (tx, mut rx) = mpsc::unbounded_channel::<PendingFrame>(); { let mut pending = self.pending.lock().await; pending.insert(id, PendingRequest { tx }); @@ -237,20 +347,68 @@ impl RpcClient { process.write_line(&request_json).await?; } - // Wait for response with timeout + // Loop, servicing reverse-RPC frames until the response frame + // arrives or we time out. Dispatching reverse-RPCs here (on the + // caller's task) is what lets task-local recording broadcasters + // propagate into the handlers — see [`PendingFrame`]. 
debug!( id = id, timeout_ms = request_timeout.as_millis(), "Waiting for RPC response" ); - let result = match timeout(request_timeout, rx).await { + let response_result = timeout(request_timeout, async { + loop { + match rx.recv().await { + Some(PendingFrame::Response(result)) => return Ok::<_, RpcError>(result), + Some(PendingFrame::ReverseRpc(reverse_request)) => { + // Dispatch on this task so task-locals propagate. + let reverse_method = reverse_request.method.clone(); + let response = dispatch_reverse_rpc( + &reverse_method, + &reverse_request, + &self.reverse_ctx, + ) + .await; + // Write the response back to the plugin. Best-effort: + // a write failure here is logged but doesn't abort + // the forward call (the plugin may still complete). + match serde_json::to_string(&response) { + Ok(response_json) => { + let process_guard = self.process.lock().await; + if let Err(e) = process_guard.write_line(&response_json).await { + error!( + error = %e, + method = %reverse_method, + forward_id = id, + "Failed to write reverse-RPC response to plugin" + ); + } + } + Err(e) => { + error!( + error = %e, + method = %reverse_method, + "Failed to serialize reverse-RPC response" + ); + } + } + } + None => { + // Channel closed — plugin process died and the + // reader cancelled all pending requests. 
+ return Err(RpcError::Cancelled); + } + } + } + }) + .await; + + let result = match response_result { Ok(Ok(result)) => { debug!(id = id, "RPC response received"); result } - Ok(Err(_)) => { - // Channel was closed - likely because the plugin process died - // and the response reader task cancelled all pending requests + Ok(Err(RpcError::Cancelled)) => { error!( id = id, method = method, @@ -259,8 +417,11 @@ impl RpcClient { self.remove_pending(id).await; return Err(RpcError::Cancelled); } + Ok(Err(e)) => { + self.remove_pending(id).await; + return Err(e); + } Err(_) => { - // Timeout error!( id = id, timeout_ms = request_timeout.as_millis(), @@ -301,7 +462,9 @@ impl RpcClient { { let mut pending = self.pending.lock().await; for (_, req) in pending.drain() { - let _ = req.tx.send(Err(RpcError::Cancelled)); + let _ = req + .tx + .send(PendingFrame::Response(Err(RpcError::Cancelled))); } } @@ -329,17 +492,137 @@ impl Drop for RpcClient { } } -/// Task that reads lines from the plugin process and dispatches them. +/// Dispatch a single reverse-RPC request to the appropriate handler after +/// running the permission check. /// -/// Handles two types of messages: -/// 1. **Responses**: Lines with `result` or `error` → dispatched to pending requests -/// 2. **Reverse RPC requests**: Lines with `method` (e.g., `storage/*`) → handled by -/// the storage handler and response written back to the plugin's stdin +/// Permission failures map to: +/// - `Denied` → `AUTH_FAILED` (the plugin called a method it isn't allowed +/// to call; tracing-friendly). +/// - `UnknownMethod` → `METHOD_NOT_FOUND` (no mapping; either a typo or the +/// method belongs to a future namespace). +async fn dispatch_reverse_rpc( + method: &str, + request: &JsonRpcRequest, + reverse_ctx: &Arc<RwLock<ReverseRpcContext>>, +) -> JsonRpcResponse { + let request_id = request.id.clone(); + + // Take a read snapshot of the context. 
We keep it as long as we're + // dispatching so the handlers don't get swapped mid-call. + let ctx_guard = reverse_ctx.read().await; + + // 1. Permission check. If capabilities haven't been set yet (i.e. the + // plugin tried to make a reverse-RPC call before the host installed + // the per-plugin reverse-RPC handlers), we return METHOD_NOT_FOUND + // rather than AUTH_FAILED. From the plugin's perspective the method + // isn't dispatchable *yet* — distinguishing this from a real + // permission denial lets the plugin SDK retry with backoff to ride + // out the brief initialization race (see e.g. release-nyaa's + // `registerSources` retry on -32601). AUTH_FAILED stays reserved + // for actual capability-declined-method denials. + let caps = match ctx_guard.capabilities.as_ref() { + Some(c) => c, + None => { + warn!( + method = %method, + "Reverse-RPC call before plugin initialized; deferring (METHOD_NOT_FOUND)" + ); + return JsonRpcResponse::error( + Some(request_id), + JsonRpcError::new( + error_codes::METHOD_NOT_FOUND, + "plugin not initialized; capabilities unknown", + ), + ); + } + }; + + if let Err(err) = permissions::enforce(method, caps) { + match &err { + PermissionError::Denied { required, .. } => { + warn!(method = %method, required = ?required, "Permission denied for reverse-RPC call"); + return JsonRpcResponse::error( + Some(request_id), + JsonRpcError::new(error_codes::AUTH_FAILED, err.to_string()), + ); + } + PermissionError::UnknownMethod { .. } => { + warn!(method = %method, "Unknown reverse-RPC method"); + return JsonRpcResponse::error( + Some(request_id), + JsonRpcError::new(error_codes::METHOD_NOT_FOUND, err.to_string()), + ); + } + } + } + + // 2. Route to the matching handler. 
+ if is_storage_method(method) { + match ctx_guard.storage_handler.as_ref() { + Some(handler) => { + debug!(method = %method, "Routing to storage handler"); + handler.handle_request(request).await + } + None => { + warn!(method = %method, "Storage method called but no storage handler installed"); + JsonRpcResponse::error( + Some(request_id), + JsonRpcError::new( + error_codes::METHOD_NOT_FOUND, + "Storage is not available for this plugin", + ), + ) + } + } + } else if is_releases_method(method) { + match ctx_guard.releases_handler.as_ref() { + Some(handler) => { + debug!(method = %method, "Routing to releases handler"); + handler.handle_request(request).await + } + None => { + warn!(method = %method, "Releases method called but no releases handler installed"); + JsonRpcResponse::error( + Some(request_id), + JsonRpcError::new( + error_codes::INTERNAL_ERROR, + "Releases handler not configured", + ), + ) + } + } + } else { + // Permission check passed but no handler match — should be + // unreachable if the permissions table and handler set agree. + warn!(method = %method, "Permission-allowed method has no handler routing"); + JsonRpcResponse::error( + Some(request_id), + JsonRpcError::new( + error_codes::METHOD_NOT_FOUND, + format!("No handler for method `{}`", method), + ), + ) + } +} + +/// Task that reads lines from the plugin process and routes them. +/// +/// Handles three categories of message: +/// 1. **Responses**: Lines with `result` or `error` → routed to the matching +/// pending caller via [`PendingFrame::Response`]. +/// 2. **Reverse-RPC requests with a `parentRequestId`**: routed to the +/// pending caller of that forward call via [`PendingFrame::ReverseRpc`]. +/// The caller dispatches on its own tokio task so task-locals propagate. +/// 3. **Reverse-RPC requests without a `parentRequestId`** (legacy plugins +/// that predate the field, or true orphans): dispatched on the reader +/// task as before. 
These won't have a recording broadcaster in scope and +/// won't replay in distributed deployments — but that's no regression +/// from the prior behavior. async fn response_reader_task( process: Arc<Mutex<PluginProcess>>, pending: Arc<Mutex<HashMap<i64, PendingRequest>>>, process_alive: Arc<AtomicBool>, - storage_handler: Option<StorageRequestHandler>, + reverse_ctx: Arc<RwLock<ReverseRpcContext>>, ) { debug!("RPC response reader task started"); loop { @@ -399,57 +682,56 @@ async fn response_reader_task( .map(|m| m.to_string()); if let Some(method) = is_request { - if is_storage_method(&method) { - if let Some(ref handler) = storage_handler { - // Parse as a full request - let request: JsonRpcRequest = match serde_json::from_value(json_value) { - Ok(r) => r, - Err(e) => { - warn!(error = %e, "Failed to parse storage request"); - continue; - } - }; - - debug!(method = %method, "Handling reverse RPC storage request from plugin"); - let response = handler.handle_request(&request).await; - - // Write the response back to the plugin's stdin - let response_json = match serde_json::to_string(&response) { - Ok(j) => j, - Err(e) => { - error!(error = %e, "Failed to serialize storage response"); - continue; - } - }; + let request: JsonRpcRequest = match serde_json::from_value(json_value) { + Ok(r) => r, + Err(e) => { + warn!(error = %e, method = %method, "Failed to parse reverse-RPC request"); + continue; + } + }; - let process = process.lock().await; - if let Err(e) = process.write_line(&response_json).await { - error!(error = %e, "Failed to write storage response to plugin"); - } - } else { - warn!( - method = %method, - "Plugin sent storage request but no storage handler is configured" - ); - // Send error response back to plugin - if let Ok(request) = serde_json::from_value::<JsonRpcRequest>(json_value) { - let error_response = JsonRpcResponse::error( - Some(request.id), - JsonRpcError::new( - error_codes::METHOD_NOT_FOUND, - "Storage is not available for this plugin", 
- ), + // Try to route to the originating forward call so dispatch + // happens on the caller's task (and task-locals propagate). + let parent_id = request + .parent_request_id + .as_ref() + .and_then(parent_id_to_i64); + + if let Some(parent_id) = parent_id { + let routed = { + let pending_map = pending.lock().await; + pending_map.get(&parent_id).map(|p| p.tx.clone()) + }; + if let Some(tx) = routed { + if let Err(send_err) = tx.send(PendingFrame::ReverseRpc(request)) { + // Receiver dropped between lookup and send — race + // with timeout/shutdown. Fall back to dispatching + // on the reader so the plugin still gets a response. + let dropped = match send_err.0 { + PendingFrame::ReverseRpc(req) => req, + // Unreachable: we just constructed a ReverseRpc + // frame above, and `send` returns whatever it + // failed to deliver. + PendingFrame::Response(_) => continue, + }; + warn!( + method = %method, + parent_id = parent_id, + "Caller dropped pending channel; falling back to reader-task dispatch" ); - if let Ok(resp_json) = serde_json::to_string(&error_response) { - let process = process.lock().await; - let _ = process.write_line(&resp_json).await; - } + dispatch_and_write(dropped, method.clone(), &reverse_ctx, &process).await; } + continue; } - continue; + warn!( + method = %method, + parent_id = parent_id, + "Reverse-RPC parent request id not found in pending map; dispatching on reader" + ); } - // Non-storage methods from the plugin are not supported - warn!(method = %method, "Plugin sent unsupported reverse RPC request"); + + // No parent id, or parent not pending: dispatch on the reader. 
+ dispatch_and_write(request, method, &reverse_ctx, &process).await; continue; } @@ -512,7 +794,7 @@ async fn response_reader_task( )) }; - if req.tx.send(result).is_err() { + if req.tx.send(PendingFrame::Response(result)).is_err() { debug!("Request {} receiver dropped", id); } } else { @@ -541,9 +823,49 @@ async fn response_reader_task( request_id = id, "Cancelling pending request due to plugin process exit" ); - let _ = req - .tx - .send(Err(RpcError::Process(ProcessError::ProcessTerminated))); + let _ = req.tx.send(PendingFrame::Response(Err(RpcError::Process( + ProcessError::ProcessTerminated, + )))); + } +} + +/// Coerce a reverse-RPC `parentRequestId` to the `i64` we use as our +/// pending-map key. Numbers map directly; strings parse as numbers (the host +/// only ever issues numeric ids, but the field type is `RequestId` for +/// protocol generality). +fn parent_id_to_i64(id: &RequestId) -> Option<i64> { + match id { + RequestId::Number(n) => Some(*n), + RequestId::String(s) => s.parse::<i64>().ok(), + } +} + +/// Dispatch a reverse-RPC on the *current* task and write the response back +/// to the plugin. Used as the fallback when no parent forward call is +/// available to dispatch on (legacy plugins, or the parent's caller has +/// already gone away). 
+async fn dispatch_and_write( + request: JsonRpcRequest, + method: String, + reverse_ctx: &Arc<RwLock<ReverseRpcContext>>, + process: &Arc<Mutex<PluginProcess>>, +) { + let request_id = request.id.clone(); + let response = dispatch_reverse_rpc(&method, &request, reverse_ctx).await; + let response_json = match serde_json::to_string(&response) { + Ok(j) => j, + Err(e) => { + error!(error = %e, method = %method, "Failed to serialize reverse-RPC response"); + let fallback = JsonRpcResponse::error( + Some(request_id), + JsonRpcError::new(error_codes::INTERNAL_ERROR, "failed to serialize response"), + ); + serde_json::to_string(&fallback).unwrap_or_default() + } + }; + let process_guard = process.lock().await; + if let Err(e) = process_guard.write_line(&response_json).await { + error!(error = %e, method = %method, "Failed to write reverse-RPC response to plugin"); } } @@ -668,6 +990,132 @@ mod tests { } } + /// Reverse-RPC dispatch should reject calls before the plugin has been + /// initialized — at that point the host doesn't yet know the plugin's + /// capabilities. Returned as `METHOD_NOT_FOUND` (rather than + /// `AUTH_FAILED`) so plugin SDKs can retry with backoff to ride out the + /// brief init race; an `AUTH_FAILED` response would tell the SDK to + /// give up. See the doc comment on `dispatch_reverse_rpc`. + #[tokio::test] + async fn test_dispatch_rejects_before_init() { + let ctx = Arc::new(RwLock::new(ReverseRpcContext::new())); + let request = JsonRpcRequest::new( + 1i64, + super::super::protocol::methods::STORAGE_GET, + Some(json!({"key": "x"})), + ); + let resp = dispatch_reverse_rpc(&request.method, &request, &ctx).await; + assert!(resp.is_error()); + assert_eq!(resp.error.unwrap().code, error_codes::METHOD_NOT_FOUND); + } + + /// A plugin without `release_source` calling `releases/record` should be + /// rejected with AUTH_FAILED, regardless of payload. 
+ #[tokio::test] + async fn test_dispatch_denies_release_method_without_capability() { + use super::super::protocol::{MetadataContentType, PluginCapabilities}; + + let mut ctx_inner = ReverseRpcContext::new(); + ctx_inner.set_capabilities(PluginCapabilities { + metadata_provider: vec![MetadataContentType::Series], + ..Default::default() + }); + let ctx = Arc::new(RwLock::new(ctx_inner)); + + let request = JsonRpcRequest::new( + 1i64, + super::super::protocol::methods::RELEASES_RECORD, + Some(json!({})), + ); + let resp = dispatch_reverse_rpc(&request.method, &request, &ctx).await; + assert!(resp.is_error()); + assert_eq!(resp.error.unwrap().code, error_codes::AUTH_FAILED); + } + + /// Unknown methods are rejected with `METHOD_NOT_FOUND` (rather than + /// silently ignored, as the previous code did). + #[tokio::test] + async fn test_dispatch_unknown_method_returns_method_not_found() { + use super::super::protocol::PluginCapabilities; + + let mut ctx_inner = ReverseRpcContext::new(); + ctx_inner.set_capabilities(PluginCapabilities::default()); + let ctx = Arc::new(RwLock::new(ctx_inner)); + + let request = JsonRpcRequest::new(1i64, "frobnicate/zap", Some(json!({}))); + let resp = dispatch_reverse_rpc(&request.method, &request, &ctx).await; + assert!(resp.is_error()); + assert_eq!(resp.error.unwrap().code, error_codes::METHOD_NOT_FOUND); + } + + /// Storage methods (`AlwaysAllowed`) work for any plugin once initialized, + /// but if no storage handler is installed they fall through to a clear + /// error rather than silently failing. 
+ #[tokio::test] + async fn test_dispatch_storage_without_handler_returns_method_not_found() { + use super::super::protocol::PluginCapabilities; + + let mut ctx_inner = ReverseRpcContext::new(); + ctx_inner.set_capabilities(PluginCapabilities::default()); + let ctx = Arc::new(RwLock::new(ctx_inner)); + + let request = JsonRpcRequest::new( + 1i64, + super::super::protocol::methods::STORAGE_GET, + Some(json!({"key": "x"})), + ); + let resp = dispatch_reverse_rpc(&request.method, &request, &ctx).await; + assert!(resp.is_error()); + assert_eq!(resp.error.unwrap().code, error_codes::METHOD_NOT_FOUND); + } + + /// `parentRequestId` round-trips through serde with the camelCase wire + /// name and is omitted when None. This is the protocol contract we + /// share with the plugin SDK. + #[test] + fn parent_request_id_serializes_as_camel_case_and_omits_when_none() { + let mut req = JsonRpcRequest::new(42i64, "releases/record", Some(json!({"x": 1}))); + // Default: omitted on the wire. + let json = serde_json::to_string(&req).unwrap(); + assert!( + !json.contains("parentRequestId"), + "absent field should be skipped: {json}" + ); + + // Set: serialized as camelCase. + req.parent_request_id = Some(RequestId::Number(7)); + let json = serde_json::to_string(&req).unwrap(); + assert!( + json.contains("\"parentRequestId\":7"), + "expected camelCase parentRequestId in: {json}" + ); + + // Round-trip: a wire payload deserializes back with the field set. + let wire = r#"{"jsonrpc":"2.0","id":1,"method":"releases/record","parentRequestId":99}"#; + let parsed: JsonRpcRequest = serde_json::from_str(wire).unwrap(); + assert!(matches!( + parsed.parent_request_id, + Some(RequestId::Number(99)) + )); + } + + /// `parent_id_to_i64` accepts both numeric and string ids — we use it to + /// look up the parent forward call in the pending map, which is keyed by + /// `i64`. The host only ever issues numeric ids, but the protocol type + /// is `RequestId` for generality. 
+ #[test] + fn parent_id_to_i64_handles_numeric_and_string_ids() { + assert_eq!(parent_id_to_i64(&RequestId::Number(42)), Some(42)); + assert_eq!( + parent_id_to_i64(&RequestId::String("17".to_string())), + Some(17) + ); + assert_eq!( + parent_id_to_i64(&RequestId::String("nope".to_string())), + None + ); + } + /// Verify that dropping an RpcClient aborts the reader task, releasing the /// Arc<Mutex<PluginProcess>> so kill_on_drop(true) can fire on the child process. #[tokio::test] diff --git a/src/services/release/auto_ignore.rs b/src/services/release/auto_ignore.rs new file mode 100644 index 00000000..ca7a2843 --- /dev/null +++ b/src/services/release/auto_ignore.rs @@ -0,0 +1,220 @@ +//! Decide whether an incoming release matches something the user already +//! owns, so ingestion can mark it `ignored` instead of `announced`. +//! +//! Direct matches only. We do not infer chapter ownership from owned +//! volumes (chapter→volume mapping is unreliable upstream) or vice versa. +//! +//! Inputs come from [`crate::db::repositories::SeriesRepository::get_owned_release_keys_for_series`]: +//! the set of `(volume, chapter)` pairs derived from book metadata, plus +//! a count fallback used only when no book in the series has any volume +//! metadata. +//! +//! Whole-volume ownership is signaled by `chapter = None` in the owned set; +//! chapter ownership by `chapter = Some(_)`. A release for "Vol 3" matches +//! an owned `(Some(3), None)`; a release for "Ch 12" matches an owned +//! `(_, Some(12))` regardless of volume. + +/// Per-series ownership signature consumed by [`should_auto_ignore`]. +#[derive(Debug, Default, Clone)] +pub struct OwnedReleaseKeys { + /// `(volume, chapter)` pairs from book metadata, after filtering out + /// rows with both fields null. + /// + /// - `(Some(v), None)` — whole volume `v` owned (no specific chapter). + /// - `(Some(v), Some(c))` — chapter `c` of volume `v` owned. + /// - `(None, Some(c))` — chapter `c` owned, volume unknown. 
+ pub keys: Vec<(Option<i32>, Option<f64>)>, + /// `true` if at least one book in the series carries volume metadata. + /// When `false`, we fall back to [`Self::volumes_owned_count`]. + pub has_any_volume_metadata: bool, + /// Count of "complete-volume" books (volume IS NOT NULL AND chapter + /// IS NULL). Only consulted in the count-fallback branch when + /// [`Self::has_any_volume_metadata`] is `false`. + pub volumes_owned_count: i64, +} + +/// True when the release matches a directly-owned key. +/// +/// Matching rules: +/// - **Volume + chapter release**: matches an owned `(Some(v), Some(c))`, +/// or an owned whole volume `(Some(v), None)` (whole volume implies all +/// chapters in it). +/// - **Volume-only release**: matches an owned whole volume +/// `(Some(v), None)`. Does NOT match if the user only owns specific +/// chapters of that volume. +/// - **Chapter-only release**: matches any owned key with the same +/// chapter, regardless of volume. +/// - **No volume and no chapter**: never auto-ignored. +/// +/// **Count fallback**: only when `has_any_volume_metadata` is false (no +/// book has volume metadata at all). For a volume-N release, treat +/// `1..=volumes_owned_count` as owned. We do not apply the count fallback +/// to chapter-only releases. +pub fn should_auto_ignore( + release_volume: Option<i32>, + release_chapter: Option<f64>, + owned: &OwnedReleaseKeys, +) -> bool { + match (release_volume, release_chapter) { + (None, None) => false, + + (Some(v), Some(c)) => owned.keys.iter().any(|(ov, oc)| match (ov, oc) { + (Some(ov), Some(oc)) => *ov == v && chapter_eq(*oc, c), + (Some(ov), None) => *ov == v, + _ => false, + }), + + (Some(v), None) => { + let direct = owned + .keys + .iter() + .any(|(ov, oc)| matches!((ov, oc), (Some(ov), None) if *ov == v)); + if direct { + return true; + } + // Count fallback: only when no book has volume metadata. 
+ if !owned.has_any_volume_metadata && owned.volumes_owned_count > 0 { + return (v as i64) <= owned.volumes_owned_count; + } + false + } + + (None, Some(c)) => owned + .keys + .iter() + .any(|(_, oc)| matches!(oc, Some(oc) if chapter_eq(*oc, c))), + } +} + +/// Tolerant equality for chapter numbers. `f64` because both sides come +/// from DB columns; the values are typically small decimals (e.g. `12.5`) +/// and exact equality is fine for the realistic range. +fn chapter_eq(a: f64, b: f64) -> bool { + (a - b).abs() < 1e-6 +} + +#[cfg(test)] +mod tests { + use super::*; + + fn owned(keys: Vec<(Option<i32>, Option<f64>)>) -> OwnedReleaseKeys { + let has_any_volume_metadata = keys.iter().any(|(v, _)| v.is_some()); + let volumes_owned_count = keys + .iter() + .filter(|(v, c)| v.is_some() && c.is_none()) + .count() as i64; + OwnedReleaseKeys { + keys, + has_any_volume_metadata, + volumes_owned_count, + } + } + + #[test] + fn volume_release_owned_as_whole_volume() { + let o = owned(vec![(Some(1), None), (Some(2), None)]); + assert!(should_auto_ignore(Some(1), None, &o)); + assert!(should_auto_ignore(Some(2), None, &o)); + assert!(!should_auto_ignore(Some(3), None, &o)); + } + + #[test] + fn volume_release_not_matched_by_chapter_in_volume() { + // User only has chapter 5 of volume 1, not the whole volume. + let o = owned(vec![(Some(1), Some(5.0))]); + assert!(!should_auto_ignore(Some(1), None, &o)); + } + + #[test] + fn chapter_release_matches_any_volume() { + let o = owned(vec![(Some(2), Some(12.0))]); + // Release "Ch 12, vol unknown" → owned by virtue of having ch 12 of vol 2. 
+ assert!(should_auto_ignore(None, Some(12.0), &o)); + assert!(!should_auto_ignore(None, Some(13.0), &o)); + } + + #[test] + fn chapter_release_matches_chapter_only_owned() { + let o = owned(vec![(None, Some(7.0))]); + assert!(should_auto_ignore(None, Some(7.0), &o)); + assert!(!should_auto_ignore(None, Some(8.0), &o)); + } + + #[test] + fn chapter_release_not_matched_by_owned_volume() { + // User owns volume 1 (whole). Release is "Ch 5". + // We do NOT infer ch 5 is in vol 1 — chapter→volume mapping unreliable. + let o = owned(vec![(Some(1), None)]); + assert!(!should_auto_ignore(None, Some(5.0), &o)); + } + + #[test] + fn vol_plus_chapter_release_matches_exact_pair() { + let o = owned(vec![(Some(1), Some(5.0))]); + assert!(should_auto_ignore(Some(1), Some(5.0), &o)); + assert!(!should_auto_ignore(Some(1), Some(6.0), &o)); + assert!(!should_auto_ignore(Some(2), Some(5.0), &o)); + } + + #[test] + fn vol_plus_chapter_release_matches_whole_volume() { + // Whole volume implies all chapters in it. + let o = owned(vec![(Some(1), None)]); + assert!(should_auto_ignore(Some(1), Some(5.0), &o)); + assert!(should_auto_ignore(Some(1), Some(99.5), &o)); + } + + #[test] + fn count_fallback_active_when_no_metadata() { + // No book has volume metadata, but volumes_owned_count = 2. + let o = OwnedReleaseKeys { + keys: vec![], + has_any_volume_metadata: false, + volumes_owned_count: 2, + }; + assert!(should_auto_ignore(Some(1), None, &o)); + assert!(should_auto_ignore(Some(2), None, &o)); + assert!(!should_auto_ignore(Some(3), None, &o)); + } + + #[test] + fn count_fallback_inactive_when_metadata_present() { + // User owns vols 3, 5, 7 (with metadata). Count fallback must NOT + // hide vol 1 — that's the bug the metadata path fixes. 
+ let o = owned(vec![(Some(3), None), (Some(5), None), (Some(7), None)]); + assert!(!should_auto_ignore(Some(1), None, &o)); + assert!(should_auto_ignore(Some(3), None, &o)); + assert!(!should_auto_ignore(Some(4), None, &o)); + } + + #[test] + fn count_fallback_does_not_apply_to_chapter_releases() { + let o = OwnedReleaseKeys { + keys: vec![], + has_any_volume_metadata: false, + volumes_owned_count: 5, + }; + assert!(!should_auto_ignore(None, Some(3.0), &o)); + } + + #[test] + fn release_with_no_volume_or_chapter_never_ignored() { + let o = owned(vec![(Some(1), None)]); + assert!(!should_auto_ignore(None, None, &o)); + } + + #[test] + fn empty_owned_set_never_ignores() { + let o = OwnedReleaseKeys::default(); + assert!(!should_auto_ignore(Some(1), None, &o)); + assert!(!should_auto_ignore(None, Some(1.0), &o)); + assert!(!should_auto_ignore(Some(1), Some(1.0), &o)); + } + + #[test] + fn fractional_chapter_matches() { + let o = owned(vec![(Some(1), Some(12.5))]); + assert!(should_auto_ignore(None, Some(12.5), &o)); + assert!(!should_auto_ignore(None, Some(12.0), &o)); + } +} diff --git a/src/services/release/backoff.rs b/src/services/release/backoff.rs new file mode 100644 index 00000000..c0c48a39 --- /dev/null +++ b/src/services/release/backoff.rs @@ -0,0 +1,236 @@ +//! Per-host backoff for release-source polls. +//! +//! Plugins that share a domain (rare today, but possible in the future when, +//! e.g., a Nyaa scraper and a Nyaa RSS plugin both hit `nyaa.si`) need to +//! cooperate on rate-limit signals. The signal is observed by the polling +//! task (via `ReleasePollResponse.upstream_status` or an RPC error) and +//! converted into a per-domain backoff multiplier that the scheduler +//! consults when picking the next poll time. +//! +//! Implementation details: +//! +//! - Backoff state is in-memory only. A scheduler restart clears it; the +//! next poll will hit the upstream cleanly. We could persist it, but +//! 
429/503 signals are typically minutes-fresh, not hours-fresh. +//! - State is keyed by `host` (lowercased), which we extract via a small +//! parser to avoid pulling in the `url` crate. Inputs we expect (`https://nyaa.si/...`, +//! `nyaa.si`, etc.) all extract identically. +//! - Multiplier doubles per consecutive failure (1.0 → 2.0 → 4.0 → … → cap). +//! - On success the multiplier resets to 1.0 immediately. + +use std::collections::HashMap; +use std::sync::Arc; +use std::time::{Duration, Instant}; + +use tokio::sync::RwLock; + +/// Maximum backoff multiplier. Caps the doubling growth so a long stretch +/// of 503s doesn't push the next poll to "next month." +pub const MAX_BACKOFF_MULTIPLIER: f64 = 16.0; + +/// Maximum age we trust a backoff signal for. After this elapses, the +/// stored multiplier is treated as 1.0 (a multiplier "expires"). +/// +/// Set to 24h: longer than any reasonable upstream rate-limit window, but +/// short enough that a stale signal can't permanently throttle a source. +pub const BACKOFF_MAX_AGE: Duration = Duration::from_secs(24 * 3_600); + +/// The two status codes we treat as backoff signals. +pub const HTTP_TOO_MANY_REQUESTS: u16 = 429; +pub const HTTP_SERVICE_UNAVAILABLE: u16 = 503; + +/// Returns true if a status code should trigger backoff growth. +pub fn is_backoff_status(status: u16) -> bool { + matches!(status, HTTP_TOO_MANY_REQUESTS | HTTP_SERVICE_UNAVAILABLE) +} + +/// Tracker for per-host backoff multipliers. Cheap to clone — wraps an +/// `Arc<RwLock<...>>` internally so the scheduler and polling tasks see a +/// shared view. +#[derive(Debug, Clone, Default)] +pub struct HostBackoff { + inner: Arc<RwLock<HashMap<String, BackoffEntry>>>, +} + +#[derive(Debug, Clone, Copy)] +struct BackoffEntry { + multiplier: f64, + updated_at: Instant, +} + +impl HostBackoff { + pub fn new() -> Self { + Self::default() + } + + /// Record a successful poll for `url`. Resets the host's multiplier to 1.0. 
+ /// Tolerates `url` being a host or a full URL. + pub async fn record_success(&self, url_or_host: &str) { + let host = match host_of(url_or_host) { + Some(h) => h, + None => return, + }; + self.inner.write().await.remove(&host); + } + + /// Record an HTTP error for `url`. If `status` is a recognized backoff + /// signal, doubles the host's multiplier (capped). Other statuses are + /// no-ops at this layer. + pub async fn record_http_error(&self, url_or_host: &str, status: u16) { + if !is_backoff_status(status) { + return; + } + let host = match host_of(url_or_host) { + Some(h) => h, + None => return, + }; + let mut guard = self.inner.write().await; + let entry = guard.entry(host).or_insert(BackoffEntry { + multiplier: 1.0, + updated_at: Instant::now(), + }); + entry.multiplier = (entry.multiplier * 2.0).min(MAX_BACKOFF_MULTIPLIER); + if entry.multiplier < 2.0 { + // First failure starts at 2.0, not 1.0 — we want immediate + // visible delay on the first 429/503. + entry.multiplier = 2.0; + } + entry.updated_at = Instant::now(); + } + + /// Return the current multiplier for `url`. `1.0` when there's no + /// backoff active or the entry has expired (`> BACKOFF_MAX_AGE`). + pub async fn multiplier(&self, url_or_host: &str) -> f64 { + let host = match host_of(url_or_host) { + Some(h) => h, + None => return 1.0, + }; + let guard = self.inner.read().await; + match guard.get(&host) { + Some(entry) if entry.updated_at.elapsed() <= BACKOFF_MAX_AGE => entry.multiplier, + _ => 1.0, + } + } +} + +/// Extract a normalized host from `url_or_host`. Returns `None` for empty +/// inputs or strings that don't look hostlike. +/// +/// This is intentionally tiny: we don't need full RFC 3986 parsing — +/// callers feed us either a bare host or `scheme://host[:port]/path`. +pub fn host_of(url_or_host: &str) -> Option<String> { + let s = url_or_host.trim(); + if s.is_empty() { + return None; + } + // Strip scheme. 
+ let after_scheme = match s.split_once("://") { + Some((_, rest)) => rest, + None => s, + }; + // Strip path/query/fragment — first '/', '?', '#' wins. + let host_with_port = after_scheme.split(['/', '?', '#']).next().unwrap_or(""); + if host_with_port.is_empty() { + return None; + } + // Strip port (rightmost ':' that isn't inside brackets — IPv6 caveat + // is fine to ignore for our use case). + let host = host_with_port + .rsplit_once(':') + .map(|(h, _)| h) + .unwrap_or(host_with_port); + if host.is_empty() { + return None; + } + Some(host.to_ascii_lowercase()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn host_of_extracts_from_url() { + assert_eq!( + host_of("https://nyaa.si/?q=foo").as_deref(), + Some("nyaa.si") + ); + assert_eq!( + host_of("HTTP://Example.com/path").as_deref(), + Some("example.com") + ); + assert_eq!( + host_of("http://localhost:8080/x").as_deref(), + Some("localhost") + ); + } + + #[test] + fn host_of_accepts_bare_host() { + assert_eq!(host_of("nyaa.si").as_deref(), Some("nyaa.si")); + assert_eq!(host_of(" Foo.Bar ").as_deref(), Some("foo.bar")); + } + + #[test] + fn host_of_handles_empty_and_garbage() { + assert!(host_of("").is_none()); + assert!(host_of(" ").is_none()); + assert!(host_of("://").is_none()); + } + + #[tokio::test] + async fn no_backoff_by_default() { + let b = HostBackoff::new(); + assert_eq!(b.multiplier("https://nyaa.si/").await, 1.0); + } + + #[tokio::test] + async fn first_429_jumps_to_two() { + let b = HostBackoff::new(); + b.record_http_error("https://nyaa.si/", 429).await; + assert_eq!(b.multiplier("https://nyaa.si/").await, 2.0); + } + + #[tokio::test] + async fn consecutive_failures_double_until_cap() { + let b = HostBackoff::new(); + for _ in 0..10 { + b.record_http_error("https://nyaa.si/", 503).await; + } + assert_eq!(b.multiplier("nyaa.si").await, MAX_BACKOFF_MULTIPLIER); + } + + #[tokio::test] + async fn success_resets_backoff() { + let b = HostBackoff::new(); + 
b.record_http_error("https://nyaa.si/", 429).await; + b.record_http_error("https://nyaa.si/", 429).await; + assert!(b.multiplier("nyaa.si").await > 1.0); + + b.record_success("https://nyaa.si/").await; + assert_eq!(b.multiplier("nyaa.si").await, 1.0); + } + + #[tokio::test] + async fn unrelated_host_is_unaffected() { + let b = HostBackoff::new(); + b.record_http_error("https://nyaa.si/", 429).await; + assert_eq!(b.multiplier("mangaupdates.com").await, 1.0); + } + + #[tokio::test] + async fn non_backoff_status_is_ignored() { + let b = HostBackoff::new(); + b.record_http_error("https://nyaa.si/", 500).await; + b.record_http_error("https://nyaa.si/", 404).await; + assert_eq!(b.multiplier("nyaa.si").await, 1.0); + } + + #[test] + fn is_backoff_status_recognizes_429_and_503() { + assert!(is_backoff_status(429)); + assert!(is_backoff_status(503)); + assert!(!is_backoff_status(200)); + assert!(!is_backoff_status(500)); + } +} diff --git a/src/services/release/candidate.rs b/src/services/release/candidate.rs new file mode 100644 index 00000000..48ce99b5 --- /dev/null +++ b/src/services/release/candidate.rs @@ -0,0 +1,256 @@ +//! Wire-format `ReleaseCandidate` and parsing helpers. +//! +//! Plugins emit candidates over `releases/record` (and as the response of +//! `releases/poll` in Phase 4). The host rejects malformed candidates and +//! drops below-threshold candidates before reaching the ledger. + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +/// A release candidate emitted by a `release_source` plugin. +/// +/// The series match is split out into its own struct so the plugin can +/// communicate *why* it matched (alias hit vs external-ID hit) and *how +/// confident* it is. The host applies the threshold gate against the +/// `series_match.confidence` field. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReleaseCandidate { + pub series_match: SeriesMatch, + /// Stable per-source release identifier (e.g. Nyaa view ID, MU release ID). + pub external_release_id: String, + /// Optional chapter number; supports decimals for fractional chapters. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub chapter: Option<f64>, + /// Optional volume number. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub volume: Option<i32>, + /// ISO 639-1 language code (`"en"`, `"es"`, etc.). + pub language: String, + /// Free-form per-source format hints (e.g. `{"jxl": true}`). + #[serde(default, skip_serializing_if = "Option::is_none")] + pub format_hints: Option<serde_json::Value>, + /// Group or uploader name for display. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub group_or_uploader: Option<String>, + /// URL the user can navigate to in order to acquire/read the release. + /// Conventionally a human-readable landing page (Nyaa view page, + /// MangaUpdates release page). + pub payload_url: String, + /// Optional second URL describing how to actually fetch the bits — a + /// `.torrent` file, a magnet link, or a direct download. Set together + /// with [`Self::media_url_kind`] or leave both unset. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub media_url: Option<String>, + /// Classifies what [`Self::media_url`] points at. Required when + /// `media_url` is set, must be unset otherwise. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub media_url_kind: Option<MediaUrlKind>, + /// Optional torrent info hash (enables cross-source dedup). + #[serde(default, skip_serializing_if = "Option::is_none")] + pub info_hash: Option<String>, + /// Free-form metadata bag (preserved on the ledger row). 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub metadata: Option<serde_json::Value>, + /// When the upstream source observed this release. + pub observed_at: DateTime<Utc>, +} + +/// Classifies what [`ReleaseCandidate::media_url`] points at so the UI can +/// pick a kind-specific icon and the host can surface it consistently across +/// sources. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum MediaUrlKind { + /// HTTP(S) URL to a `.torrent` file. + Torrent, + /// `magnet:` URI. + Magnet, + /// HTTP(S) URL to the file itself (DDL host, CDN, etc.). + Direct, + /// Anything that doesn't fit the above; UI renders a generic icon. + Other, +} + +impl MediaUrlKind { + /// Stable, lowercase string used in the database column and on the + /// wire. Mirrors the serde `rename_all = "lowercase"` representation. + pub fn as_str(&self) -> &'static str { + match self { + Self::Torrent => "torrent", + Self::Magnet => "magnet", + Self::Direct => "direct", + Self::Other => "other", + } + } +} + +/// Match details emitted alongside a candidate. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SeriesMatch { + /// Codex series ID (UUID). + pub codex_series_id: Uuid, + /// `0.0..=1.0`. The host drops candidates below the threshold. + pub confidence: f64, + /// Free-form reason string for tracing/debug. e.g. `"alias-exact"`, + /// `"mangaupdates_id"`, `"normalized-prefix"`. + pub reason: String, +} + +/// Reason a candidate was rejected by [`super::matcher::evaluate`]. +#[derive(Debug, Clone, PartialEq)] +pub enum CandidateReject { + /// `series_match.confidence` is NaN or outside `0.0..=1.0`. + InvalidConfidence(f64), + /// `series_match.confidence < threshold`. + BelowThreshold { confidence: f64, threshold: f64 }, + /// `payload_url` is empty / whitespace. + EmptyPayloadUrl, + /// `media_url` is set but empty / whitespace. 
+ EmptyMediaUrl, + /// `media_url` is set without `media_url_kind`, or vice versa. + /// The host requires the two travel together so the UI can always + /// resolve a kind for the icon. + MediaUrlPairMismatch, + /// `external_release_id` is empty / whitespace. + EmptyExternalReleaseId, + /// `language` is empty. + EmptyLanguage, + /// `chapter` is NaN or non-finite. + InvalidChapter, + /// `observed_at` is in the future by more than [`MAX_FUTURE_SKEW_S`] seconds. + ObservedAtTooFarInFuture, +} + +impl std::fmt::Display for CandidateReject { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::InvalidConfidence(v) => write!( + f, + "invalid confidence value {} (must be a finite number in [0, 1])", + v + ), + Self::BelowThreshold { + confidence, + threshold, + } => write!(f, "confidence {} below threshold {}", confidence, threshold), + Self::EmptyPayloadUrl => write!(f, "payload_url cannot be empty"), + Self::EmptyMediaUrl => write!(f, "media_url cannot be empty when set"), + Self::MediaUrlPairMismatch => write!( + f, + "media_url and media_url_kind must be set together (or both unset)" + ), + Self::EmptyExternalReleaseId => write!(f, "external_release_id cannot be empty"), + Self::EmptyLanguage => write!(f, "language cannot be empty"), + Self::InvalidChapter => write!(f, "chapter must be a finite number"), + Self::ObservedAtTooFarInFuture => { + write!(f, "observed_at is too far in the future") + } + } + } +} + +/// Maximum allowable skew when validating `observed_at` (1 hour). +/// +/// Plugins occasionally see clock skew between their host and the upstream +/// feed. We accept a small grace window so a slightly-future timestamp doesn't +/// drop the candidate, but reject obvious garbage (e.g. year 2099 dates). 
+pub const MAX_FUTURE_SKEW_S: i64 = 3_600; + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + fn good_candidate() -> ReleaseCandidate { + ReleaseCandidate { + series_match: SeriesMatch { + codex_series_id: Uuid::new_v4(), + confidence: 0.92, + reason: "alias-exact".to_string(), + }, + external_release_id: "rel-123".to_string(), + chapter: Some(143.0), + volume: None, + language: "en".to_string(), + format_hints: Some(json!({"jxl": true})), + group_or_uploader: Some("tsuna69".to_string()), + payload_url: "https://nyaa.si/view/12345".to_string(), + media_url: Some("https://nyaa.si/download/12345.torrent".to_string()), + media_url_kind: Some(MediaUrlKind::Torrent), + info_hash: Some("deadbeef".to_string()), + metadata: None, + observed_at: Utc::now(), + } + } + + #[test] + fn round_trips_camel_case_json() { + let cand = good_candidate(); + let json = serde_json::to_value(&cand).unwrap(); + // Field naming sanity checks. + assert!(json["seriesMatch"].is_object()); + assert_eq!( + json["seriesMatch"]["codexSeriesId"], + json!(cand.series_match.codex_series_id) + ); + assert_eq!(json["externalReleaseId"], "rel-123"); + assert_eq!(json["payloadUrl"], "https://nyaa.si/view/12345"); + let back: ReleaseCandidate = serde_json::from_value(json).unwrap(); + assert_eq!(back.external_release_id, cand.external_release_id); + assert_eq!(back.series_match.confidence, cand.series_match.confidence); + } + + #[test] + fn optional_fields_are_skipped_when_none() { + let mut cand = good_candidate(); + cand.chapter = None; + cand.volume = None; + cand.format_hints = None; + cand.info_hash = None; + cand.metadata = None; + cand.group_or_uploader = None; + cand.media_url = None; + cand.media_url_kind = None; + let json = serde_json::to_value(&cand).unwrap(); + let obj = json.as_object().unwrap(); + for key in [ + "chapter", + "volume", + "formatHints", + "infoHash", + "metadata", + "groupOrUploader", + "mediaUrl", + "mediaUrlKind", + ] { + 
assert!(!obj.contains_key(key), "expected `{}` to be skipped", key); + } + } + + #[test] + fn media_url_kind_serializes_lowercase() { + let cand = good_candidate(); + let json = serde_json::to_value(&cand).unwrap(); + assert_eq!(json["mediaUrl"], "https://nyaa.si/download/12345.torrent"); + assert_eq!(json["mediaUrlKind"], "torrent"); + } + + #[test] + fn media_url_kind_round_trips_via_json() { + for (kind, expected) in [ + (MediaUrlKind::Torrent, "torrent"), + (MediaUrlKind::Magnet, "magnet"), + (MediaUrlKind::Direct, "direct"), + (MediaUrlKind::Other, "other"), + ] { + let json = serde_json::to_value(kind).unwrap(); + assert_eq!(json, serde_json::Value::String(expected.to_string())); + let back: MediaUrlKind = serde_json::from_value(json).unwrap(); + assert_eq!(back, kind); + assert_eq!(kind.as_str(), expected); + } + } +} diff --git a/src/services/release/languages.rs b/src/services/release/languages.rs new file mode 100644 index 00000000..ad9659f1 --- /dev/null +++ b/src/services/release/languages.rs @@ -0,0 +1,186 @@ +//! Language-preference resolution for release-tracking. +//! +//! Aggregation feeds (e.g. MangaUpdates) emit candidates in many languages. +//! Plugins filter client-side using a per-series preference list, falling back +//! to a server-wide default and finally to a hardcoded `["en"]` fallback if +//! the setting is absent. +//! +//! This module is the single canonical resolver so the API, the +//! `releases/list_tracked` reverse-RPC, and the matcher all agree on the +//! effective list. + +use anyhow::Result; +use sea_orm::DatabaseConnection; +use serde_json::Value; + +use crate::db::repositories::SettingsRepository; + +/// Settings key for the server-wide default language list. +pub const SERVER_DEFAULT_LANGUAGES_KEY: &str = "release_tracking.default_languages"; + +/// Hardcoded fallback when the server-wide setting is missing or unparseable. +/// Mirrors the migration seed. 
+pub fn hardcoded_default_languages() -> Vec<String> { + vec!["en".to_string()] +} + +/// Read the server-wide default-languages setting. Returns the hardcoded +/// fallback (`["en"]`) if the setting is missing or malformed. +pub async fn server_default_languages(db: &DatabaseConnection) -> Vec<String> { + match SettingsRepository::get_value::<Vec<String>>(db, SERVER_DEFAULT_LANGUAGES_KEY).await { + Ok(Some(langs)) if !langs.is_empty() => normalize(langs), + // Missing setting, empty list, or parse error — fall back to ["en"]. + // Empty-list as fallback is a footgun (would silently hide everything), + // so we treat it as "use the hardcoded default." + _ => hardcoded_default_languages(), + } +} + +/// Resolve the effective language list for a single tracked series. +/// +/// Precedence: per-series override (if non-empty) → server-wide default → +/// hardcoded `["en"]`. +pub fn effective_languages(per_series: Option<&Value>, server_default: &[String]) -> Vec<String> { + if let Some(v) = per_series + && let Some(arr) = v.as_array() + { + let langs: Vec<String> = arr + .iter() + .filter_map(|item| item.as_str().map(|s| s.to_string())) + .collect(); + if !langs.is_empty() { + return normalize(langs); + } + } + if !server_default.is_empty() { + return server_default.to_vec(); + } + hardcoded_default_languages() +} + +/// Whether a given candidate language is included in the effective list. +/// Case-insensitive on the language code. +pub fn includes(effective: &[String], language: &str) -> bool { + let lang = language.trim().to_lowercase(); + if lang.is_empty() { + return false; + } + effective.iter().any(|l| l.eq_ignore_ascii_case(&lang)) +} + +/// Normalize: trim, lowercase, drop empties, dedup-preserving-order. 
+fn normalize(langs: Vec<String>) -> Vec<String> { + let mut out: Vec<String> = Vec::with_capacity(langs.len()); + for raw in langs { + let lang = raw.trim().to_lowercase(); + if lang.is_empty() { + continue; + } + if !out.iter().any(|existing| existing == &lang) { + out.push(lang); + } + } + out +} + +/// Convenience: read both the per-series and server-default lists, return the +/// effective list. Used by the API and the reverse-RPC `list_tracked` handler. +pub async fn resolve_for_series( + db: &DatabaseConnection, + per_series: Option<&Value>, +) -> Result<Vec<String>> { + let server_default = server_default_languages(db).await; + Ok(effective_languages(per_series, &server_default)) +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn hardcoded_default_is_english() { + assert_eq!(hardcoded_default_languages(), vec!["en".to_string()]); + } + + #[test] + fn per_series_overrides_server_default() { + let per_series = json!(["es", "fr"]); + let server = vec!["en".to_string()]; + assert_eq!( + effective_languages(Some(&per_series), &server), + vec!["es".to_string(), "fr".to_string()] + ); + } + + #[test] + fn null_per_series_falls_back_to_server_default() { + let server = vec!["en".to_string(), "es".to_string()]; + assert_eq!( + effective_languages(None, &server), + vec!["en".to_string(), "es".to_string()] + ); + } + + #[test] + fn empty_per_series_falls_back_to_server_default() { + let per_series = json!([]); + let server = vec!["en".to_string()]; + assert_eq!( + effective_languages(Some(&per_series), &server), + vec!["en".to_string()] + ); + } + + #[test] + fn empty_server_default_falls_back_to_hardcoded() { + let server: Vec<String> = vec![]; + assert_eq!(effective_languages(None, &server), vec!["en".to_string()]); + } + + #[test] + fn normalizes_case_and_whitespace() { + let per_series = json!(["EN", " es ", "FR"]); + let server = vec!["en".to_string()]; + assert_eq!( + effective_languages(Some(&per_series), &server), + 
vec!["en".to_string(), "es".to_string(), "fr".to_string()] + ); + } + + #[test] + fn dedups_preserving_order() { + let per_series = json!(["en", "es", "EN", "es"]); + let server = vec!["en".to_string()]; + assert_eq!( + effective_languages(Some(&per_series), &server), + vec!["en".to_string(), "es".to_string()] + ); + } + + #[test] + fn ignores_non_string_entries() { + let per_series = json!(["en", 42, null, "es"]); + let server = vec!["en".to_string()]; + assert_eq!( + effective_languages(Some(&per_series), &server), + vec!["en".to_string(), "es".to_string()] + ); + } + + #[test] + fn includes_is_case_insensitive() { + let effective = vec!["en".to_string(), "es".to_string()]; + assert!(includes(&effective, "en")); + assert!(includes(&effective, "EN")); + assert!(includes(&effective, " es ")); + assert!(!includes(&effective, "fr")); + } + + #[test] + fn includes_rejects_empty_language() { + let effective = vec!["en".to_string()]; + assert!(!includes(&effective, "")); + assert!(!includes(&effective, " ")); + } +} diff --git a/src/services/release/matcher.rs b/src/services/release/matcher.rs new file mode 100644 index 00000000..64e5e5dc --- /dev/null +++ b/src/services/release/matcher.rs @@ -0,0 +1,307 @@ +//! Confidence-threshold gate and dedup-on-record orchestration. +//! +//! The plugin produces a [`ReleaseCandidate`]; the matcher decides whether +//! the host should record it in the ledger. Two-stage check: +//! +//! 1. Validate fields (no NaN, no empty IDs/URLs, sane `observed_at`). +//! 2. Confidence-threshold gate (default 0.7, override via per-series +//! `confidence_threshold_override`). +//! +//! The actual ledger write goes through +//! [`crate::db::repositories::ReleaseLedgerRepository::record`], which is +//! itself idempotent on `(source_id, external_release_id)` and `info_hash`. 
+ +use chrono::Utc; +use uuid::Uuid; + +use super::candidate::{CandidateReject, MAX_FUTURE_SKEW_S, ReleaseCandidate}; +use crate::db::repositories::NewReleaseEntry; + +/// Default confidence threshold (`0.7`). +pub const DEFAULT_CONFIDENCE_THRESHOLD: f64 = 0.7; + +/// Validated candidate that has passed the threshold gate. Holds onto the +/// candidate so callers can map it directly into a ledger entry. +#[derive(Debug, Clone)] +pub struct AcceptedCandidate { + pub candidate: ReleaseCandidate, +} + +impl AcceptedCandidate { + /// Convert into the repository-facing insert payload, attaching the + /// `source_id` (the host knows which source the candidate came from - + /// the candidate itself doesn't carry it). + pub fn into_ledger_entry(self, source_id: Uuid) -> NewReleaseEntry { + let c = self.candidate; + let media_url_kind = c.media_url_kind.map(|k| k.as_str().to_string()); + NewReleaseEntry { + series_id: c.series_match.codex_series_id, + source_id, + external_release_id: c.external_release_id, + info_hash: c.info_hash, + chapter: c.chapter, + volume: c.volume, + language: Some(c.language), + format_hints: c.format_hints, + group_or_uploader: c.group_or_uploader, + payload_url: c.payload_url, + media_url: c.media_url, + media_url_kind, + confidence: c.series_match.confidence, + metadata: c.metadata, + observed_at: c.observed_at, + initial_state: None, + } + } +} + +/// Validate a candidate and apply the confidence threshold. +/// +/// Returns `Ok(AcceptedCandidate)` on accept, `Err(CandidateReject)` on reject. +pub fn evaluate( + candidate: ReleaseCandidate, + threshold: f64, +) -> Result<AcceptedCandidate, CandidateReject> { + // 1. Required-field validation. We do this before the threshold check so + // a malformed-but-high-confidence candidate still gets rejected with + // the most informative error. + if candidate.payload_url.trim().is_empty() { + return Err(CandidateReject::EmptyPayloadUrl); + } + // media_url and media_url_kind must travel together. 
Either both + // are present (and media_url is non-empty) or both are absent. + match (&candidate.media_url, &candidate.media_url_kind) { + (Some(url), Some(_)) if url.trim().is_empty() => { + return Err(CandidateReject::EmptyMediaUrl); + } + (Some(_), None) | (None, Some(_)) => { + return Err(CandidateReject::MediaUrlPairMismatch); + } + _ => {} + } + if candidate.external_release_id.trim().is_empty() { + return Err(CandidateReject::EmptyExternalReleaseId); + } + if candidate.language.trim().is_empty() { + return Err(CandidateReject::EmptyLanguage); + } + if let Some(ch) = candidate.chapter + && !ch.is_finite() + { + return Err(CandidateReject::InvalidChapter); + } + + let now = Utc::now(); + if (candidate.observed_at - now).num_seconds() > MAX_FUTURE_SKEW_S { + return Err(CandidateReject::ObservedAtTooFarInFuture); + } + + // 2. Confidence validation + threshold. + let confidence = candidate.series_match.confidence; + if !confidence.is_finite() || !(0.0..=1.0).contains(&confidence) { + return Err(CandidateReject::InvalidConfidence(confidence)); + } + if confidence < threshold { + return Err(CandidateReject::BelowThreshold { + confidence, + threshold, + }); + } + + Ok(AcceptedCandidate { candidate }) +} + +/// Resolve the active confidence threshold: per-series override wins, then +/// the global default. 
+pub fn resolve_threshold(per_series_override: Option<f64>) -> f64 { + match per_series_override { + Some(v) if v.is_finite() && (0.0..=1.0).contains(&v) => v, + _ => DEFAULT_CONFIDENCE_THRESHOLD, + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::services::release::candidate::SeriesMatch; + use chrono::Duration; + + fn make_candidate(confidence: f64) -> ReleaseCandidate { + ReleaseCandidate { + series_match: SeriesMatch { + codex_series_id: Uuid::new_v4(), + confidence, + reason: "test".to_string(), + }, + external_release_id: "rel-1".to_string(), + chapter: Some(143.0), + volume: None, + language: "en".to_string(), + format_hints: None, + group_or_uploader: None, + payload_url: "https://example.com/r/1".to_string(), + media_url: None, + media_url_kind: None, + info_hash: None, + metadata: None, + observed_at: Utc::now(), + } + } + + #[test] + fn accepts_candidate_at_threshold() { + let cand = make_candidate(0.7); + let result = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD); + assert!(result.is_ok()); + } + + #[test] + fn drops_below_threshold_candidate() { + let cand = make_candidate(0.5); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert!(matches!(err, CandidateReject::BelowThreshold { .. 
})); + } + + #[test] + fn rejects_nan_confidence() { + let cand = make_candidate(f64::NAN); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert!(matches!(err, CandidateReject::InvalidConfidence(_))); + } + + #[test] + fn rejects_out_of_range_confidence() { + let cand = make_candidate(1.5); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert!(matches!(err, CandidateReject::InvalidConfidence(_))); + } + + #[test] + fn rejects_empty_payload_url() { + let mut cand = make_candidate(0.95); + cand.payload_url = " ".to_string(); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert_eq!(err, CandidateReject::EmptyPayloadUrl); + } + + #[test] + fn rejects_empty_external_release_id() { + let mut cand = make_candidate(0.95); + cand.external_release_id = "".to_string(); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert_eq!(err, CandidateReject::EmptyExternalReleaseId); + } + + #[test] + fn rejects_empty_language() { + let mut cand = make_candidate(0.95); + cand.language = "".to_string(); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert_eq!(err, CandidateReject::EmptyLanguage); + } + + #[test] + fn rejects_invalid_chapter() { + let mut cand = make_candidate(0.95); + cand.chapter = Some(f64::INFINITY); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert_eq!(err, CandidateReject::InvalidChapter); + } + + #[test] + fn rejects_far_future_observed_at() { + let mut cand = make_candidate(0.95); + cand.observed_at = Utc::now() + Duration::days(2); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert_eq!(err, CandidateReject::ObservedAtTooFarInFuture); + } + + #[test] + fn accepts_candidate_within_clock_skew() { + let mut cand = make_candidate(0.95); + // Within MAX_FUTURE_SKEW_S grace. 
+ cand.observed_at = Utc::now() + Duration::seconds(60); + assert!(evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).is_ok()); + } + + #[test] + fn into_ledger_entry_carries_all_fields() { + let cand = make_candidate(0.85); + let series_id = cand.series_match.codex_series_id; + let accepted = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap(); + let source_id = Uuid::new_v4(); + let entry = accepted.into_ledger_entry(source_id); + assert_eq!(entry.series_id, series_id); + assert_eq!(entry.source_id, source_id); + assert_eq!(entry.external_release_id, "rel-1"); + assert_eq!(entry.confidence, 0.85); + assert_eq!(entry.language.as_deref(), Some("en")); + assert!(entry.media_url.is_none()); + assert!(entry.media_url_kind.is_none()); + } + + #[test] + fn into_ledger_entry_carries_media_url_pair() { + use crate::services::release::candidate::MediaUrlKind; + let mut cand = make_candidate(0.9); + cand.media_url = Some("https://nyaa.si/download/1.torrent".to_string()); + cand.media_url_kind = Some(MediaUrlKind::Torrent); + let entry = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD) + .unwrap() + .into_ledger_entry(Uuid::new_v4()); + assert_eq!( + entry.media_url.as_deref(), + Some("https://nyaa.si/download/1.torrent") + ); + assert_eq!(entry.media_url_kind.as_deref(), Some("torrent")); + } + + #[test] + fn rejects_media_url_without_kind() { + let mut cand = make_candidate(0.95); + cand.media_url = Some("https://example.com/x.torrent".to_string()); + cand.media_url_kind = None; + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert_eq!(err, CandidateReject::MediaUrlPairMismatch); + } + + #[test] + fn rejects_kind_without_media_url() { + use crate::services::release::candidate::MediaUrlKind; + let mut cand = make_candidate(0.95); + cand.media_url = None; + cand.media_url_kind = Some(MediaUrlKind::Torrent); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert_eq!(err, CandidateReject::MediaUrlPairMismatch); + } + + #[test] + fn 
rejects_empty_media_url() { + use crate::services::release::candidate::MediaUrlKind; + let mut cand = make_candidate(0.95); + cand.media_url = Some(" ".to_string()); + cand.media_url_kind = Some(MediaUrlKind::Torrent); + let err = evaluate(cand, DEFAULT_CONFIDENCE_THRESHOLD).unwrap_err(); + assert_eq!(err, CandidateReject::EmptyMediaUrl); + } + + #[test] + fn resolve_threshold_uses_default_when_override_is_none() { + assert_eq!(resolve_threshold(None), DEFAULT_CONFIDENCE_THRESHOLD); + } + + #[test] + fn resolve_threshold_uses_override_when_valid() { + assert_eq!(resolve_threshold(Some(0.5)), 0.5); + } + + #[test] + fn resolve_threshold_falls_back_for_invalid_override() { + assert_eq!( + resolve_threshold(Some(f64::NAN)), + DEFAULT_CONFIDENCE_THRESHOLD + ); + assert_eq!(resolve_threshold(Some(1.5)), DEFAULT_CONFIDENCE_THRESHOLD); + assert_eq!(resolve_threshold(Some(-0.1)), DEFAULT_CONFIDENCE_THRESHOLD); + } +} diff --git a/src/services/release/mod.rs b/src/services/release/mod.rs new file mode 100644 index 00000000..8bd9a5bd --- /dev/null +++ b/src/services/release/mod.rs @@ -0,0 +1,30 @@ +//! Release-tracking services. +//! +//! Hosts core-side logic for the release-source plugin pipeline: +//! +//! - [`candidate`] — wire-format `ReleaseCandidate` and parsing helpers. +//! - [`matcher`] — confidence-threshold gate and dedup-on-record orchestration. +//! - [`backoff`] — per-host backoff state for rate-limit (429) and +//! unavailability (503) signals, shared across plugins that hit the +//! same domain. +//! - [`schedule`] — interval resolution and jitter for the polling +//! scheduler. +//! - [`upstream_gap`] — Phase 5 metadata-derived publication-gap signal +//! surfaced on the series DTO. Read-side only; does not write to the +//! release ledger. +//! - [`seed`] — derives tracking defaults (aliases, `latest_known_*`, +//! per-axis tracking flags) from existing series data so a user toggling +//! tracking on doesn't have to fill in a setup form. +//! +//! 
Plugins emit candidates over the reverse-RPC channel; the matcher applies
//! the threshold and hands the survivors to the ledger repository, which is
//! itself idempotent on the natural dedup keys.

pub mod auto_ignore;
pub mod backoff;
pub mod candidate;
pub mod languages;
pub mod matcher;
pub mod schedule;
pub mod seed;
pub mod upstream_gap;

// =====================================================================
// src/services/release/schedule.rs
// =====================================================================

//! Cron-schedule resolution for release-source polling.
//!
//! Resolution chain (consumed by [`crate::scheduler::release_sources`]):
//!
//! 1. `release_sources.cron_schedule` — the per-source override wins when set.
//! 2. Otherwise the server-wide `release_tracking.default_cron_schedule`
//!    setting.
//! 3. Otherwise the compile-time fallback ([`DEFAULT_CRON_SCHEDULE`]).
//!
//! Per-host backoff lives in [`super::backoff`] and is consulted at
//! poll-fire time rather than at scheduler-load time: a throttled host's
//! tick is short-circuited instead of rewriting the cron expression, which
//! keeps the cron source-of-truth simple — one row, one schedule.

use crate::services::settings::SettingsService;

/// Compile-time fallback when neither the per-source override nor the
/// server-wide setting are present. Daily at midnight (5-field POSIX cron).
pub const DEFAULT_CRON_SCHEDULE: &str = "0 0 * * *";

/// Setting key for the server-wide default.
pub const SETTING_DEFAULT_CRON_SCHEDULE: &str = "release_tracking.default_cron_schedule";

/// Read the server-wide default cron schedule. Falls back to
/// [`DEFAULT_CRON_SCHEDULE`] when the setting is missing or blank.
+pub async fn read_default_cron_schedule(settings: &SettingsService) -> String { + let raw = settings + .get_string(SETTING_DEFAULT_CRON_SCHEDULE, DEFAULT_CRON_SCHEDULE) + .await + .unwrap_or_else(|_| DEFAULT_CRON_SCHEDULE.to_string()); + let trimmed = raw.trim(); + if trimmed.is_empty() { + DEFAULT_CRON_SCHEDULE.to_string() + } else { + trimmed.to_string() + } +} + +/// Resolve the effective cron schedule for a source row. +/// +/// `per_source` is `release_sources.cron_schedule` (NULL when the row is +/// inheriting). `server_default` is the resolved server-wide default. The +/// returned string is the raw 5- or 6-field cron expression; callers +/// normalize to the 6-field tokio-cron-scheduler format via +/// [`crate::utils::cron::normalize_cron_expression`]. +pub fn resolve_cron_schedule(per_source: Option<&str>, server_default: &str) -> String { + if let Some(cron) = per_source.map(str::trim).filter(|s| !s.is_empty()) { + cron.to_string() + } else if !server_default.trim().is_empty() { + server_default.trim().to_string() + } else { + DEFAULT_CRON_SCHEDULE.to_string() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn resolve_uses_per_source_when_set() { + assert_eq!( + resolve_cron_schedule(Some("0 */6 * * *"), "0 0 * * *"), + "0 */6 * * *" + ); + } + + #[test] + fn resolve_falls_back_to_server_default_when_per_source_blank() { + assert_eq!(resolve_cron_schedule(None, "0 0 * * *"), "0 0 * * *"); + assert_eq!(resolve_cron_schedule(Some(""), "0 0 * * *"), "0 0 * * *"); + assert_eq!(resolve_cron_schedule(Some(" "), "0 0 * * *"), "0 0 * * *"); + } + + #[test] + fn resolve_uses_compile_time_default_when_both_blank() { + assert_eq!(resolve_cron_schedule(None, ""), DEFAULT_CRON_SCHEDULE); + assert_eq!(resolve_cron_schedule(None, " "), DEFAULT_CRON_SCHEDULE); + } +} diff --git a/src/services/release/seed.rs b/src/services/release/seed.rs new file mode 100644 index 00000000..3a824bea --- /dev/null +++ b/src/services/release/seed.rs @@ -0,0 +1,525 @@ 
+//! Seed defaults for `series_tracking` rows. +//! +//! Called whenever a series transitions to `tracked = true`, and from the +//! retired-but-still-routed `BackfillTrackingFromMetadata` task. The goal is +//! to remove the empty-form UX where a user toggles tracking on and is then +//! presented with a panel full of inputs they have to manually populate. +//! +//! What gets seeded: +//! +//! - **Aliases** (`series_aliases`): inserted from `series.name`, +//! `series_metadata.title`, `series_metadata.title_sort`, and English +//! alternate titles. Non-Latin (CJK, Korean, Cyrillic, …) aliases are +//! skipped today because the alias matcher in the Nyaa / MangaUpdates +//! plugins normalizes Latin text only — non-Latin entries would never +//! match against typical uploader filenames and would just clutter the +//! alias list. Append-only: existing aliases (including user-added) are +//! never deleted by re-seeding. +//! +//! - **`latest_known_chapter` / `latest_known_volume`**: set to the local +//! max chapter / volume across the series's books. The first poll after +//! seeding then announces only releases strictly above the high-water +//! mark, so a user with v01..v15 on disk doesn't get spammed with +//! announcements for chapters they already own. Overwritten on every +//! re-seed (per the "reset all to derived defaults on re-track" rule). +//! +//! - **`track_chapters` / `track_volumes`**: inferred from the series's +//! book classification. If any book in the series has +//! `book_metadata.chapter` populated, `track_chapters = true`; same for +//! volumes. A series organized purely by volume gets `track_chapters = +//! false`, suppressing chapter-axis announcements. If neither axis has +//! any classified data (fresh import), both default to `true` so +//! announcements aren't silently dropped. +//! +//! `tracked` itself is **not** flipped here — that's the caller's +//! responsibility, since this function is called from both the per-series +//! 
PATCH handler (which interprets the user's intent) and the bulk +//! track-all endpoint. +//! +//! Re-running the seed on an already-tracked series is safe and is the +//! intended idempotent behavior. The retired backfill task uses this +//! property to refresh derived state across all series after a metadata +//! refresh. + +use anyhow::{Context, Result}; +use sea_orm::DatabaseConnection; +use uuid::Uuid; + +use crate::db::entities::series_aliases::alias_source; +use crate::db::repositories::{ + AlternateTitleRepository, SeriesAliasRepository, SeriesMetadataRepository, SeriesRepository, + SeriesTrackingRepository, TrackingUpdate, +}; + +/// Outcome of a seed run, suitable for logging and surfacing in API responses. +/// +/// `PartialEq` (not `Eq`) because `f32` doesn't have total equality. Tests +/// compare individual fields rather than whole reports anyway. +#[derive(Debug, Default, Clone, PartialEq)] +pub struct SeedReport { + /// Aliases newly inserted (does not count duplicates skipped). + pub aliases_inserted: usize, + /// Aliases skipped because they were not Latin-script. + pub aliases_skipped_non_latin: usize, + /// Aliases skipped because an equivalent already existed for the series. + pub aliases_skipped_duplicate: usize, + /// Final `track_chapters` value after seeding. + pub track_chapters: bool, + /// Final `track_volumes` value after seeding. + pub track_volumes: bool, + /// Final `latest_known_chapter` after seeding (`None` when no books + /// have a classified chapter). f32 to match the aggregate column. + pub latest_known_chapter: Option<f32>, + /// Final `latest_known_volume` after seeding (`None` when no books + /// have a classified volume). + pub latest_known_volume: Option<i32>, +} + +/// Seed (or re-seed) tracking defaults for a single series. +/// +/// Updates / inserts a `series_tracking` row with the auto-derived +/// `track_chapters`, `track_volumes`, `latest_known_chapter`, +/// `latest_known_volume` fields. 
Does **not** modify `tracked` — the caller +/// owns that flip. +/// +/// Idempotent: safe to call repeatedly. Aliases are append-only; tracking +/// flags overwrite on every call. +pub async fn seed_tracking_for_series( + db: &DatabaseConnection, + series_id: Uuid, +) -> Result<SeedReport> { + let series = SeriesRepository::get_by_id(db, series_id) + .await + .with_context(|| format!("Failed to load series {} for seeding", series_id))? + .ok_or_else(|| anyhow::anyhow!("series {} not found", series_id))?; + + let metadata = SeriesMetadataRepository::get_by_series_id(db, series_id) + .await + .context("Failed to load series metadata for seeding")?; + + let mut report = SeedReport::default(); + + // ------------------------------------------------------------------- + // 1. Aliases — collect Latin-script candidates from name + metadata, + // bulk-insert (idempotent on duplicates). + // ------------------------------------------------------------------- + let mut candidates: Vec<String> = Vec::new(); + candidates.push(series.name.clone()); + if let Some(meta) = metadata.as_ref() { + candidates.push(meta.title.clone()); + if let Some(sort) = meta.title_sort.as_ref() + && !sort.trim().is_empty() + { + candidates.push(sort.clone()); + } + } + let alt_titles = AlternateTitleRepository::get_for_series(db, series_id) + .await + .context("Failed to load alternate titles")?; + for alt in alt_titles { + if !alt.title.trim().is_empty() { + candidates.push(alt.title); + } + } + + // Filter and dedupe (case-insensitive trimmed) so the bulk-insert call + // doesn't churn on identical inputs from different sources. 
+ let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new(); + let mut accepted: Vec<String> = Vec::new(); + for raw in candidates { + let trimmed = raw.trim(); + if trimmed.is_empty() { + continue; + } + if !is_latin_alias(trimmed) { + report.aliases_skipped_non_latin += 1; + continue; + } + let key = trimmed.to_lowercase(); + if !seen.insert(key) { + continue; + } + accepted.push(trimmed.to_string()); + } + + if !accepted.is_empty() { + let refs: Vec<&str> = accepted.iter().map(|s| s.as_str()).collect(); + let inserted = + SeriesAliasRepository::bulk_create(db, series_id, &refs, alias_source::METADATA) + .await + .context("Failed to bulk-insert seeded aliases")?; + report.aliases_inserted = inserted; + report.aliases_skipped_duplicate = accepted.len().saturating_sub(inserted); + } + + // ------------------------------------------------------------------- + // 2. Per-axis tracking flags + latest_known_* from book classification. + // ------------------------------------------------------------------- + let aggregates = SeriesRepository::get_book_classification_aggregates(db, series_id) + .await + .context("Failed to load book classification aggregates for seeding")?; + + // Default both axes to true when nothing is classified — losing + // announcements silently on a fresh series is worse than getting one + // false-positive on an axis the series doesn't actually use. + let any_classified = + aggregates.local_max_chapter.is_some() || aggregates.local_max_volume.is_some(); + let track_chapters = if any_classified { + aggregates.local_max_chapter.is_some() + } else { + true + }; + let track_volumes = if any_classified { + aggregates.local_max_volume.is_some() + } else { + true + }; + + let update = TrackingUpdate { + track_chapters: Some(track_chapters), + track_volumes: Some(track_volumes), + // The persisted column is f64; widen from the aggregate's f32. 
+ latest_known_chapter: Some(aggregates.local_max_chapter.map(f64::from)), + latest_known_volume: Some(aggregates.local_max_volume), + ..Default::default() + }; + SeriesTrackingRepository::upsert(db, series_id, update) + .await + .context("Failed to upsert series tracking row during seeding")?; + + report.track_chapters = track_chapters; + report.track_volumes = track_volumes; + report.latest_known_chapter = aggregates.local_max_chapter; + report.latest_known_volume = aggregates.local_max_volume; + + Ok(report) +} + +/// Whether an alias string is composed entirely of Latin-script characters +/// plus common typography (digits, whitespace, punctuation). Non-Latin +/// scripts (CJK, Korean, Cyrillic, etc.) are rejected today because the +/// alias matcher in the Nyaa / MangaUpdates plugins normalizes Latin text +/// only; a non-Latin alias would never match against typical uploader +/// filenames and would just clutter the alias list. +/// +/// Conservative implementation: accept if every alphabetic character is +/// ASCII. This passes "Solo Leveling", "Don't Toy with Me", "Re:Zero", +/// "Bocchi the Rock!", and rejects anything containing CJK ideographs, +/// Hangul, Hiragana/Katakana, Cyrillic, etc. Diacritics (é, ñ, ü, …) are +/// non-ASCII alphabetic and are also rejected — users with such titles can +/// add them as manual aliases. We can widen this later if it bites. +fn is_latin_alias(s: &str) -> bool { + s.chars() + .filter(|c| c.is_alphabetic()) + .all(|c| c.is_ascii()) + // Reject empty / pure-punctuation strings as well; downstream + // create() would error on them anyway. 
+ && s.chars().any(|c| c.is_ascii_alphanumeric()) +} + +#[cfg(test)] +mod tests { + use super::*; + use chrono::Utc; + use sea_orm::{ActiveModelTrait, Set}; + + use crate::db::ScanningStrategy; + use crate::db::entities::{book_metadata, books}; + use crate::db::repositories::{ + AlternateTitleRepository, BookMetadataRepository, BookRepository, LibraryRepository, + SeriesAliasRepository, SeriesRepository, SeriesTrackingRepository, + }; + use crate::db::test_helpers::create_test_db; + + #[test] + fn is_latin_alias_accepts_latin_strings() { + assert!(is_latin_alias("Solo Leveling")); + assert!(is_latin_alias("Don't Toy with Me")); + assert!(is_latin_alias("Re:Zero - Starting Life in Another World")); + assert!(is_latin_alias("Bocchi the Rock!")); + assert!(is_latin_alias("JoJo's Bizarre Adventure Part 7")); + assert!(is_latin_alias("Boruto: Two Blue Vortex")); + } + + #[test] + fn is_latin_alias_rejects_non_latin_strings() { + assert!(!is_latin_alias("나 혼자만 레벨업")); // Korean Hangul + assert!(!is_latin_alias("僕のヒーローアカデミア")); // Japanese + assert!(!is_latin_alias("ダンダダン")); // Katakana + assert!(!is_latin_alias("Война и мир")); // Cyrillic + } + + #[test] + fn is_latin_alias_rejects_diacritics_and_empty_inputs() { + // Conservative: diacritics are non-ASCII, rejected for now. + assert!(!is_latin_alias("Pokémon")); + assert!(!is_latin_alias("Crónica")); + // Pure punctuation / whitespace. + assert!(!is_latin_alias("")); + assert!(!is_latin_alias(" ")); + assert!(!is_latin_alias("!!!---!!!")); + } + + async fn make_series(db: &DatabaseConnection, library_id: Uuid, name: &str) -> Uuid { + let series = SeriesRepository::create(db, library_id, name, None) + .await + .unwrap(); + // SeriesRepository::create already creates a metadata row with title = + // name, so we don't need to insert another one. 
+ let _ = library_id; + series.id + } + + async fn add_classified_book( + db: &DatabaseConnection, + series_id: Uuid, + library_id: Uuid, + path: &str, + volume: Option<i32>, + chapter: Option<f32>, + ) { + let book = books::Model { + id: Uuid::new_v4(), + series_id, + library_id, + file_path: path.to_string(), + file_name: path.rsplit('/').next().unwrap_or(path).to_string(), + file_size: 1024, + file_hash: format!("hash_{}", Uuid::new_v4()), + partial_hash: String::new(), + format: "cbz".to_string(), + page_count: 10, + deleted: false, + analyzed: false, + analysis_error: None, + analysis_errors: None, + modified_at: Utc::now(), + created_at: Utc::now(), + updated_at: Utc::now(), + thumbnail_path: None, + thumbnail_generated_at: None, + koreader_hash: None, + epub_positions: None, + epub_spine_items: None, + }; + let created = BookRepository::create(db, &book, None).await.unwrap(); + let meta = BookMetadataRepository::create_with_title_and_number(db, created.id, None, None) + .await + .unwrap(); + let mut active: book_metadata::ActiveModel = meta.into(); + active.volume = Set(volume); + active.chapter = Set(chapter); + active.update(db).await.unwrap(); + } + + #[tokio::test] + async fn seed_inserts_latin_aliases_and_skips_non_latin() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Solo Leveling").await; + AlternateTitleRepository::create(conn, s, "Korean", "나 혼자만 레벨업") + .await + .unwrap(); + AlternateTitleRepository::create(conn, s, "Romaji", "Na Honjaman Lebel-eob") + .await + .unwrap(); + + let report = seed_tracking_for_series(conn, s).await.unwrap(); + // "Solo Leveling" is in both `series.name` and `series_metadata.title`, + // so dedup folds them; "Na Honjaman Lebel-eob" adds one. Korean alt + // is rejected as non-Latin. 
+ assert_eq!(report.aliases_inserted, 2); + assert_eq!(report.aliases_skipped_non_latin, 1); + + let aliases = SeriesAliasRepository::get_for_series(conn, s) + .await + .unwrap(); + let texts: Vec<&str> = aliases.iter().map(|a| a.alias.as_str()).collect(); + assert!(texts.contains(&"Solo Leveling")); + assert!(texts.contains(&"Na Honjaman Lebel-eob")); + assert!(!texts.iter().any(|a| a.contains('나'))); + } + + #[tokio::test] + async fn seed_is_idempotent_for_aliases() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Berserk").await; + + let first = seed_tracking_for_series(conn, s).await.unwrap(); + assert_eq!(first.aliases_inserted, 1); + + let second = seed_tracking_for_series(conn, s).await.unwrap(); + assert_eq!(second.aliases_inserted, 0); + assert_eq!(second.aliases_skipped_duplicate, 1); + + let aliases = SeriesAliasRepository::get_for_series(conn, s) + .await + .unwrap(); + assert_eq!(aliases.len(), 1); + } + + #[tokio::test] + async fn seed_preserves_user_added_aliases_on_re_seed() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Boruto").await; + + seed_tracking_for_series(conn, s).await.unwrap(); + // User adds a custom alias their uploader uses. + SeriesAliasRepository::create(conn, s, "Boruto: Two Blue Vortex", alias_source::MANUAL) + .await + .unwrap(); + + // Re-seed should not remove the manual alias. 
+ let _ = seed_tracking_for_series(conn, s).await.unwrap(); + let aliases = SeriesAliasRepository::get_for_series(conn, s) + .await + .unwrap(); + let texts: Vec<&str> = aliases.iter().map(|a| a.alias.as_str()).collect(); + assert!(texts.contains(&"Boruto")); + assert!(texts.contains(&"Boruto: Two Blue Vortex")); + } + + #[tokio::test] + async fn seed_writes_track_flags_and_latest_known_with_no_books() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Empty Series").await; + + let report = seed_tracking_for_series(conn, s).await.unwrap(); + // Nothing classified — both axes default to true. + assert!(report.track_chapters); + assert!(report.track_volumes); + assert_eq!(report.latest_known_chapter, None); + assert_eq!(report.latest_known_volume, None); + + let row = SeriesTrackingRepository::get(conn, s) + .await + .unwrap() + .unwrap(); + assert!(row.track_chapters); + assert!(row.track_volumes); + assert!(!row.tracked, "seeding must not flip `tracked` on"); + assert_eq!(row.latest_known_chapter, None); + assert_eq!(row.latest_known_volume, None); + } + + #[tokio::test] + async fn seed_infers_track_volumes_only_for_volume_organized_series() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Volume Series").await; + add_classified_book(conn, s, lib.id, "/v1.cbz", Some(1), None).await; + add_classified_book(conn, s, lib.id, "/v2.cbz", Some(2), None).await; + + let report = seed_tracking_for_series(conn, s).await.unwrap(); + assert!(!report.track_chapters); + assert!(report.track_volumes); + assert_eq!(report.latest_known_chapter, None); + assert_eq!(report.latest_known_volume, Some(2)); + } + + #[tokio::test] + async fn 
seed_infers_track_chapters_only_for_chapter_organized_series() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Chapter Series").await; + add_classified_book(conn, s, lib.id, "/c1.cbz", None, Some(1.0)).await; + add_classified_book(conn, s, lib.id, "/c2.cbz", None, Some(142.5)).await; + + let report = seed_tracking_for_series(conn, s).await.unwrap(); + assert!(report.track_chapters); + assert!(!report.track_volumes); + assert_eq!(report.latest_known_chapter, Some(142.5)); + assert_eq!(report.latest_known_volume, None); + } + + #[tokio::test] + async fn seed_keeps_both_axes_when_books_have_both_classifications() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Mixed Series").await; + add_classified_book(conn, s, lib.id, "/v1.cbz", Some(1), None).await; + add_classified_book(conn, s, lib.id, "/v2c10.cbz", Some(2), Some(10.0)).await; + + let report = seed_tracking_for_series(conn, s).await.unwrap(); + assert!(report.track_chapters); + assert!(report.track_volumes); + assert_eq!(report.latest_known_chapter, Some(10.0)); + assert_eq!(report.latest_known_volume, Some(2)); + } + + #[tokio::test] + async fn seed_overwrites_track_flags_and_latest_known_on_re_seed() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Repolled").await; + add_classified_book(conn, s, lib.id, "/v1.cbz", Some(1), None).await; + + // First seed: only volume axis on disk, latest_known_volume = 1. 
+ let first = seed_tracking_for_series(conn, s).await.unwrap(); + assert_eq!(first.latest_known_volume, Some(1)); + + // User adds a new book with vol 5; re-seed bumps latest_known_volume. + add_classified_book(conn, s, lib.id, "/v5.cbz", Some(5), None).await; + let second = seed_tracking_for_series(conn, s).await.unwrap(); + assert_eq!(second.latest_known_volume, Some(5)); + + let row = SeriesTrackingRepository::get(conn, s) + .await + .unwrap() + .unwrap(); + assert_eq!(row.latest_known_volume, Some(5)); + } + + #[tokio::test] + async fn seed_does_not_flip_tracked() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s = make_series(conn, lib.id, "Untracked").await; + + seed_tracking_for_series(conn, s).await.unwrap(); + let row = SeriesTrackingRepository::get(conn, s) + .await + .unwrap() + .unwrap(); + assert!(!row.tracked); + } + + #[tokio::test] + async fn seed_reports_missing_series_as_error() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let bogus = Uuid::new_v4(); + let err = seed_tracking_for_series(conn, bogus).await.unwrap_err(); + assert!(err.to_string().contains("not found")); + } +} diff --git a/src/services/release/upstream_gap.rs b/src/services/release/upstream_gap.rs new file mode 100644 index 00000000..e9bf97dd --- /dev/null +++ b/src/services/release/upstream_gap.rs @@ -0,0 +1,440 @@ +//! Upstream-publication gap signal (Phase 5 of release-tracking). +//! +//! Computes the per-series delta between *original-language* publication +//! counts (from MangaBaka / AniList / etc., stored as +//! `series_metadata.total_chapter_count` / `total_volume_count`) and +//! *local* counts (the highest classified `book_metadata.chapter|volume` +//! across the series, surfaced as `local_max_chapter` / `local_max_volume`). +//! +//! 
The gap is purely a UI signal — it does **not** write `release_ledger` +//! rows and does **not** advance `series_tracking.latest_known_*`. Original- +//! language publication facts are not the same category as +//! translation/scanlation releases (which Phase 6's MangaUpdates plugin +//! handles). See the `release-tracking` plan, Key Technical Decisions, for +//! the three-signal separation. + +use crate::db::entities::series_external_ids::Model as SeriesExternalId; +use crate::db::entities::series_tracking::Model as SeriesTrackingRow; + +/// Computed gap between upstream publication and local content for a series. +/// +/// `None` fields collapse the corresponding badge in the UI: untracked +/// series, axis-disabled series (`track_chapters: false`), missing provider +/// counts, and zero/negative gaps all yield `None` for that axis. +#[derive(Debug, Clone, PartialEq, Default)] +pub struct UpstreamGap { + pub chapter_gap: Option<f32>, + pub volume_gap: Option<i32>, + /// Display name of the metadata provider that supplied the upstream + /// counts (e.g., "MangaBaka", "AniList"). Populated whenever at least + /// one axis has a positive gap; set to `None` when both axes are `None` + /// or when no recognized provider external ID is associated with the + /// series. + pub provider: Option<String>, +} + +impl UpstreamGap { + /// Returns `true` when neither axis has a positive gap. + #[allow(dead_code)] + pub fn is_empty(&self) -> bool { + self.chapter_gap.is_none() && self.volume_gap.is_none() + } +} + +/// Inputs for computing the upstream gap. All inputs are already loaded by +/// the series DTO build path (no new query required). 
+pub struct UpstreamGapInputs<'a> { + pub tracking: Option<&'a SeriesTrackingRow>, + pub total_chapter_count: Option<f32>, + pub total_volume_count: Option<i32>, + pub local_max_chapter: Option<f32>, + pub local_max_volume: Option<i32>, + pub external_ids: &'a [SeriesExternalId], +} + +/// Compute the upstream gap for a series given preloaded inputs. +/// +/// Returns an empty `UpstreamGap` when: +/// - the series is not tracked (no row, or `tracked = false`); +/// - the corresponding `track_*` axis is disabled; +/// - the provider count is `None`; +/// - the local max is `None` (chapter axis only — for the volume axis a +/// missing local max is treated as `0` so a brand-new tracked series with +/// `total_volume_count = 14` and no local books shows "+14 vol upstream"). +/// +/// Float chapter math is rounded to 1 decimal place to suppress +/// `145.0 - 144.9999 = 0.0001`-style noise. +pub fn compute_upstream_gap(inputs: &UpstreamGapInputs<'_>) -> UpstreamGap { + let tracking = match inputs.tracking { + Some(t) if t.tracked => t, + _ => return UpstreamGap::default(), + }; + + let chapter_gap = if tracking.track_chapters { + compute_chapter_gap(inputs.total_chapter_count, inputs.local_max_chapter) + } else { + None + }; + + let volume_gap = if tracking.track_volumes { + compute_volume_gap(inputs.total_volume_count, inputs.local_max_volume) + } else { + None + }; + + let provider = if chapter_gap.is_some() || volume_gap.is_some() { + pick_provider(inputs.external_ids) + } else { + None + }; + + UpstreamGap { + chapter_gap, + volume_gap, + provider, + } +} + +fn compute_chapter_gap(total: Option<f32>, local_max: Option<f32>) -> Option<f32> { + let total = total?; + // Treat a missing local max as 0 so newly-tracked series surface the + // full upstream count rather than silently hiding it. 
+    let local = local_max.unwrap_or(0.0);
+    let raw = total - local;
+    let rounded = (raw * 10.0).round() / 10.0;
+    if rounded > 0.0 { Some(rounded) } else { None }
+}
+
+fn compute_volume_gap(total: Option<i32>, local_max: Option<i32>) -> Option<i32> {
+    let total = total?;
+    let local = local_max.unwrap_or(0);
+    let gap = total - local;
+    if gap > 0 { Some(gap) } else { None }
+}
+
+/// Pick the provider display name to attribute the gap to.
+///
+/// We have no per-field provenance on `series_metadata.total_*_count`
+/// (every metadata-provider plugin merges into the same column). This
+/// helper falls back to a fixed priority order keyed off the series'
+/// external IDs — MangaBaka first (it's the primary count source for
+/// manga), then AniList, MAL, MangaDex, Kitsu, ComicVine, OpenLibrary,
+/// then any other plugin source. Manual / file-derived sources
+/// (`comicinfo`, `epub`, `manual`) are not displayed as providers
+/// because they don't supply upstream counts.
+///
+/// Returns `None` when no recognized provider source is attached to the
+/// series; the badge tooltip in Phase 7 then falls back to a generic
+/// message.
+fn pick_provider(external_ids: &[SeriesExternalId]) -> Option<String> {
+    const PRIORITY: &[(&str, &str)] = &[
+        ("plugin:mangabaka", "MangaBaka"),
+        ("plugin:anilist", "AniList"),
+        ("plugin:myanimelist", "MyAnimeList"),
+        ("plugin:mangadex", "MangaDex"),
+        ("plugin:kitsu", "Kitsu"),
+        ("plugin:comicvine", "ComicVine"),
+        ("plugin:openlibrary", "OpenLibrary"),
+    ];
+
+    for (source_key, display) in PRIORITY {
+        if external_ids.iter().any(|x| x.source == *source_key) {
+            return Some((*display).to_string());
+        }
+    }
+
+    // Fallback: any plugin source that wasn't in the priority list.
+ external_ids + .iter() + .find_map(|x| x.plugin_name().map(capitalize)) +} + +fn capitalize(s: &str) -> String { + let mut chars = s.chars(); + match chars.next() { + None => String::new(), + Some(c) => c.to_uppercase().chain(chars).collect(), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use chrono::Utc; + use uuid::Uuid; + + fn tracking_row(tracked: bool, track_chapters: bool, track_volumes: bool) -> SeriesTrackingRow { + SeriesTrackingRow { + series_id: Uuid::new_v4(), + tracked, + track_chapters, + track_volumes, + latest_known_chapter: None, + latest_known_volume: None, + volume_chapter_map: None, + poll_interval_override_s: None, + confidence_threshold_override: None, + languages: None, + created_at: Utc::now(), + updated_at: Utc::now(), + } + } + + fn ext_id(source: &str) -> SeriesExternalId { + SeriesExternalId { + id: Uuid::new_v4(), + series_id: Uuid::new_v4(), + source: source.to_string(), + external_id: "1234".to_string(), + external_url: None, + metadata_hash: None, + last_synced_at: None, + created_at: Utc::now(), + updated_at: Utc::now(), + } + } + + #[test] + fn untracked_series_has_no_gap() { + let tracking = tracking_row(false, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert!(gap.is_empty()); + assert_eq!(gap.provider, None); + } + + #[test] + fn no_tracking_row_has_no_gap() { + let inputs = UpstreamGapInputs { + tracking: None, + total_chapter_count: Some(145.0), + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert!(gap.is_empty()); + } + + #[test] + fn tracked_series_with_provider_ahead_returns_positive_gap() { + let tracking = 
tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.chapter_gap, Some(3.0)); + assert_eq!(gap.volume_gap, Some(1)); + assert_eq!(gap.provider.as_deref(), Some("MangaBaka")); + } + + #[test] + fn track_chapters_false_suppresses_chapter_gap_only() { + let tracking = tracking_row(true, false, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.chapter_gap, None); + assert_eq!(gap.volume_gap, Some(1)); + assert_eq!(gap.provider.as_deref(), Some("MangaBaka")); + } + + #[test] + fn track_volumes_false_suppresses_volume_gap_only() { + let tracking = tracking_row(true, true, false); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.chapter_gap, Some(3.0)); + assert_eq!(gap.volume_gap, None); + } + + #[test] + fn missing_provider_count_suppresses_axis() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: None, + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.chapter_gap, None); + assert_eq!(gap.volume_gap, Some(1)); + } + + #[test] + fn 
zero_or_negative_gap_yields_none() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(142.0), + total_volume_count: Some(14), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert!(gap.is_empty()); + + let inputs_local_ahead = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(140.0), + total_volume_count: Some(13), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + assert!(compute_upstream_gap(&inputs_local_ahead).is_empty()); + } + + #[test] + fn float_noise_within_one_decimal_collapses_to_no_gap() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: None, + local_max_chapter: Some(144.9999), + local_max_volume: None, + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + // 145.0 - 144.9999 = 0.0001 -> rounds to 0.0 -> None. 
+ assert_eq!(gap.chapter_gap, None); + } + + #[test] + fn fractional_chapter_gap_rounds_to_one_decimal() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.5), + total_volume_count: None, + local_max_chapter: Some(143.0), + local_max_volume: None, + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.chapter_gap, Some(2.5)); + } + + #[test] + fn missing_local_max_chapter_treats_local_as_zero() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(10.0), + total_volume_count: None, + local_max_chapter: None, + local_max_volume: None, + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.chapter_gap, Some(10.0)); + } + + #[test] + fn provider_priority_prefers_mangabaka_over_anilist() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: None, + local_max_chapter: Some(142.0), + local_max_volume: None, + external_ids: &[ext_id("plugin:anilist"), ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.provider.as_deref(), Some("MangaBaka")); + } + + #[test] + fn provider_falls_back_to_anilist_when_no_mangabaka() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: None, + local_max_chapter: Some(142.0), + local_max_volume: None, + external_ids: &[ext_id("plugin:anilist")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.provider.as_deref(), Some("AniList")); + } + + #[test] + fn provider_uses_unknown_plugin_as_capitalized_fallback() { + let tracking = tracking_row(true, true, true); + 
let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: None, + local_max_chapter: Some(142.0), + local_max_volume: None, + external_ids: &[ext_id("plugin:newprovider")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.provider.as_deref(), Some("Newprovider")); + } + + #[test] + fn non_plugin_sources_yield_no_provider() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(145.0), + total_volume_count: None, + local_max_chapter: Some(142.0), + local_max_volume: None, + external_ids: &[ext_id("comicinfo"), ext_id("manual")], + }; + let gap = compute_upstream_gap(&inputs); + assert_eq!(gap.chapter_gap, Some(3.0)); + assert_eq!(gap.provider, None); + } + + #[test] + fn provider_omitted_when_both_axes_have_no_gap() { + let tracking = tracking_row(true, true, true); + let inputs = UpstreamGapInputs { + tracking: Some(&tracking), + total_chapter_count: Some(142.0), + total_volume_count: Some(14), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_ids: &[ext_id("plugin:mangabaka")], + }; + let gap = compute_upstream_gap(&inputs); + assert!(gap.is_empty()); + assert_eq!(gap.provider, None); + } +} diff --git a/src/tasks/handlers/backfill_tracking.rs b/src/tasks/handlers/backfill_tracking.rs new file mode 100644 index 00000000..0ff2d78b --- /dev/null +++ b/src/tasks/handlers/backfill_tracking.rs @@ -0,0 +1,316 @@ +//! `BackfillTrackingFromMetadata` task handler. +//! +//! Walks series in scope and (re-)seeds tracking defaults from existing +//! data: aliases from metadata, `latest_known_*` from local book +//! classification, and per-axis `track_*` flags from book metadata. Routes +//! through `services::release::seed::seed_tracking_for_series` so the per- +//! series PATCH path, the bulk track-for-releases endpoint, and this task +//! all share one canonical seeding implementation. 
+//! +//! Does NOT toggle `tracked`. Enabling tracking is always an explicit user +//! action; this task is a maintenance pass that refreshes auto-derived +//! fields after a metadata refresh or library re-scan. + +use anyhow::Result; +use sea_orm::DatabaseConnection; +use std::sync::Arc; +use tracing::{info, warn}; +use uuid::Uuid; + +use crate::db::entities::tasks; +use crate::db::repositories::SeriesRepository; +use crate::events::EventBroadcaster; +use crate::services::release::seed::{SeedReport, seed_tracking_for_series}; +use crate::tasks::handlers::TaskHandler; +use crate::tasks::types::TaskResult; + +pub struct BackfillTrackingFromMetadataHandler; + +impl BackfillTrackingFromMetadataHandler { + pub fn new() -> Self { + Self + } +} + +impl Default for BackfillTrackingFromMetadataHandler { + fn default() -> Self { + Self::new() + } +} + +impl TaskHandler for BackfillTrackingFromMetadataHandler { + fn handle<'a>( + &'a self, + task: &'a tasks::Model, + db: &'a DatabaseConnection, + _event_broadcaster: Option<&'a Arc<EventBroadcaster>>, + ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<TaskResult>> + Send + 'a>> { + Box::pin(async move { + let library_id = task.library_id; + let series_ids: Option<Vec<Uuid>> = task + .params + .as_ref() + .and_then(|p| p.get("series_ids")) + .and_then(|v| serde_json::from_value(v.clone()).ok()); + + let scope = describe_scope(library_id, series_ids.as_deref()); + info!("Task {}: Backfilling tracking aliases ({})", task.id, scope); + + let series_to_process = resolve_series_scope(db, library_id, series_ids).await?; + let total = series_to_process.len(); + info!("Found {} series in scope", total); + + let mut summary = BackfillSummary::default(); + for series_id in series_to_process { + match seed_tracking_for_series(db, series_id).await { + Ok(report) => summary.merge(report), + Err(e) => { + warn!("Seed failed for series {}: {}", series_id, e); + summary.errors += 1; + } + } + } + + info!( + "Backfill complete 
({}): {} series processed, {} aliases inserted, \ + {} skipped duplicate, {} skipped non-latin, {} errors", + scope, + summary.processed, + summary.aliases_inserted, + summary.aliases_skipped_duplicate, + summary.aliases_skipped_non_latin, + summary.errors, + ); + + Ok(TaskResult::success_with_data( + format!( + "Processed {} series, inserted {} new aliases \ + ({} duplicates, {} non-Latin skipped, {} errors)", + summary.processed, + summary.aliases_inserted, + summary.aliases_skipped_duplicate, + summary.aliases_skipped_non_latin, + summary.errors, + ), + serde_json::json!({ + "scope": scope, + "series_processed": summary.processed, + "aliases_inserted": summary.aliases_inserted, + "aliases_skipped_duplicate": summary.aliases_skipped_duplicate, + "aliases_skipped_non_latin": summary.aliases_skipped_non_latin, + "errors": summary.errors, + }), + )) + }) + } +} + +#[derive(Default)] +struct BackfillSummary { + processed: usize, + aliases_inserted: usize, + aliases_skipped_duplicate: usize, + aliases_skipped_non_latin: usize, + errors: usize, +} + +impl BackfillSummary { + fn merge(&mut self, report: SeedReport) { + self.processed += 1; + self.aliases_inserted += report.aliases_inserted; + self.aliases_skipped_duplicate += report.aliases_skipped_duplicate; + self.aliases_skipped_non_latin += report.aliases_skipped_non_latin; + } +} + +fn describe_scope(library_id: Option<Uuid>, series_ids: Option<&[Uuid]>) -> String { + match (library_id, series_ids) { + (_, Some(ids)) => format!("scope=series_ids:{}", ids.len()), + (Some(lib), _) => format!("scope=library:{}", lib), + (None, None) => "scope=all".to_string(), + } +} + +async fn resolve_series_scope( + db: &DatabaseConnection, + library_id: Option<Uuid>, + series_ids: Option<Vec<Uuid>>, +) -> Result<Vec<Uuid>> { + if let Some(ids) = series_ids { + return Ok(ids); + } + if let Some(lib_id) = library_id { + let series_list = SeriesRepository::list_by_library(db, lib_id).await?; + return 
Ok(series_list.into_iter().map(|s| s.id).collect()); + } + let all = SeriesRepository::list_all(db).await?; + Ok(all.into_iter().map(|s| s.id).collect()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db::ScanningStrategy; + use crate::db::repositories::{ + AlternateTitleRepository, LibraryRepository, SeriesAliasRepository, SeriesRepository, + SeriesTrackingRepository, + }; + use crate::db::test_helpers::create_test_db; + + async fn make_series( + db: &DatabaseConnection, + library_id: Uuid, + name: &str, + japanese: Option<&str>, + ) -> Uuid { + let series = SeriesRepository::create(db, library_id, name, None) + .await + .unwrap(); + if let Some(jp) = japanese { + AlternateTitleRepository::create(db, series.id, "Japanese", jp) + .await + .unwrap(); + } + series.id + } + + /// The handler now delegates to `seed_tracking_for_series`; this test + /// pins the latin-only filtering behavior at the seeded layer (the + /// previous handler-internal logic seeded all scripts and is gone). + #[tokio::test] + async fn delegated_seed_inserts_latin_aliases_skipping_non_latin() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s1 = make_series( + conn, + lib.id, + "My Hero Academia", + Some("僕のヒーローアカデミア"), + ) + .await; + + let report = seed_tracking_for_series(conn, s1).await.unwrap(); + // "My Hero Academia" appears in both `series.name` and metadata title; + // dedup folds them. Japanese alt is skipped. 
+ assert_eq!(report.aliases_inserted, 1); + assert_eq!(report.aliases_skipped_non_latin, 1); + + let aliases = SeriesAliasRepository::get_for_series(conn, s1) + .await + .unwrap(); + let texts: Vec<&str> = aliases.iter().map(|a| a.alias.as_str()).collect(); + assert!(texts.contains(&"My Hero Academia")); + assert!(!texts.iter().any(|a| a.contains('僕'))); + } + + #[tokio::test] + async fn delegated_seed_is_idempotent_on_rerun() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s1 = make_series(conn, lib.id, "Series A", Some("Alt A")).await; + + let first = seed_tracking_for_series(conn, s1).await.unwrap(); + // "Series A" + "Alt A" — both Latin, both inserted. + assert_eq!(first.aliases_inserted, 2); + + let second = seed_tracking_for_series(conn, s1).await.unwrap(); + assert_eq!(second.aliases_inserted, 0); + assert_eq!(second.aliases_skipped_duplicate, 2); + + let aliases = SeriesAliasRepository::get_for_series(conn, s1) + .await + .unwrap(); + assert_eq!(aliases.len(), 2); + } + + #[tokio::test] + async fn delegated_seed_does_not_enable_tracking() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s1 = make_series(conn, lib.id, "Some Title", None).await; + + seed_tracking_for_series(conn, s1).await.unwrap(); + + let row = SeriesTrackingRepository::get(conn, s1).await.unwrap(); + assert!( + row.map(|r| !r.tracked).unwrap_or(true), + "seeding must not flip `tracked` on" + ); + } + + #[tokio::test] + async fn resolve_scope_prefers_explicit_series_ids() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let s1 = make_series(conn, lib.id, "A", 
None).await; + let _s2 = make_series(conn, lib.id, "B", None).await; + + let scoped = resolve_series_scope(conn, Some(lib.id), Some(vec![s1])) + .await + .unwrap(); + assert_eq!(scoped, vec![s1]); + } + + #[tokio::test] + async fn resolve_scope_library_returns_all_in_library() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib1 = LibraryRepository::create(conn, "L1", "/p1", ScanningStrategy::Default) + .await + .unwrap(); + let lib2 = LibraryRepository::create(conn, "L2", "/p2", ScanningStrategy::Default) + .await + .unwrap(); + let _a = make_series(conn, lib1.id, "A", None).await; + let _b = make_series(conn, lib1.id, "B", None).await; + let _c = make_series(conn, lib2.id, "C", None).await; + + let scoped = resolve_series_scope(conn, Some(lib1.id), None) + .await + .unwrap(); + assert_eq!(scoped.len(), 2); + } + + #[tokio::test] + async fn resolve_scope_no_args_returns_all_series() { + let (db, _temp) = create_test_db().await; + let conn = db.sea_orm_connection(); + let lib = LibraryRepository::create(conn, "L", "/p", ScanningStrategy::Default) + .await + .unwrap(); + let _a = make_series(conn, lib.id, "A", None).await; + let _b = make_series(conn, lib.id, "B", None).await; + + let scoped = resolve_series_scope(conn, None, None).await.unwrap(); + assert_eq!(scoped.len(), 2); + } + + #[test] + fn describe_scope_strings() { + let lib = Uuid::new_v4(); + assert!(describe_scope(None, None).starts_with("scope=all")); + assert!(describe_scope(Some(lib), None).starts_with("scope=library:")); + assert_eq!( + describe_scope(Some(lib), Some(&[Uuid::new_v4(), Uuid::new_v4()])), + "scope=series_ids:2", + ); + } + + #[test] + fn handler_creation() { + let _ = BackfillTrackingFromMetadataHandler::new(); + let _ = BackfillTrackingFromMetadataHandler; + } +} diff --git a/src/tasks/handlers/mod.rs b/src/tasks/handlers/mod.rs index edd6b5d6..98baaf11 100644 --- a/src/tasks/handlers/mod.rs +++ b/src/tasks/handlers/mod.rs @@ -8,6 +8,7 @@ use 
crate::tasks::types::TaskResult; pub mod analyze_book; pub mod analyze_series; +pub mod backfill_tracking; pub mod cleanup_book_files; pub mod cleanup_orphaned_files; pub mod cleanup_pdf_cache; @@ -21,6 +22,7 @@ pub mod generate_series_thumbnails; pub mod generate_thumbnail; pub mod generate_thumbnails; pub mod plugin_auto_match; +pub mod poll_release_source; pub mod purge_deleted; pub mod refresh_library_metadata; pub mod renumber_series; @@ -32,6 +34,7 @@ pub mod user_plugin_sync; pub use analyze_book::AnalyzeBookHandler; pub use analyze_series::AnalyzeSeriesHandler; +pub use backfill_tracking::BackfillTrackingFromMetadataHandler; pub use cleanup_book_files::CleanupBookFilesHandler; pub use cleanup_orphaned_files::CleanupOrphanedFilesHandler; pub use cleanup_pdf_cache::CleanupPdfCacheHandler; @@ -45,6 +48,7 @@ pub use generate_series_thumbnails::GenerateSeriesThumbnailsHandler; pub use generate_thumbnail::GenerateThumbnailHandler; pub use generate_thumbnails::GenerateThumbnailsHandler; pub use plugin_auto_match::PluginAutoMatchHandler; +pub use poll_release_source::PollReleaseSourceHandler; pub use purge_deleted::PurgeDeletedHandler; pub use refresh_library_metadata::RefreshLibraryMetadataHandler; pub use renumber_series::{RenumberSeriesBatchHandler, RenumberSeriesHandler}; diff --git a/src/tasks/handlers/poll_release_source.rs b/src/tasks/handlers/poll_release_source.rs new file mode 100644 index 00000000..73f5796b --- /dev/null +++ b/src/tasks/handlers/poll_release_source.rs @@ -0,0 +1,1105 @@ +//! Handler for the `PollReleaseSource` task. +//! +//! Resolves the source's owning plugin, calls `releases/poll`, runs returned +//! candidates through the matcher + threshold gate, and writes accepted +//! candidates to the ledger. On success updates `last_polled_at` (and +//! optionally `etag`); on failure records `last_error`. +//! +//! Plugins MAY also stream candidates via the `releases/record` reverse-RPC +//! during the poll call. 
Both paths land in the same ledger; cross-channel +//! dedup is handled by the ledger's `(source_id, external_release_id)` +//! constraint. +//! +//! Key invariants: +//! +//! - **Idempotent.** Re-running this task for a source that polled +//! successfully a moment ago re-hits the upstream but the ledger drops +//! duplicates. +//! - **Bounded by per-task timeout.** A long-running plugin call won't +//! block the worker pool indefinitely (see `plugin.task_request_timeout_seconds` +//! setting; defaults inherit `PluginManager::default_request_timeout`). +//! - **Permission-gated upstream.** The plugin's manifest must declare the +//! `release_source` capability; the `releases/*` reverse-RPC dispatcher +//! enforces this. This handler trusts the plugin name on the source row +//! and lets the dispatcher reject misuse. + +use anyhow::Result; +use chrono::Utc; +use sea_orm::DatabaseConnection; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use std::sync::Arc; +use std::time::Duration; +use tracing::{debug, error, info, warn}; +use uuid::Uuid; + +use crate::db::entities::release_ledger::state as ledger_state; +use crate::db::entities::release_sources::plugin_id as source_plugin_id; +use crate::db::entities::tasks; +use crate::db::repositories::{ + NewReleaseEntry, PluginsRepository, ReleaseLedgerRepository, ReleaseSourceRepository, + SeriesRepository, SeriesTrackingRepository, +}; +use crate::events::{EntityChangeEvent, EventBroadcaster}; +use crate::services::SettingsService; +use crate::services::plugin::PluginManager; +use crate::services::plugin::handle::PluginError; +use crate::services::plugin::protocol::{ReleasePollRequest, ReleasePollResponse, methods}; +use crate::services::release::auto_ignore::{OwnedReleaseKeys, should_auto_ignore}; +use crate::services::release::backoff::{HostBackoff, is_backoff_status}; +use crate::services::release::matcher::{evaluate, resolve_threshold}; +use crate::tasks::handlers::TaskHandler; +use 
crate::tasks::types::TaskResult;
+
+/// Default plugin task timeout in seconds (5 minutes — same as user_plugin_sync).
+const DEFAULT_TASK_TIMEOUT_SECS: u64 = 300;
+
+/// Result of a `PollReleaseSource` task. Stored on the `tasks.result` JSON
+/// column for observability and consumed by tests.
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct PollReleaseSourceResult {
+    pub source_id: Uuid,
+    /// Number of upstream items the plugin produced for this poll. Counts
+    /// both candidates returned inline in the response payload AND any
+    /// items the plugin streamed via `releases/record` (reported back via
+    /// `ReleasePollResponse.parsed`). Used to drive the `last_summary`
+    /// "Fetched N items" line.
+    pub candidates_returned: u32,
+    /// Number of candidates accepted by the matcher and recorded.
+    /// Includes both host-side records (from inline `response.candidates`)
+    /// and plugin-streamed records (from `ReleasePollResponse.recorded`).
+    pub candidates_recorded: u32,
+    /// Number of candidates dropped before the ledger (validation failures
+    /// or below-threshold).
+    pub candidates_rejected: u32,
+    /// Number of accepted candidates that landed as a duplicate. Includes
+    /// host-side dedupes and plugin-reported dedupes.
+    pub candidates_deduped: u32,
+    /// Whether the upstream returned `304 Not Modified` (or the plugin's
+    /// equivalent).
+    pub not_modified: bool,
+    /// Reason the poll was skipped (`source_disabled`, `plugin_disabled`,
+    /// or `core_source_no_poll_path`); `None` when a real poll happened.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub skipped_reason: Option<String>,
+}
+
+/// Handler for `PollReleaseSource`.
+pub struct PollReleaseSourceHandler { + plugin_manager: Arc<PluginManager>, + settings_service: Option<Arc<SettingsService>>, + backoff: HostBackoff, +} + +impl PollReleaseSourceHandler { + pub fn new(plugin_manager: Arc<PluginManager>) -> Self { + Self { + plugin_manager, + settings_service: None, + backoff: HostBackoff::new(), + } + } + + pub fn with_settings_service(mut self, settings_service: Arc<SettingsService>) -> Self { + self.settings_service = Some(settings_service); + self + } + + /// Override the shared backoff tracker. Most callers want the default + /// (each handler with its own state); the scheduler may pass a shared + /// one once it consumes backoff for interval scaling. + pub fn with_backoff(mut self, backoff: HostBackoff) -> Self { + self.backoff = backoff; + self + } + + #[allow(dead_code)] // Public for tests + scheduler reuse. + pub fn backoff(&self) -> HostBackoff { + self.backoff.clone() + } + + async fn task_request_timeout(&self) -> Option<Duration> { + if let Some(ref settings) = self.settings_service { + let secs = settings + .get_uint( + "plugin.task_request_timeout_seconds", + DEFAULT_TASK_TIMEOUT_SECS, + ) + .await + .unwrap_or(DEFAULT_TASK_TIMEOUT_SECS); + Some(Duration::from_secs(secs)) + } else { + None + } + } +} + +impl TaskHandler for PollReleaseSourceHandler { + fn handle<'a>( + &'a self, + task: &'a tasks::Model, + db: &'a DatabaseConnection, + event_broadcaster: Option<&'a Arc<EventBroadcaster>>, + ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<TaskResult>> + Send + 'a>> { + Box::pin(async move { + // Extract task params. 
+ let params = task + .params + .as_ref() + .ok_or_else(|| anyhow::anyhow!("Missing params in poll_release_source task"))?; + let source_id: Uuid = params + .get("source_id") + .and_then(|v| v.as_str()) + .and_then(|s| Uuid::parse_str(s).ok()) + .ok_or_else(|| anyhow::anyhow!("Missing or invalid source_id in params"))?; + + info!("Task {}: Polling release source {}", task.id, source_id); + + // Load the source row. + let source = match ReleaseSourceRepository::get_by_id(db, source_id).await { + Ok(Some(s)) => s, + Ok(None) => { + let msg = format!("source {} not found", source_id); + warn!("Task {}: {}", task.id, msg); + return Ok(TaskResult::failure(msg)); + } + Err(e) => { + let msg = format!("failed to load source {}: {}", source_id, e); + error!("Task {}: {}", task.id, msg); + return Ok(TaskResult::failure(msg)); + } + }; + + if !source.enabled { + debug!( + "Task {}: Source {} is disabled; skipping", + task.id, source.id + ); + return Ok(TaskResult::success_with_data( + "source disabled", + json!(PollReleaseSourceResult { + source_id, + skipped_reason: Some("source_disabled".to_string()), + ..Default::default() + }), + )); + } + + // Synthetic in-core sources (Phase 5 metadata-piggyback) don't + // route through a plugin process. We don't have a code path for + // them yet; record a benign skip so the scheduler doesn't loop. + if source.plugin_id == source_plugin_id::CORE { + debug!( + "Task {}: Source {} is in-core (plugin_id=core); skipping (Phase 5 territory)", + task.id, source.id + ); + return Ok(TaskResult::success_with_data( + "core-source has no poll path yet", + json!(PollReleaseSourceResult { + source_id, + skipped_reason: Some("core_source_no_poll_path".to_string()), + ..Default::default() + }), + )); + } + + // Resolve the plugin row by name. 
+ let plugin_row = match PluginsRepository::get_by_name(db, &source.plugin_id).await { + Ok(Some(p)) => p, + Ok(None) => { + let msg = format!("plugin {} not registered", source.plugin_id); + warn!("Task {}: {}", task.id, msg); + record_error(db, &source, event_broadcaster, &msg).await; + return Ok(TaskResult::failure(msg)); + } + Err(e) => { + let msg = format!("failed to lookup plugin: {}", e); + error!("Task {}: {}", task.id, msg); + record_error(db, &source, event_broadcaster, &msg).await; + return Ok(TaskResult::failure(msg)); + } + }; + + if !plugin_row.enabled { + let msg = format!("plugin {} disabled", source.plugin_id); + warn!("Task {}: {}", task.id, msg); + return Ok(TaskResult::success_with_data( + "plugin disabled", + json!(PollReleaseSourceResult { + source_id, + skipped_reason: Some("plugin_disabled".to_string()), + ..Default::default() + }), + )); + } + + // Spawn / get the plugin handle. + let handle = match self.plugin_manager.get_or_spawn(plugin_row.id).await { + Ok(h) => h, + Err(e) => { + let msg = format!("failed to start plugin: {}", e); + error!("Task {}: {}", task.id, msg); + record_error(db, &source, event_broadcaster, &msg).await; + return Ok(TaskResult::failure(msg)); + } + }; + + // Build the poll request. 
+            let req = ReleasePollRequest {
+                source_id: source.id,
+                source_key: Some(source.source_key.clone()),
+                config: source.config.clone(),
+                etag: source.etag.clone(),
+            };
+            let timeout = self.task_request_timeout().await;
+            let response_fut = handle.call_method::<ReleasePollRequest, ReleasePollResponse>(
+                methods::RELEASES_POLL,
+                req,
+            );
+            let response_result = if let Some(t) = timeout {
+                match tokio::time::timeout(t, response_fut).await {
+                    Ok(r) => r,
+                    Err(_) => {
+                        let msg = format!("poll timed out after {:?}", t);
+                        warn!("Task {}: {}", task.id, msg);
+                        record_error(db, &source, event_broadcaster, &msg).await;
+                        return Ok(TaskResult::failure(msg));
+                    }
+                }
+            } else {
+                response_fut.await
+            };
+
+            let response = match response_result {
+                Ok(r) => r,
+                Err(e) => {
+                    // Plugin errors map to source `last_error`. If the
+                    // plugin reports a rate limit via RPC we currently
+                    // only log the retry-after hint (hostname unknown
+                    // here); the source is still marked as errored.
+                    let msg = format!("plugin call failed: {}", e);
+                    if let PluginError::Rpc(_) = &e
+                        && let Some(retry_after) = e.rpc_retry_after_seconds()
+                    {
+                        debug!(
+                            "Task {}: Plugin reported rate-limit retryAfter={}s",
+                            task.id, retry_after
+                        );
+                    }
+                    error!("Task {}: {}", task.id, msg);
+                    record_error(db, &source, event_broadcaster, &msg).await;
+                    return Ok(TaskResult::failure(msg));
+                }
+            };
+
+            // Apply per-host backoff signals based on the upstream status
+            // the plugin observed (if any). The plugin is expected to set
+            // `upstream_status` on its response so we can throttle without
+            // each plugin re-implementing backoff.
+            let mut backoff_url: Option<String> = None;
+            if let Some(status) = response.upstream_status {
+                if is_backoff_status(status) {
+                    // Pluck a host hint from the source's `display_name` or
+                    // `config` if present. Many plugins encode the upstream
+                    // base URL in `config.url`.
If we can't find one, the + // backoff is keyed by the plugin name as a fallback so + // siblings on the same plugin still cooperate. + let url_hint = derive_url_hint(&source); + self.backoff.record_http_error(&url_hint, status).await; + backoff_url = Some(url_hint); + warn!( + "Task {}: Source {} got upstream status {}; backoff multiplier {}", + task.id, + source.id, + status, + self.backoff + .multiplier(backoff_url.as_deref().unwrap_or("")) + .await + ); + } else if (200..400).contains(&status) { + let url_hint = derive_url_hint(&source); + self.backoff.record_success(&url_hint).await; + backoff_url = Some(url_hint); + } + } + + // Process candidates (the plugin may have streamed some via + // reverse-RPC already; those are already on the ledger). + // Snapshot fields needed *after* the consume-loop below so we + // can still build the `last_summary` once `response.candidates` + // is moved. + let response_etag = response.etag.clone(); + let response_not_modified = response.not_modified; + let response_upstream_status = response.upstream_status; + // Plugin-reported counters (populated by streaming plugins that + // record via `releases/record` mid-poll, since their response's + // `candidates` array is empty). When present, these win over the + // host-side count below. + let plugin_reported_parsed = response.parsed; + let plugin_reported_matched = response.matched; + let plugin_reported_recorded = response.recorded; + let plugin_reported_deduped = response.deduped; + + let mut result = PollReleaseSourceResult { + source_id, + candidates_returned: response.candidates.len() as u32, + not_modified: response.not_modified.unwrap_or(false), + ..Default::default() + }; + + // Cache per-series owned-keys lookups across candidates in this + // poll. A single source typically returns many candidates for + // the same series, so we don't want N+1 queries here. 
+ let mut owned_cache: std::collections::HashMap<Uuid, OwnedReleaseKeys> = + std::collections::HashMap::new(); + + for cand in response.candidates { + let series_id = cand.series_match.codex_series_id; + let threshold = match SeriesTrackingRepository::get(db, series_id).await { + Ok(Some(row)) => resolve_threshold(row.confidence_threshold_override), + Ok(None) => resolve_threshold(None), + Err(e) => { + warn!( + "Task {}: tracking lookup failed for series {}: {}", + task.id, series_id, e + ); + result.candidates_rejected += 1; + continue; + } + }; + match evaluate(cand, threshold) { + Ok(accepted) => { + let cand_volume = accepted.candidate.volume; + let cand_chapter = accepted.candidate.chapter; + + let initial_state = match resolve_initial_state( + db, + &mut owned_cache, + series_id, + cand_volume, + cand_chapter, + ) + .await + { + Ok(s) => s, + Err(e) => { + warn!( + "Task {}: owned-keys lookup failed for series {}: {} \ + (defaulting to announced)", + task.id, series_id, e + ); + None + } + }; + + let mut entry: NewReleaseEntry = accepted.into_ledger_entry(source.id); + entry.initial_state = initial_state.clone(); + match ReleaseLedgerRepository::record(db, entry).await { + Ok(outcome) => { + if outcome.deduped { + result.candidates_deduped += 1; + } else { + result.candidates_recorded += 1; + // Only emit the SSE/notify event when the + // row landed as `announced`. Auto-ignored + // rows are bookkeeping; users see them + // only on demand via the "All" filter. 
+ let landed_announced = + outcome.row.state == ledger_state::ANNOUNCED; + if landed_announced && let Some(broadcaster) = event_broadcaster + { + emit_release_announced( + broadcaster, + &outcome.row, + &source.plugin_id, + ); + } + } + } + Err(e) => { + warn!( + "Task {}: ledger write failed for source {}: {}", + task.id, source.id, e + ); + result.candidates_rejected += 1; + } + } + } + Err(reason) => { + debug!("Task {}: candidate rejected: {}", task.id, reason); + result.candidates_rejected += 1; + } + } + } + + fold_streaming_counters( + &mut result, + plugin_reported_parsed, + plugin_reported_matched, + plugin_reported_recorded, + plugin_reported_deduped, + ); + + // Persist source state. If we hit a successful 2xx upstream we + // already noted it for backoff; clear `last_error` and stamp + // `last_polled_at`. The one-line `summary` is surfaced in the + // Release tracking UI under the per-row status badge so users + // can see *why* a poll returned no announcements (no tracked + // series, upstream not modified, …) without container logs. + let polled_at = Utc::now(); + let summary = + build_poll_summary(response_not_modified, response_upstream_status, &result); + if let Err(e) = ReleaseSourceRepository::record_poll_success( + db, + source.id, + polled_at, + response_etag, + Some(summary), + ) + .await + { + warn!("Task {}: failed to persist source state: {}", task.id, e); + } + + // If the plugin signalled an upstream error code but didn't + // return an RPC error, also stamp `last_error` so admins see + // it in the UI. + if let Some(status) = response_upstream_status + && is_backoff_status(status) + { + let _ = ReleaseSourceRepository::record_poll_error( + db, + source.id, + &format!("upstream returned {}", status), + polled_at, + ) + .await; + } + + // Reset backoff on a clean run if we didn't already. 
+ if backoff_url.is_none() && response_upstream_status.is_none() { + let url_hint = derive_url_hint(&source); + self.backoff.record_success(&url_hint).await; + } + + // Emit a `ReleaseSourcePolled` event so the Release tracking + // settings page refreshes the row in real time. Best-effort: + // missing subscribers are a benign noop, the persisted source + // state is the source of truth. + let had_error = response_upstream_status + .map(is_backoff_status) + .unwrap_or(false); + if let Some(b) = event_broadcaster { + let _ = b.emit(EntityChangeEvent::release_source_polled( + source.id, + &source.plugin_id, + had_error, + )); + } + + let message = format!( + "Polled {}: returned {}, recorded {}, deduped {}, rejected {}", + source.display_name, + result.candidates_returned, + result.candidates_recorded, + result.candidates_deduped, + result.candidates_rejected + ); + info!("Task {}: {}", task.id, message); + Ok(TaskResult::success_with_data(message, json!(result))) + }) + } +} + +/// Merge plugin-reported counters from `ReleasePollResponse` into the +/// host's running `PollReleaseSourceResult`. +/// +/// Streaming plugins (Nyaa, MangaUpdates) record via `releases/record` +/// mid-poll and return an empty `candidates` array. They report what they +/// saw via the response's optional counter fields; the host's `result` +/// already counts whatever came back inline in `candidates`, so we +/// **additively merge** the two so a plugin that mixes both modes gets a +/// correct summary. +/// +/// `deduped` falls back to `matched - recorded` when the plugin only sent +/// the latter two — older plugins won't know about the dedicated field. 
+pub(crate) fn fold_streaming_counters( + result: &mut PollReleaseSourceResult, + parsed: Option<u32>, + matched: Option<u32>, + recorded: Option<u32>, + deduped: Option<u32>, +) { + if let Some(p) = parsed { + result.candidates_returned = result.candidates_returned.saturating_add(p); + } + if let Some(r) = recorded { + result.candidates_recorded = result.candidates_recorded.saturating_add(r); + } + if let Some(d) = deduped { + result.candidates_deduped = result.candidates_deduped.saturating_add(d); + } else if let (Some(m), Some(r)) = (matched, recorded) + && m >= r + { + result.candidates_deduped = result.candidates_deduped.saturating_add(m - r); + } +} + +/// Build the one-line `last_summary` string written to `release_sources` +/// after a successful poll, intended for direct display under the Release +/// tracking row's status badge. +/// +/// Example outputs: +/// - `"Up to date — upstream returned 304 (not modified)"` +/// - `"Fetched 0 items"` (e.g. no tracked series with aliases for the source) +/// - `"Fetched 12 items, recorded 0 (12 already in ledger)"` +/// - `"Fetched 5 items, recorded 1, dropped 4 below threshold"` +/// - `"Upstream warning: HTTP 429"` (when the plugin reports an error code +/// but didn't fail the RPC outright) +pub(crate) fn build_poll_summary( + not_modified: Option<bool>, + upstream_status: Option<u16>, + result: &PollReleaseSourceResult, +) -> String { + if matches!(not_modified, Some(true)) { + return "Up to date — upstream returned 304 (not modified)".to_string(); + } + + let returned = result.candidates_returned; + let recorded = result.candidates_recorded; + let deduped = result.candidates_deduped; + let rejected = result.candidates_rejected; + + let mut s = match returned { + 0 => "Fetched 0 items".to_string(), + 1 => format!("Fetched 1 item, recorded {}", recorded), + n => format!("Fetched {} items, recorded {}", n, recorded), + }; + if deduped > 0 { + s.push_str(&format!(" ({} already in ledger)", deduped)); + } + if 
rejected > 0 { + s.push_str(&format!(", dropped {} below threshold", rejected)); + } + + // Upstream warning takes a trailing-suffix slot so the count info isn't + // lost. Backoff-significant statuses (429 / 5xx) are paired with a + // `last_error` write elsewhere; this is just a friendly inline note. + if let Some(status) = upstream_status + && is_backoff_status(status) + { + s.push_str(&format!(" · upstream warning: HTTP {}", status)); + } + + s +} + +/// Emit a `ReleaseAnnounced` event for a freshly-inserted ledger row. +/// +/// Failure to broadcast (no subscribers, channel closed) is a benign noop — +/// the ledger row is the source of truth, the SSE event is a UX nicety. +pub(crate) fn emit_release_announced( + broadcaster: &EventBroadcaster, + row: &crate::db::entities::release_ledger::Model, + plugin_id: &str, +) { + let _ = broadcaster.emit(EntityChangeEvent::release_announced(row, plugin_id)); +} + +/// Compute the initial ledger state for a candidate. Returns +/// `Some("ignored")` when the user already owns this volume/chapter; +/// `None` falls back to the repository's default (`announced`). +/// +/// Uses `owned_cache` so multiple candidates against the same series in +/// one poll only hit the DB once. +async fn resolve_initial_state( + db: &DatabaseConnection, + owned_cache: &mut std::collections::HashMap<Uuid, OwnedReleaseKeys>, + series_id: Uuid, + volume: Option<i32>, + chapter: Option<f64>, +) -> Result<Option<String>> { + // Skip the lookup entirely when the candidate has nothing to match against. 
+ if volume.is_none() && chapter.is_none() { + return Ok(None); + } + if let std::collections::hash_map::Entry::Vacant(e) = owned_cache.entry(series_id) { + let owned = SeriesRepository::get_owned_release_keys_for_series(db, series_id).await?; + e.insert(owned); + } + let owned = &owned_cache[&series_id]; + if should_auto_ignore(volume, chapter, owned) { + Ok(Some(ledger_state::IGNORED.to_string())) + } else { + Ok(None) + } +} + +/// Best-effort URL hint extraction used for backoff keying. +/// +/// Looks in `config.url`, `config.feed_url`, and `config.base_url` in that +/// order; falls back to the plugin name (so all sources on the same plugin +/// share a backoff key). +fn derive_url_hint(source: &crate::db::entities::release_sources::Model) -> String { + if let Some(cfg) = source.config.as_ref() { + for key in ["url", "feedUrl", "feed_url", "baseUrl", "base_url"] { + if let Some(v) = cfg.get(key).and_then(|v| v.as_str()) + && !v.is_empty() + { + return v.to_string(); + } + } + } + source.plugin_id.clone() +} + +async fn record_error( + db: &DatabaseConnection, + source: &crate::db::entities::release_sources::Model, + event_broadcaster: Option<&Arc<EventBroadcaster>>, + message: &str, +) { + if let Err(e) = + ReleaseSourceRepository::record_poll_error(db, source.id, message, Utc::now()).await + { + warn!( + "Failed to persist poll error on source {}: {}", + source.id, e + ); + } + // Emit a `ReleaseSourcePolled` event so the Release tracking settings + // page refreshes in real time. Best-effort: missing subscribers are a + // benign noop, the persisted state is the source of truth. 
+ if let Some(b) = event_broadcaster { + let _ = b.emit(EntityChangeEvent::release_source_polled( + source.id, + &source.plugin_id, + true, // had_error + )); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db::ScanningStrategy; + use crate::db::entities::release_sources::kind; + use crate::db::repositories::{ + LibraryRepository, NewReleaseSource, ReleaseSourceRepository, SeriesRepository, + SeriesTrackingRepository, TrackingUpdate, + }; + use crate::db::test_helpers::create_test_db; + + use crate::events::EntityEvent; + + /// `emit_release_announced` produces a `ReleaseAnnounced` event whose + /// fields mirror the ledger row and the source's plugin id. + #[test] + fn emit_release_announced_emits_matching_event() { + let broadcaster = EventBroadcaster::new(8); + let mut rx = broadcaster.subscribe(); + + let row = crate::db::entities::release_ledger::Model { + id: Uuid::new_v4(), + series_id: Uuid::new_v4(), + source_id: Uuid::new_v4(), + external_release_id: "ext-1".to_string(), + info_hash: None, + chapter: Some(143.0), + volume: Some(15), + language: Some("en".to_string()), + format_hints: None, + group_or_uploader: None, + payload_url: "https://example.com/r/1".to_string(), + media_url: None, + media_url_kind: None, + confidence: 0.95, + state: "announced".to_string(), + metadata: None, + observed_at: Utc::now(), + created_at: Utc::now(), + }; + + emit_release_announced(&broadcaster, &row, "release-mangaupdates"); + + let event = rx.try_recv().expect("expected one event"); + match event.event { + EntityEvent::ReleaseAnnounced { + ledger_id, + series_id, + source_id, + plugin_id, + chapter, + volume, + language, + } => { + assert_eq!(ledger_id, row.id); + assert_eq!(series_id, row.series_id); + assert_eq!(source_id, row.source_id); + assert_eq!(plugin_id, "release-mangaupdates"); + assert_eq!(chapter, Some(143.0)); + assert_eq!(volume, Some(15)); + assert_eq!(language, "en"); + } + other => panic!("unexpected event: {:?}", other), + } + } + + 
/// Emitting with no subscribers must not panic — the broadcast send error + /// is intentionally swallowed. + #[test] + fn emit_release_announced_tolerates_no_subscribers() { + let broadcaster = EventBroadcaster::new(8); + let row = crate::db::entities::release_ledger::Model { + id: Uuid::new_v4(), + series_id: Uuid::new_v4(), + source_id: Uuid::new_v4(), + external_release_id: "ext-2".to_string(), + info_hash: None, + chapter: None, + volume: None, + language: None, + format_hints: None, + group_or_uploader: None, + payload_url: "https://example.com/r/2".to_string(), + media_url: None, + media_url_kind: None, + confidence: 0.8, + state: "announced".to_string(), + metadata: None, + observed_at: Utc::now(), + created_at: Utc::now(), + }; + emit_release_announced(&broadcaster, &row, "release-nyaa"); + } + + #[test] + fn derive_url_hint_uses_config_url_when_present() { + let mut model = make_model(); + model.config = Some(json!({"url": "https://nyaa.si/feed"})); + assert_eq!(derive_url_hint(&model), "https://nyaa.si/feed"); + } + + #[test] + fn derive_url_hint_falls_back_to_plugin_name() { + let model = make_model(); + assert_eq!(derive_url_hint(&model), "release-nyaa"); + } + + #[test] + fn derive_url_hint_supports_alternate_keys() { + let mut model = make_model(); + model.config = Some(json!({"feedUrl": "https://example.com/x"})); + assert_eq!(derive_url_hint(&model), "https://example.com/x"); + } + + fn make_model() -> crate::db::entities::release_sources::Model { + crate::db::entities::release_sources::Model { + id: Uuid::new_v4(), + plugin_id: "release-nyaa".to_string(), + source_key: "k".to_string(), + display_name: "n".to_string(), + kind: kind::RSS_UPLOADER.to_string(), + enabled: true, + cron_schedule: None, + last_polled_at: None, + last_error: None, + last_error_at: None, + etag: None, + config: None, + last_summary: None, + created_at: Utc::now(), + updated_at: Utc::now(), + } + } + + /// A poll task referencing a missing source must fail without panic. 
+ #[tokio::test] + async fn task_fails_when_source_missing() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection().clone(); + let plugin_manager = Arc::new(PluginManager::with_defaults(Arc::new(conn.clone()))); + let handler = PollReleaseSourceHandler::new(plugin_manager); + + let task = make_task(json!({"source_id": Uuid::new_v4().to_string()})); + let result = handler.handle(&task, &conn, None).await.unwrap(); + assert!(!result.success); + assert!(result.message.unwrap().contains("not found")); + } + + /// A disabled source short-circuits cleanly. + #[tokio::test] + async fn task_skips_disabled_source() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection().clone(); + let library = LibraryRepository::create(&conn, "L", "/l", ScanningStrategy::Default) + .await + .unwrap(); + let _series = SeriesRepository::create(&conn, library.id, "S", None) + .await + .unwrap(); + + let source = ReleaseSourceRepository::create( + &conn, + NewReleaseSource { + plugin_id: "release-nyaa".to_string(), + source_key: "k".to_string(), + display_name: "Nyaa".to_string(), + kind: kind::RSS_UPLOADER.to_string(), + enabled: Some(false), + config: None, + }, + ) + .await + .unwrap(); + + let plugin_manager = Arc::new(PluginManager::with_defaults(Arc::new(conn.clone()))); + let handler = PollReleaseSourceHandler::new(plugin_manager); + + let task = make_task(json!({"source_id": source.id.to_string()})); + let result = handler.handle(&task, &conn, None).await.unwrap(); + assert!(result.success); + let data = result.data.unwrap(); + assert_eq!(data["skippedReason"], "source_disabled"); + } + + /// A source with `plugin_id="core"` short-circuits with the + /// `core_source_no_poll_path` reason instead of trying to spawn a plugin. 
+ #[tokio::test] + async fn task_skips_core_source() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection().clone(); + let _library = LibraryRepository::create(&conn, "L", "/l", ScanningStrategy::Default) + .await + .unwrap(); + + let source = ReleaseSourceRepository::create( + &conn, + NewReleaseSource { + plugin_id: source_plugin_id::CORE.to_string(), + source_key: "metadata-piggyback".to_string(), + display_name: "Metadata gap".to_string(), + kind: kind::METADATA_PIGGYBACK.to_string(), + enabled: None, + config: None, + }, + ) + .await + .unwrap(); + + let plugin_manager = Arc::new(PluginManager::with_defaults(Arc::new(conn.clone()))); + let handler = PollReleaseSourceHandler::new(plugin_manager); + + let task = make_task(json!({"source_id": source.id.to_string()})); + let result = handler.handle(&task, &conn, None).await.unwrap(); + assert!(result.success); + assert_eq!( + result.data.unwrap()["skippedReason"], + "core_source_no_poll_path" + ); + } + + /// A source whose `plugin_id` doesn't match a `plugins` row records the + /// error on `last_error` and surfaces a failure result. + #[tokio::test] + async fn task_fails_when_plugin_not_registered() { + let (db, _t) = create_test_db().await; + let conn = db.sea_orm_connection().clone(); + let _library = LibraryRepository::create(&conn, "L", "/l", ScanningStrategy::Default) + .await + .unwrap(); + let source = ReleaseSourceRepository::create( + &conn, + NewReleaseSource { + plugin_id: "release-nonexistent".to_string(), + source_key: "k".to_string(), + display_name: "Nope".to_string(), + kind: kind::RSS_UPLOADER.to_string(), + enabled: None, + config: None, + }, + ) + .await + .unwrap(); + + // Pre-existing tracking row makes the path complete. 
+ let series = SeriesRepository::create(&conn, _library.id, "X", None) + .await + .unwrap(); + SeriesTrackingRepository::upsert( + &conn, + series.id, + TrackingUpdate { + tracked: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + + let plugin_manager = Arc::new(PluginManager::with_defaults(Arc::new(conn.clone()))); + let handler = PollReleaseSourceHandler::new(plugin_manager); + + let task = make_task(json!({"source_id": source.id.to_string()})); + let result = handler.handle(&task, &conn, None).await.unwrap(); + assert!(!result.success); + + let after = ReleaseSourceRepository::get_by_id(&conn, source.id) + .await + .unwrap() + .unwrap(); + assert!(after.last_error.is_some()); + } + + fn make_task(params: serde_json::Value) -> tasks::Model { + tasks::Model { + id: Uuid::new_v4(), + task_type: "poll_release_source".to_string(), + library_id: None, + series_id: None, + book_id: None, + params: Some(params), + status: "pending".to_string(), + priority: 170, + locked_by: None, + locked_until: None, + attempts: 0, + max_attempts: 3, + last_error: None, + reschedule_count: 0, + max_reschedules: 5, + result: None, + scheduled_for: Utc::now(), + created_at: Utc::now(), + started_at: None, + completed_at: None, + } + } + + // ------------------------------------------------------------------------- + // build_poll_summary — pins the user-facing copy that lands under the + // Release tracking row's status badge. 
+ // ------------------------------------------------------------------------- + + fn empty_result() -> PollReleaseSourceResult { + PollReleaseSourceResult { + source_id: Uuid::new_v4(), + ..Default::default() + } + } + + #[test] + fn build_poll_summary_reports_not_modified_explicitly() { + let r = empty_result(); + let s = build_poll_summary(Some(true), None, &r); + assert_eq!(s, "Up to date — upstream returned 304 (not modified)"); + } + + #[test] + fn build_poll_summary_zero_items() { + let r = empty_result(); + let s = build_poll_summary(Some(false), None, &r); + assert_eq!(s, "Fetched 0 items"); + } + + #[test] + fn build_poll_summary_one_item_uses_singular() { + let mut r = empty_result(); + r.candidates_returned = 1; + r.candidates_recorded = 1; + let s = build_poll_summary(None, None, &r); + assert_eq!(s, "Fetched 1 item, recorded 1"); + } + + #[test] + fn build_poll_summary_includes_dedup_and_threshold_breakdown() { + let mut r = empty_result(); + r.candidates_returned = 12; + r.candidates_recorded = 1; + r.candidates_deduped = 7; + r.candidates_rejected = 4; + let s = build_poll_summary(None, None, &r); + assert_eq!( + s, + "Fetched 12 items, recorded 1 (7 already in ledger), dropped 4 below threshold" + ); + } + + #[test] + fn build_poll_summary_appends_upstream_warning_for_backoff_status() { + let mut r = empty_result(); + r.candidates_returned = 0; + let s = build_poll_summary(None, Some(429), &r); + assert_eq!(s, "Fetched 0 items · upstream warning: HTTP 429"); + } + + #[test] + fn build_poll_summary_does_not_append_for_clean_2xx() { + let mut r = empty_result(); + r.candidates_returned = 2; + r.candidates_recorded = 2; + let s = build_poll_summary(None, Some(200), &r); + assert_eq!(s, "Fetched 2 items, recorded 2"); + } + + // ------------------------------------------------------------------------- + // fold_streaming_counters — protects against the regression where a + // streaming plugin (Nyaa, MangaUpdates) records via reverse-RPC and the + // 
host's summary always reads "Fetched 0 items" because the response's + // `candidates` array was empty. + // ------------------------------------------------------------------------- + + #[test] + fn fold_streaming_counters_adds_plugin_reported_values() { + let mut r = empty_result(); + fold_streaming_counters(&mut r, Some(12), Some(3), Some(1), Some(2)); + assert_eq!(r.candidates_returned, 12); + assert_eq!(r.candidates_recorded, 1); + assert_eq!(r.candidates_deduped, 2); + } + + #[test] + fn fold_streaming_counters_infers_deduped_when_only_matched_and_recorded() { + let mut r = empty_result(); + fold_streaming_counters(&mut r, Some(10), Some(8), Some(3), None); + assert_eq!(r.candidates_returned, 10); + assert_eq!(r.candidates_recorded, 3); + assert_eq!(r.candidates_deduped, 5, "matched - recorded fallback"); + } + + #[test] + fn fold_streaming_counters_handles_absent_fields_for_older_plugins() { + let mut r = empty_result(); + r.candidates_returned = 4; + r.candidates_recorded = 4; + fold_streaming_counters(&mut r, None, None, None, None); + assert_eq!(r.candidates_returned, 4, "host counts preserved"); + assert_eq!(r.candidates_recorded, 4); + assert_eq!(r.candidates_deduped, 0); + } + + #[test] + fn fold_streaming_counters_additively_merges_with_inline_candidates() { + let mut r = empty_result(); + // Host counted some inline candidates already. + r.candidates_returned = 2; + r.candidates_recorded = 2; + // Plugin also streamed a few. + fold_streaming_counters(&mut r, Some(3), Some(3), Some(2), Some(1)); + assert_eq!(r.candidates_returned, 5); + assert_eq!(r.candidates_recorded, 4); + assert_eq!(r.candidates_deduped, 1); + } + + #[test] + fn build_poll_summary_uses_streaming_counters_via_fold() { + // Pin the end-to-end shape: a streaming plugin returns no inline + // candidates but reports it parsed 5 and recorded 5 — the badge + // must say "Fetched 5 items, recorded 5", not "Fetched 0 items". 
+ let mut r = empty_result(); + fold_streaming_counters(&mut r, Some(5), Some(5), Some(5), Some(0)); + let s = build_poll_summary(None, Some(200), &r); + assert_eq!(s, "Fetched 5 items, recorded 5"); + } +} diff --git a/src/tasks/types.rs b/src/tasks/types.rs index 6e38bf47..dfd8573b 100644 --- a/src/tasks/types.rs +++ b/src/tasks/types.rs @@ -206,6 +206,34 @@ pub enum TaskType { #[serde(default)] reason: Option<String>, }, + + /// Backfill release-tracking aliases from existing series metadata. + /// + /// Walks series in scope, harvests the canonical title plus alternate titles + /// from `series_metadata` and `series_alternate_titles`, and seeds them as + /// `metadata`-source aliases in `series_aliases`. Idempotent — re-runs do + /// not create duplicates. Does NOT enable tracking; that stays explicit. + BackfillTrackingFromMetadata { + /// If set, scope to this library; otherwise all series. + #[serde(rename = "libraryId", default)] + library_id: Option<Uuid>, + /// If set, scope to these specific series (takes precedence over library_id). + #[serde(rename = "seriesIds", default)] + series_ids: Option<Vec<Uuid>>, + }, + + /// Poll a single `release_sources` row for new releases. + /// + /// Resolves the source's owning plugin, calls `releases/poll` over the + /// existing plugin host, runs returned candidates through the matcher + + /// threshold, and writes accepted candidates to the ledger. On success + /// updates `last_polled_at` (and optionally `etag`); on failure records + /// `last_error`. Idempotent: ledger writes dedup on + /// `(source_id, external_release_id)` and `info_hash`. + PollReleaseSource { + #[serde(rename = "sourceId")] + source_id: Uuid, + }, } fn default_mode() -> String { @@ -251,6 +279,10 @@ impl TaskType { TaskType::UserPluginRecommendationDismiss { .. } => 200, TaskType::UserPluginSync { .. } => 190, TaskType::UserPluginRecommendations { .. } => 180, + // Release tracking maintenance + TaskType::BackfillTrackingFromMetadata { .. 
} => 150, + // Release polling: scheduled background discovery + TaskType::PollReleaseSource { .. } => 170, // Cleanup TaskType::CleanupBookFiles { .. } | TaskType::CleanupSeriesFiles { .. } @@ -292,6 +324,8 @@ impl TaskType { TaskType::UserPluginRecommendationDismiss { .. } => { "user_plugin_recommendation_dismiss" } + TaskType::BackfillTrackingFromMetadata { .. } => "backfill_tracking_from_metadata", + TaskType::PollReleaseSource { .. } => "poll_release_source", } } @@ -308,6 +342,7 @@ impl TaskType { TaskType::GenerateThumbnails { library_id, .. } => *library_id, TaskType::GenerateSeriesThumbnails { library_id, .. } => *library_id, TaskType::ReprocessSeriesTitles { library_id, .. } => *library_id, + TaskType::BackfillTrackingFromMetadata { library_id, .. } => *library_id, _ => None, } } @@ -407,6 +442,12 @@ impl TaskType { "reason": reason, }) } + TaskType::BackfillTrackingFromMetadata { series_ids, .. } => { + serde_json::json!({ "series_ids": series_ids }) + } + TaskType::PollReleaseSource { source_id } => { + serde_json::json!({ "source_id": source_id }) + } _ => serde_json::json!({}), } } @@ -440,6 +481,26 @@ impl TaskType { } } + /// JSON-param key/value pair to use as a dedup discriminator for task + /// types whose identity lives in `params` rather than in FK columns. + /// + /// Returning `Some((key, value))` tells the dedup path in + /// `TaskRepository::find_existing_task` to additionally filter by + /// `params->>key = value`. Without this, two `poll_release_source` tasks + /// for *different* `source_id`s would falsely collide because they share + /// the same `task_type` and have no FK columns set, causing the second + /// "Poll now" click to be silently coalesced onto the first source's + /// in-flight poll. + /// + /// `key` must be a simple identifier (alphanumeric + underscore) since + /// SQLite splices it into a JSON path string. 
+ pub fn dedup_params(&self) -> Option<(&'static str, String)> { + match self { + TaskType::PollReleaseSource { source_id } => Some(("source_id", source_id.to_string())), + _ => None, + } + } + /// Extract all fields needed for database insertion /// Returns: (type_string, library_id, series_id, book_id, params) pub fn extract_fields( @@ -1076,6 +1137,47 @@ mod tests { assert_eq!(deserialized.job_id(), Some(job_id)); } + #[test] + fn test_poll_release_source_extraction() { + let source_id = Uuid::new_v4(); + let task = TaskType::PollReleaseSource { source_id }; + + assert_eq!(task.type_string(), "poll_release_source"); + assert_eq!(task.library_id(), None); + assert_eq!(task.series_id(), None); + assert_eq!(task.book_id(), None); + assert_eq!(task.default_priority(), 170); + + let (type_str, lib_id, series_id, book_id, params) = task.extract_fields(); + assert_eq!(type_str, "poll_release_source"); + assert_eq!(lib_id, None); + assert_eq!(series_id, None); + assert_eq!(book_id, None); + let params = params.expect("expected source_id params"); + assert_eq!(params["source_id"], serde_json::json!(source_id)); + } + + #[test] + fn test_poll_release_source_serialization() { + let source_id = Uuid::new_v4(); + let task = TaskType::PollReleaseSource { source_id }; + + let json = serde_json::to_string(&task).unwrap(); + assert!(json.contains("poll_release_source")); + assert!(json.contains(&source_id.to_string())); + // sourceId is the camelCase rename. 
+ assert!(json.contains("sourceId")); + + let deserialized: TaskType = serde_json::from_str(&json).unwrap(); + assert_eq!(deserialized.type_string(), "poll_release_source"); + match deserialized { + TaskType::PollReleaseSource { source_id: id } => { + assert_eq!(id, source_id); + } + _ => panic!("wrong variant"), + } + } + #[test] fn test_default_priority_values() { let library_id = Uuid::new_v4(); diff --git a/src/tasks/worker.rs b/src/tasks/worker.rs index cfb09f1c..70ca269b 100644 --- a/src/tasks/worker.rs +++ b/src/tasks/worker.rs @@ -26,11 +26,12 @@ use crate::services::user_plugin::OAuthStateManager; use crate::services::{SettingsService, TaskMetricsService, ThumbnailService}; use crate::tasks::error::check_rate_limited; use crate::tasks::handlers::{ - AnalyzeBookHandler, AnalyzeSeriesHandler, CleanupBookFilesHandler, CleanupOrphanedFilesHandler, - CleanupPdfCacheHandler, CleanupPluginDataHandler, CleanupSeriesExportsHandler, - CleanupSeriesFilesHandler, ExportSeriesHandler, FindDuplicatesHandler, - GenerateSeriesThumbnailHandler, GenerateSeriesThumbnailsHandler, GenerateThumbnailHandler, - GenerateThumbnailsHandler, PluginAutoMatchHandler, PurgeDeletedHandler, + AnalyzeBookHandler, AnalyzeSeriesHandler, BackfillTrackingFromMetadataHandler, + CleanupBookFilesHandler, CleanupOrphanedFilesHandler, CleanupPdfCacheHandler, + CleanupPluginDataHandler, CleanupSeriesExportsHandler, CleanupSeriesFilesHandler, + ExportSeriesHandler, FindDuplicatesHandler, GenerateSeriesThumbnailHandler, + GenerateSeriesThumbnailsHandler, GenerateThumbnailHandler, GenerateThumbnailsHandler, + PluginAutoMatchHandler, PollReleaseSourceHandler, PurgeDeletedHandler, RefreshLibraryMetadataHandler, RenumberSeriesBatchHandler, RenumberSeriesHandler, ReprocessSeriesTitleHandler, ReprocessSeriesTitlesHandler, ScanLibraryHandler, TaskHandler, UserPluginRecommendationDismissHandler, UserPluginRecommendationsHandler, @@ -48,6 +49,10 @@ pub struct TaskWorker { thumbnail_service: 
Option<Arc<ThumbnailService>>, task_metrics_service: Option<Arc<TaskMetricsService>>, plugin_manager: Option<Arc<PluginManager>>, + /// Shared per-host backoff state used by the `PollReleaseSourceHandler`. + /// Exposed via [`Self::release_backoff`] so the scheduler can read the + /// same multipliers when picking next-poll intervals. + release_backoff: crate::services::release::backoff::HostBackoff, shutdown_tx: Option<broadcast::Sender<()>>, } @@ -101,6 +106,11 @@ impl TaskWorker { "cleanup_plugin_data".to_string(), Arc::new(CleanupPluginDataHandler::new()), ); + // Release-tracking maintenance: backfill aliases from metadata. + handlers.insert( + "backfill_tracking_from_metadata".to_string(), + Arc::new(BackfillTrackingFromMetadataHandler::new()), + ); // Generate worker ID from hostname or random UUID let worker_id = std::env::var("HOSTNAME") @@ -117,10 +127,18 @@ impl TaskWorker { thumbnail_service: None, task_metrics_service: None, plugin_manager: None, + release_backoff: crate::services::release::backoff::HostBackoff::new(), shutdown_tx: None, } } + /// Shared per-host backoff used by `PollReleaseSourceHandler`. The + /// scheduler reads this when computing the effective interval for the + /// next poll. + pub fn release_backoff(&self) -> crate::services::release::backoff::HostBackoff { + self.release_backoff.clone() + } + /// Set the poll interval pub fn with_poll_interval(mut self, interval: Duration) -> Self { self.poll_interval = interval; @@ -270,6 +288,15 @@ impl TaskWorker { "user_plugin_recommendation_dismiss".to_string(), Arc::new(dismiss_handler), ); + // Register release-polling handler. Shares the worker's HostBackoff + // so the scheduler can also consult the same multipliers. 
+ let mut poll_handler = PollReleaseSourceHandler::new(plugin_manager.clone()) + .with_backoff(self.release_backoff.clone()); + if let Some(ref settings_service) = self.settings_service { + poll_handler = poll_handler.with_settings_service(settings_service.clone()); + } + self.handlers + .insert("poll_release_source".to_string(), Arc::new(poll_handler)); self.plugin_manager = Some(plugin_manager); self } @@ -577,10 +604,20 @@ impl TaskWorker { let recording_broadcaster = Arc::new(EventBroadcaster::new_with_recording(1000, true)); let broadcaster_clone = recording_broadcaster.clone(); - // Execute task with recording broadcaster - let result = handler - .handle(&task, &self.db, Some(&recording_broadcaster)) - .await; + // Execute the handler inside a task-local scope that exposes the + // recording broadcaster to any code on this task's await chain — + // including reverse-RPC handlers (e.g. `releases/record`), which + // are dispatched on this task by `RpcClient::call_with_timeout` + // when the plugin tags reverse-RPCs with the parent forward + // request id. Without this, plugins that emit events via + // reverse-RPC (rather than synchronously through the handler's + // broadcaster argument) would have no recording context and + // their events would never replay. + let result = crate::events::with_recording_broadcaster( + recording_broadcaster.clone(), + handler.handle(&task, &self.db, Some(&recording_broadcaster)), + ) + .await; // Get recorded events before returning let events = broadcaster_clone.take_recorded_events(); @@ -607,10 +644,22 @@ impl TaskWorker { (self.event_broadcaster.clone(), None) }; - // Execute task with shared broadcaster (single-process mode) - let result = handler - .handle(&task, &self.db, task_broadcaster.as_ref()) - .await; + // Execute task with shared broadcaster (single-process mode). + // Set the task-local to the shared broadcaster too, so reverse-RPC + // handlers see *the same* broadcaster the rest of the task uses. 
+ // The shared broadcaster has recording disabled here (web/single- + // process mode), so emits flow straight to live SSE subscribers. + let result = if let Some(ref shared) = task_broadcaster { + crate::events::with_recording_broadcaster( + shared.clone(), + handler.handle(&task, &self.db, task_broadcaster.as_ref()), + ) + .await + } else { + handler + .handle(&task, &self.db, task_broadcaster.as_ref()) + .await + }; // Update task status based on result match result { diff --git a/tests/api.rs b/tests/api.rs index 73b81c0a..8d5c9caf 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -36,6 +36,7 @@ mod api { mod rate_limit; mod read_progress; mod recommendations; + mod releases; mod renumber; mod scan; mod series; @@ -47,6 +48,7 @@ mod api { mod tags; mod task_metrics; mod thumbnails; + mod tracking; mod user_plugins; mod user_preferences; mod user_ratings; diff --git a/tests/api/bulk_operations.rs b/tests/api/bulk_operations.rs index 5346a9e5..56926dfc 100644 --- a/tests/api/bulk_operations.rs +++ b/tests/api/bulk_operations.rs @@ -638,3 +638,298 @@ async fn test_bulk_analyze_series_unauthorized() { assert_eq!(status, StatusCode::UNAUTHORIZED); } + +// ============================================================================ +// Bulk track / untrack for releases (round D) +// ============================================================================ + +use codex::api::routes::v1::dto::tracking::BulkTrackForReleasesResponse; + +#[tokio::test] +async fn bulk_track_for_releases_flips_tracked_and_seeds() { + use codex::db::repositories::{SeriesAliasRepository, SeriesTrackingRepository}; + + let (db, _temp_dir) = setup_test_db().await; + let library = LibraryRepository::create(&db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let s1 = SeriesRepository::create(&db, library.id, "Vinland Saga", None) + .await + .unwrap(); + let s2 = SeriesRepository::create(&db, library.id, "Berserk", None) + .await + .unwrap(); + + let state = 
create_test_auth_state(db.clone()).await; + let (_user_id, token) = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request_body = BulkSeriesRequest { + series_ids: vec![s1.id, s2.id], + }; + let request = post_json_request_with_auth( + "/api/v1/series/bulk/track-for-releases", + &request_body, + &token, + ); + let (status, body): (StatusCode, Option<BulkTrackForReleasesResponse>) = + make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.changed, 2); + assert_eq!(body.already_in_state, 0); + assert_eq!(body.errored, 0); + assert_eq!(body.results.len(), 2); + for r in &body.results { + assert_eq!(r.outcome, "tracked"); + } + + // Both series should now be tracked + have at least one seeded alias. + for series_id in [s1.id, s2.id] { + let row = SeriesTrackingRepository::get(&db, series_id) + .await + .unwrap() + .unwrap(); + assert!(row.tracked, "series {} should be tracked", series_id); + + let aliases = SeriesAliasRepository::get_for_series(&db, series_id) + .await + .unwrap(); + assert!( + !aliases.is_empty(), + "series {} should have a seeded alias", + series_id + ); + } +} + +#[tokio::test] +async fn bulk_track_for_releases_skips_already_tracked() { + use codex::db::repositories::{SeriesTrackingRepository, TrackingUpdate}; + + let (db, _temp_dir) = setup_test_db().await; + let library = LibraryRepository::create(&db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let already = SeriesRepository::create(&db, library.id, "Already", None) + .await + .unwrap(); + let fresh = SeriesRepository::create(&db, library.id, "Fresh", None) + .await + .unwrap(); + + // Pre-track `already`. 
+ SeriesTrackingRepository::upsert( + &db, + already.id, + TrackingUpdate { + tracked: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let (_user_id, token) = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request_body = BulkSeriesRequest { + series_ids: vec![already.id, fresh.id], + }; + let request = post_json_request_with_auth( + "/api/v1/series/bulk/track-for-releases", + &request_body, + &token, + ); + let (status, body): (StatusCode, Option<BulkTrackForReleasesResponse>) = + make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.changed, 1, "only `fresh` should flip"); + assert_eq!(body.already_in_state, 1, "`already` is a no-op"); + assert_eq!(body.errored, 0); + + // Per-series outcomes preserved in input order. + assert_eq!(body.results[0].series_id, already.id); + assert_eq!(body.results[0].outcome, "skipped"); + assert_eq!(body.results[1].series_id, fresh.id); + assert_eq!(body.results[1].outcome, "tracked"); +} + +#[tokio::test] +async fn bulk_track_for_releases_reports_missing_series() { + let (db, _temp_dir) = setup_test_db().await; + let library = LibraryRepository::create(&db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let real = SeriesRepository::create(&db, library.id, "Real", None) + .await + .unwrap(); + let bogus = uuid::Uuid::new_v4(); + + let state = create_test_auth_state(db.clone()).await; + let (_user_id, token) = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request_body = BulkSeriesRequest { + series_ids: vec![bogus, real.id], + }; + let request = post_json_request_with_auth( + "/api/v1/series/bulk/track-for-releases", + &request_body, + &token, + ); + let (status, body): (StatusCode, Option<BulkTrackForReleasesResponse>) = + make_json_request(app, request).await; + + // 
The whole request still succeeds (200) — one bad series doesn't + // poison the others. The bogus row gets `outcome: skipped` with a + // detail string, by design (see bulk handler doc). + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.changed, 1); + assert_eq!(body.already_in_state, 1); + assert_eq!(body.errored, 0); + assert_eq!(body.results[0].series_id, bogus); + assert_eq!(body.results[0].outcome, "skipped"); + assert!( + body.results[0] + .detail + .as_deref() + .unwrap_or("") + .contains("not found"), + "missing series detail should mention 'not found'" + ); +} + +#[tokio::test] +async fn bulk_untrack_for_releases_flips_tracked_off_preserves_aliases() { + use codex::db::repositories::{ + SeriesAliasRepository, SeriesTrackingRepository, TrackingUpdate, + }; + + let (db, _temp_dir) = setup_test_db().await; + let library = LibraryRepository::create(&db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let s = SeriesRepository::create(&db, library.id, "Tracked", None) + .await + .unwrap(); + SeriesTrackingRepository::upsert( + &db, + s.id, + TrackingUpdate { + tracked: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + // Add a manual alias the user may want to keep. 
+ SeriesAliasRepository::create(&db, s.id, "User Alias", "manual") + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let (_user_id, token) = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request_body = BulkSeriesRequest { + series_ids: vec![s.id], + }; + let request = post_json_request_with_auth( + "/api/v1/series/bulk/untrack-for-releases", + &request_body, + &token, + ); + let (status, body): (StatusCode, Option<BulkTrackForReleasesResponse>) = + make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.changed, 1); + assert_eq!(body.results[0].outcome, "untracked"); + + let row = SeriesTrackingRepository::get(&db, s.id) + .await + .unwrap() + .unwrap(); + assert!(!row.tracked); + + // Aliases must survive — untrack is a soft toggle, not a delete. + let aliases = SeriesAliasRepository::get_for_series(&db, s.id) + .await + .unwrap(); + assert!(aliases.iter().any(|a| a.alias == "User Alias")); +} + +#[tokio::test] +async fn bulk_untrack_for_releases_skips_already_untracked() { + let (db, _temp_dir) = setup_test_db().await; + let library = LibraryRepository::create(&db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let s = SeriesRepository::create(&db, library.id, "Never tracked", None) + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let (_user_id, token) = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request_body = BulkSeriesRequest { + series_ids: vec![s.id], + }; + let request = post_json_request_with_auth( + "/api/v1/series/bulk/untrack-for-releases", + &request_body, + &token, + ); + let (status, body): (StatusCode, Option<BulkTrackForReleasesResponse>) = + make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.changed, 0); + 
assert_eq!(body.already_in_state, 1); + assert_eq!(body.results[0].outcome, "skipped"); +} + +#[tokio::test] +async fn bulk_track_for_releases_requires_series_write() { + use codex::api::error::ErrorResponse; + + let (db, _temp_dir) = setup_test_db().await; + let library = LibraryRepository::create(&db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let s = SeriesRepository::create(&db, library.id, "Anything", None) + .await + .unwrap(); + + // Regular (non-admin) user — has reads but not SeriesWrite. + let password_hash = password::hash_password("user123").unwrap(); + let user = create_test_user("regular", "regular@example.com", &password_hash, false); + let created = UserRepository::create(&db, &user).await.unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = state + .jwt_service + .generate_token(created.id, created.username.clone(), created.get_role()) + .unwrap(); + let app = create_test_router(state).await; + + let request_body = BulkSeriesRequest { + series_ids: vec![s.id], + }; + let request = post_json_request_with_auth( + "/api/v1/series/bulk/track-for-releases", + &request_body, + &token, + ); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, request).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} diff --git a/tests/api/releases.rs b/tests/api/releases.rs new file mode 100644 index 00000000..de988941 --- /dev/null +++ b/tests/api/releases.rs @@ -0,0 +1,1612 @@ +//! Integration tests for the release ledger and release-source admin endpoints. 
+ +#[path = "../common/mod.rs"] +mod common; + +use codex::api::error::ErrorResponse; +use codex::api::routes::v1::dto::release::{ + BulkReleaseAction, BulkReleaseActionRequest, BulkReleaseActionResponse, DeleteReleaseResponse, + PollNowResponse, ReleaseFacetsResponse, ReleaseLedgerEntryDto, ReleaseSourceDto, + ReleaseSourceListResponse, ResetReleaseSourceResponse, UpdateReleaseLedgerEntryRequest, + UpdateReleaseSourceRequest, +}; +use codex::db::ScanningStrategy; +use codex::db::entities::release_sources::kind; +use codex::db::repositories::{ + LibraryRepository, NewReleaseEntry, NewReleaseSource, ReleaseLedgerRepository, + ReleaseSourceRepository, ReleaseSourceUpdate, SeriesRepository, UserRepository, +}; +use codex::utils::password; +use common::*; +use hyper::StatusCode; +use sea_orm::DatabaseConnection; +use uuid::Uuid; + +// ============================================================================= +// Helpers +// ============================================================================= + +async fn create_admin_and_token( + db: &DatabaseConnection, + state: &codex::api::extractors::AuthState, +) -> String { + let password_hash = password::hash_password("admin123").unwrap(); + let user = create_test_user("admin", "admin@example.com", &password_hash, true); + let created = UserRepository::create(db, &user).await.unwrap(); + state + .jwt_service + .generate_token(created.id, created.username.clone(), created.get_role()) + .unwrap() +} + +async fn create_reader_and_token( + db: &DatabaseConnection, + state: &codex::api::extractors::AuthState, +) -> String { + let password_hash = password::hash_password("reader123").unwrap(); + let user = create_test_user("reader", "reader@example.com", &password_hash, false); + let created = UserRepository::create(db, &user).await.unwrap(); + state + .jwt_service + .generate_token(created.id, created.username.clone(), created.get_role()) + .unwrap() +} + +async fn make_series(db: &DatabaseConnection) -> Uuid { + let 
library = LibraryRepository::create(db, "Lib", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(db, library.id, "Series", None) + .await + .unwrap(); + series.id +} + +async fn make_source(db: &DatabaseConnection, source_key: &str) -> Uuid { + let s = ReleaseSourceRepository::create( + db, + NewReleaseSource { + plugin_id: "release-nyaa".to_string(), + source_key: source_key.to_string(), + display_name: format!("Nyaa - {}", source_key), + kind: kind::RSS_UPLOADER.to_string(), + enabled: None, + config: None, + }, + ) + .await + .unwrap(); + s.id +} + +async fn record_announced( + db: &DatabaseConnection, + series_id: Uuid, + source_id: Uuid, + external_id: &str, +) -> Uuid { + let outcome = ReleaseLedgerRepository::record( + db, + NewReleaseEntry { + series_id, + source_id, + external_release_id: external_id.to_string(), + info_hash: None, + chapter: Some(143.0), + volume: None, + language: Some("en".to_string()), + format_hints: None, + group_or_uploader: Some("uploader".to_string()), + payload_url: format!("https://nyaa.si/view/{}", external_id), + media_url: None, + media_url_kind: None, + confidence: 0.95, + metadata: None, + observed_at: chrono::Utc::now(), + initial_state: None, + }, + ) + .await + .unwrap(); + outcome.row.id +} + +#[derive(serde::Deserialize)] +#[serde(rename_all = "camelCase")] +struct PaginatedDtoResponse<T> { + data: Vec<T>, + page: u64, + page_size: u64, + total: u64, +} + +// ============================================================================= +// GET /series/{id}/releases +// ============================================================================= + +#[tokio::test] +async fn list_series_releases_returns_entries_for_series() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let other = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + record_announced(&db, series, source, "rel-1").await; + 
record_announced(&db, series, source, "rel-2").await; + record_announced(&db, other, source, "rel-3").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth(&format!("/api/v1/series/{}/releases", series), &token); + let (status, body): ( + StatusCode, + Option<PaginatedDtoResponse<ReleaseLedgerEntryDto>>, + ) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.total, 2); + assert_eq!(body.data.len(), 2); + for entry in &body.data { + assert_eq!(entry.series_id, series); + assert_eq!( + entry.series_title, "Series", + "DTO should carry the series title joined from the series row" + ); + } +} + +#[tokio::test] +async fn list_series_releases_404_when_series_missing() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let fake = Uuid::new_v4(); + let req = get_request_with_auth(&format!("/api/v1/series/{}/releases", fake), &token); + let (status, _err): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn list_series_releases_filters_by_state() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let r1 = record_announced(&db, series, source, "rel-1").await; + let _r2 = record_announced(&db, series, source, "rel-2").await; + + // Dismiss r1. 
+ ReleaseLedgerRepository::set_state(&db, r1, "dismissed") + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth( + &format!("/api/v1/series/{}/releases?state=announced", series), + &token, + ); + let (status, body): ( + StatusCode, + Option<PaginatedDtoResponse<ReleaseLedgerEntryDto>>, + ) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.total, 1); + assert_eq!(body.data[0].state, "announced"); +} + +#[tokio::test] +async fn list_series_releases_rejects_invalid_state() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth( + &format!("/api/v1/series/{}/releases?state=garbage", series), + &token, + ); + let (status, _err): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn list_series_releases_requires_auth() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let app = create_test_router(state).await; + + let req = get_request(&format!("/api/v1/series/{}/releases", series)); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::UNAUTHORIZED); +} + +// ============================================================================= +// GET /releases (inbox) +// ============================================================================= + +#[tokio::test] +async fn inbox_lists_announced_by_default() { + let (db, _temp) = setup_test_db().await; + let series = 
make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let r1 = record_announced(&db, series, source, "rel-1").await; + let _r2 = record_announced(&db, series, source, "rel-2").await; + ReleaseLedgerRepository::set_state(&db, r1, "dismissed") + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/releases", &token); + let (status, body): ( + StatusCode, + Option<PaginatedDtoResponse<ReleaseLedgerEntryDto>>, + ) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.total, 1); + assert_eq!(body.data[0].external_release_id, "rel-2"); + assert_eq!(body.page, 1); + assert_eq!(body.page_size, 50); +} + +#[tokio::test] +async fn inbox_filters_by_series() { + let (db, _temp) = setup_test_db().await; + let s1 = make_series(&db).await; + let s2 = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + record_announced(&db, s1, source, "rel-1").await; + record_announced(&db, s2, source, "rel-2").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth(&format!("/api/v1/releases?seriesId={}", s1), &token); + let (status, body): ( + StatusCode, + Option<PaginatedDtoResponse<ReleaseLedgerEntryDto>>, + ) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.total, 1); + assert_eq!(body.data[0].external_release_id, "rel-1"); + assert_eq!( + body.data[0].series_title, "Series", + "inbox DTO should carry the series title for cross-series rendering" + ); +} + +// ============================================================================= +// PATCH /releases/{id} +// 
============================================================================= + +#[tokio::test] +async fn patch_release_state_transitions() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, source, "rel-1").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateReleaseLedgerEntryRequest { + state: Some("dismissed".to_string()), + }; + let req = patch_json_request_with_auth(&format!("/api/v1/releases/{}", id), &body, &token); + let (status, dto): (StatusCode, Option<ReleaseLedgerEntryDto>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(dto.unwrap().state, "dismissed"); +} + +#[tokio::test] +async fn patch_release_state_rejects_invalid() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, source, "rel-1").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateReleaseLedgerEntryRequest { + state: Some("garbage".to_string()), + }; + let req = patch_json_request_with_auth(&format!("/api/v1/releases/{}", id), &body, &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn patch_release_404_for_missing() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateReleaseLedgerEntryRequest { + state: 
Some("dismissed".to_string()), + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/releases/{}", Uuid::new_v4()), + &body, + &token, + ); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn dismiss_release_convenience_post() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, source, "rel-1").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth(&format!("/api/v1/releases/{}/dismiss", id), &token); + let (status, dto): (StatusCode, Option<ReleaseLedgerEntryDto>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(dto.unwrap().state, "dismissed"); +} + +#[tokio::test] +async fn mark_release_acquired_convenience_post() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, source, "rel-1").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth(&format!("/api/v1/releases/{}/mark-acquired", id), &token); + let (status, dto): (StatusCode, Option<ReleaseLedgerEntryDto>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(dto.unwrap().state, "marked_acquired"); +} + +#[tokio::test] +async fn patch_release_requires_write_permission() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, 
source, "rel-1").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_reader_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateReleaseLedgerEntryRequest { + state: Some("dismissed".to_string()), + }; + let req = patch_json_request_with_auth(&format!("/api/v1/releases/{}", id), &body, &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} + +// ============================================================================= +// GET /release-sources (admin) +// ============================================================================= + +#[tokio::test] +async fn list_release_sources_returns_all() { + let (db, _temp) = setup_test_db().await; + make_source(&db, "nyaa:user:tsuna69").await; + make_source(&db, "nyaa:user:other").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/release-sources", &token); + let (status, body): (StatusCode, Option<ReleaseSourceListResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(body.unwrap().sources.len(), 2); +} + +#[tokio::test] +async fn list_release_sources_requires_plugins_manage() { + let (db, _temp) = setup_test_db().await; + make_source(&db, "nyaa:user:tsuna69").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_reader_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/release-sources", &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} + +// ============================================================================= +// PATCH /release-sources/{id} +// 
============================================================================= + +#[tokio::test] +async fn patch_source_can_disable_and_change_interval() { + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateReleaseSourceRequest { + enabled: Some(false), + cron_schedule: Some(Some("0 */6 * * *".to_string())), + ..Default::default() + }; + let req = + patch_json_request_with_auth(&format!("/api/v1/release-sources/{}", id), &body, &token); + let (status, dto): (StatusCode, Option<ReleaseSourceDto>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let dto = dto.unwrap(); + assert!(!dto.enabled); + assert_eq!(dto.cron_schedule.as_deref(), Some("0 */6 * * *")); + assert_eq!(dto.effective_cron_schedule, "0 */6 * * *"); +} + +#[tokio::test] +async fn patch_source_rejects_invalid_cron() { + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateReleaseSourceRequest { + cron_schedule: Some(Some("not a cron".to_string())), + ..Default::default() + }; + let req = + patch_json_request_with_auth(&format!("/api/v1/release-sources/{}", id), &body, &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn patch_source_clears_cron_with_explicit_null() { + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + + // Seed a per-source override. 
+ ReleaseSourceRepository::update( + &db, + id, + ReleaseSourceUpdate { + cron_schedule: Some(Some("0 */6 * * *".to_string())), + ..Default::default() + }, + ) + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + // Send `cron_schedule: null` to clear the override. + let body = serde_json::json!({ "cronSchedule": null }); + let req = + patch_json_request_with_auth(&format!("/api/v1/release-sources/{}", id), &body, &token); + let (status, dto): (StatusCode, Option<ReleaseSourceDto>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let dto = dto.unwrap(); + assert!(dto.cron_schedule.is_none(), "override cleared"); + // effectiveCronSchedule falls through to the server-wide default. + assert!(!dto.effective_cron_schedule.is_empty()); +} + +#[tokio::test] +async fn patch_source_404_for_missing() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateReleaseSourceRequest { + enabled: Some(false), + ..Default::default() + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/release-sources/{}", Uuid::new_v4()), + &body, + &token, + ); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn patch_source_requires_plugins_manage() { + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_reader_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateReleaseSourceRequest { + enabled: Some(false), + ..Default::default() + }; + let req = + 
patch_json_request_with_auth(&format!("/api/v1/release-sources/{}", id), &body, &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} + +// ============================================================================= +// POST /release-sources/{id}/poll-now +// ============================================================================= + +#[tokio::test] +async fn poll_now_enqueues_task_when_source_exists() { + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth(&format!("/api/v1/release-sources/{}/poll-now", id), &token); + let (status, body): (StatusCode, Option<PollNowResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::ACCEPTED); + let body = body.unwrap(); + assert_eq!(body.status, "enqueued"); + assert!(body.message.contains("task_id=")); + + // Verify the task landed on the queue. + use codex::db::repositories::TaskRepository; + let pending = TaskRepository::list( + &db, + Some("pending".to_string()), + Some("poll_release_source".to_string()), + Some(10), + ) + .await + .unwrap(); + assert!( + !pending.is_empty(), + "expected a poll_release_source task to be pending" + ); +} + +#[tokio::test] +async fn poll_now_dedupes_concurrent_requests_onto_in_flight_task() { + // Regression: clicking "Poll now" twice quickly previously enqueued + // two independent tasks. With worker_count >= 2 they'd race on + // last_summary / last_polled_at writes and overlap upstream fetches. + // We now coalesce onto the existing pending/processing task. 
+ use codex::db::repositories::TaskRepository; + + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app1 = create_test_router(state.clone()).await; + let app2 = create_test_router(state).await; + + // First click: enqueues a fresh task. + let req = post_request_with_auth(&format!("/api/v1/release-sources/{}/poll-now", id), &token); + let (s1, b1): (StatusCode, Option<PollNowResponse>) = make_json_request(app1, req).await; + assert_eq!(s1, StatusCode::ACCEPTED); + let b1 = b1.unwrap(); + assert_eq!(b1.status, "enqueued"); + + // Second click while the first is still pending: coalesce. + let req = post_request_with_auth(&format!("/api/v1/release-sources/{}/poll-now", id), &token); + let (s2, b2): (StatusCode, Option<PollNowResponse>) = make_json_request(app2, req).await; + assert_eq!(s2, StatusCode::ACCEPTED); + let b2 = b2.unwrap(); + assert_eq!( + b2.status, "already_running", + "second poll-now must coalesce onto the in-flight task" + ); + assert!( + b2.message.contains("coalesced"), + "human-readable message should explain the coalesce" + ); + + // Only one task should sit on the queue, not two. + let pending = TaskRepository::list( + &db, + Some("pending".to_string()), + Some("poll_release_source".to_string()), + Some(10), + ) + .await + .unwrap(); + assert_eq!( + pending.len(), + 1, + "duplicate poll-now must not stack tasks; got {} pending", + pending.len() + ); +} + +#[tokio::test] +async fn poll_now_conflicts_when_source_disabled() { + use codex::db::repositories::{ReleaseSourceRepository, ReleaseSourceUpdate}; + + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + // Disable it. 
+ ReleaseSourceRepository::update( + &db, + id, + ReleaseSourceUpdate { + enabled: Some(false), + ..Default::default() + }, + ) + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth(&format!("/api/v1/release-sources/{}/poll-now", id), &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::CONFLICT); +} + +#[tokio::test] +async fn poll_now_404_when_source_missing() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth( + &format!("/api/v1/release-sources/{}/poll-now", Uuid::new_v4()), + &token, + ); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn poll_now_requires_plugins_manage() { + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_reader_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth(&format!("/api/v1/release-sources/{}/poll-now", id), &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} + +// ============================================================================= +// POST /release-sources/{id}/reset +// ============================================================================= + +#[tokio::test] +async fn reset_clears_ledger_rows_and_poll_state() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + 
let source = make_source(&db, "nyaa:user:tsuna69").await; + let other_source = make_source(&db, "nyaa:user:other").await; + + record_announced(&db, series, source, "rel-1").await; + record_announced(&db, series, source, "rel-2").await; + // A row on a different source must NOT be touched. + record_announced(&db, series, other_source, "rel-keep").await; + + // Seed poll state on the target source so we can prove it's cleared. + ReleaseSourceRepository::record_poll_success( + &db, + source, + chrono::Utc::now(), + Some("\"etag-1\"".to_string()), + Some("Fetched 2 items".to_string()), + ) + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth(&format!("/api/v1/release-sources/{}/reset", source), &token); + let (status, body): (StatusCode, Option<ResetReleaseSourceResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(body.unwrap().deleted_ledger_entries, 2); + + // Target source: ledger rows gone, poll state cleared. + let after = ReleaseSourceRepository::get_by_id(&db, source) + .await + .unwrap() + .unwrap(); + assert!(after.etag.is_none()); + assert!(after.last_polled_at.is_none()); + assert!(after.last_summary.is_none()); + + // Other source's row survives. + let surviving = ReleaseLedgerRepository::list_for_series(&db, series, None, 100, 0) + .await + .unwrap(); + assert_eq!(surviving.len(), 1); + assert_eq!(surviving[0].source_id, other_source); + assert_eq!(surviving[0].external_release_id, "rel-keep"); +} + +#[tokio::test] +async fn reset_preserves_user_managed_source_fields() { + use codex::db::repositories::ReleaseSourceUpdate; + + let (db, _temp) = setup_test_db().await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + + // Admin disables the source and overrides the schedule. 
+ ReleaseSourceRepository::update( + &db, + source, + ReleaseSourceUpdate { + enabled: Some(false), + cron_schedule: Some(Some("0 */6 * * *".to_string())), + display_name: Some("Custom Name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth(&format!("/api/v1/release-sources/{}/reset", source), &token); + let (status, _): (StatusCode, Option<ResetReleaseSourceResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + + let after = ReleaseSourceRepository::get_by_id(&db, source) + .await + .unwrap() + .unwrap(); + assert!(!after.enabled, "user-set enabled flag must survive a reset"); + assert_eq!( + after.cron_schedule.as_deref(), + Some("0 */6 * * *"), + "schedule override survives" + ); + assert_eq!(after.display_name, "Custom Name", "display name preserved"); +} + +#[tokio::test] +async fn reset_404_when_source_missing() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth( + &format!("/api/v1/release-sources/{}/reset", Uuid::new_v4()), + &token, + ); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn reset_requires_plugins_manage() { + let (db, _temp) = setup_test_db().await; + let id = make_source(&db, "nyaa:user:tsuna69").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_reader_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = post_request_with_auth(&format!("/api/v1/release-sources/{}/reset", id), &token); + let (status, _): (StatusCode, 
Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} + +// ============================================================================= +// GET /release-sources/applicability (round D) +// ============================================================================= + +/// Helper: insert an enabled plugin row carrying a manifest with the +/// `releaseSource` capability, optionally scoped to `library_ids`. +async fn make_release_source_plugin( + db: &DatabaseConnection, + name: &str, + display_name: &str, + library_ids: Vec<Uuid>, +) -> Uuid { + use codex::db::repositories::PluginsRepository; + use serde_json::json; + + let plugin = PluginsRepository::create( + db, + name, + display_name, + Some("test release-source plugin"), + "system", + "echo", + vec!["ok".to_string()], + vec![], + None, + vec![], + vec![], + library_ids, + None, + "none", + None, + true, // enabled + None, + None, + ) + .await + .unwrap(); + + // Manifest must declare the release_source capability for the + // applicability handler to count this plugin. + let manifest = json!({ + "name": name, + "displayName": display_name, + "version": "1.0.0", + "protocolVersion": "1.0", + "capabilities": { + "releaseSource": { + "kinds": ["rss-uploader"], + "requiresAliases": true, + "canAnnounceChapters": true, + "canAnnounceVolumes": true, + "defaultPollIntervalS": 3600 + } + } + }); + PluginsRepository::update_manifest(db, plugin.id, Some(manifest)) + .await + .unwrap(); + plugin.id +} + +/// Helper: insert an enabled plugin without the release-source capability. 
+async fn make_metadata_only_plugin(db: &DatabaseConnection, name: &str) -> Uuid { + use codex::db::repositories::PluginsRepository; + use serde_json::json; + + let plugin = PluginsRepository::create( + db, + name, + name, + None, + "system", + "echo", + vec!["ok".to_string()], + vec![], + None, + vec![], + vec![], + vec![], + None, + "none", + None, + true, + None, + None, + ) + .await + .unwrap(); + let manifest = json!({ + "name": name, + "displayName": name, + "version": "1.0.0", + "protocolVersion": "1.0", + "capabilities": { + "metadataProvider": ["series"] + } + }); + PluginsRepository::update_manifest(db, plugin.id, Some(manifest)) + .await + .unwrap(); + plugin.id +} + +#[derive(serde::Deserialize)] +#[serde(rename_all = "camelCase")] +struct ApplicabilityResponseDto { + applicable: bool, + plugin_display_names: Vec<String>, +} + +#[tokio::test] +async fn applicability_false_when_no_release_source_plugins() { + let (db, _temp) = setup_test_db().await; + // A metadata-only plugin must not register as applicable. + make_metadata_only_plugin(&db, "metadata-only").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/release-sources/applicability", &token); + let (status, body): (StatusCode, Option<ApplicabilityResponseDto>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert!(!body.applicable); + assert!(body.plugin_display_names.is_empty()); +} + +#[tokio::test] +async fn applicability_true_when_global_plugin_no_library_filter() { + let (db, _temp) = setup_test_db().await; + // Empty library_ids means "all libraries". 
+ make_release_source_plugin(&db, "release-nyaa", "Nyaa", vec![]).await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/release-sources/applicability", &token); + let (status, body): (StatusCode, Option<ApplicabilityResponseDto>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert!(body.applicable); + assert_eq!(body.plugin_display_names, vec!["Nyaa".to_string()]); +} + +#[tokio::test] +async fn applicability_filters_by_library_when_plugin_is_scoped() { + let (db, _temp) = setup_test_db().await; + let lib_a = codex::db::repositories::LibraryRepository::create( + &db, + "A", + "/a", + codex::db::ScanningStrategy::Default, + ) + .await + .unwrap(); + let lib_b = codex::db::repositories::LibraryRepository::create( + &db, + "B", + "/b", + codex::db::ScanningStrategy::Default, + ) + .await + .unwrap(); + // Plugin scoped to lib_a only. + make_release_source_plugin(&db, "release-nyaa", "Nyaa", vec![lib_a.id]).await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + + // Query for lib_a → applicable. + let app = create_test_router(state.clone()).await; + let req = get_request_with_auth( + &format!( + "/api/v1/release-sources/applicability?libraryId={}", + lib_a.id + ), + &token, + ); + let (s_a, b_a): (StatusCode, Option<ApplicabilityResponseDto>) = + make_json_request(app, req).await; + assert_eq!(s_a, StatusCode::OK); + assert!(b_a.unwrap().applicable); + + // Query for lib_b → not applicable. 
+ let app = create_test_router(state.clone()).await; + let req = get_request_with_auth( + &format!( + "/api/v1/release-sources/applicability?libraryId={}", + lib_b.id + ), + &token, + ); + let (s_b, b_b): (StatusCode, Option<ApplicabilityResponseDto>) = + make_json_request(app, req).await; + assert_eq!(s_b, StatusCode::OK); + let b_b = b_b.unwrap(); + assert!(!b_b.applicable); + assert!(b_b.plugin_display_names.is_empty()); + + // No libraryId filter → applicable (the plugin still exists globally). + let app = create_test_router(state).await; + let req = get_request_with_auth("/api/v1/release-sources/applicability", &token); + let (s_all, b_all): (StatusCode, Option<ApplicabilityResponseDto>) = + make_json_request(app, req).await; + assert_eq!(s_all, StatusCode::OK); + assert!(b_all.unwrap().applicable); +} + +#[tokio::test] +async fn applicability_global_plugin_applies_to_any_library() { + let (db, _temp) = setup_test_db().await; + let lib = codex::db::repositories::LibraryRepository::create( + &db, + "L", + "/l", + codex::db::ScanningStrategy::Default, + ) + .await + .unwrap(); + // Global (empty library_ids) plugin should match any libraryId query. 
+ make_release_source_plugin(&db, "release-mu", "MangaUpdates", vec![]).await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth( + &format!("/api/v1/release-sources/applicability?libraryId={}", lib.id), + &token, + ); + let (status, body): (StatusCode, Option<ApplicabilityResponseDto>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert!(body.applicable); + assert_eq!(body.plugin_display_names, vec!["MangaUpdates".to_string()]); +} + +#[tokio::test] +async fn applicability_aggregates_multiple_plugins() { + let (db, _temp) = setup_test_db().await; + make_release_source_plugin(&db, "release-nyaa", "Nyaa", vec![]).await; + make_release_source_plugin(&db, "release-mu", "MangaUpdates", vec![]).await; + // A non-release plugin should not bleed into the response. + make_metadata_only_plugin(&db, "metadata-only").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/release-sources/applicability", &token); + let (status, body): (StatusCode, Option<ApplicabilityResponseDto>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert!(body.applicable); + assert_eq!(body.plugin_display_names.len(), 2); + assert!(body.plugin_display_names.contains(&"Nyaa".to_string())); + assert!( + body.plugin_display_names + .contains(&"MangaUpdates".to_string()) + ); +} + +#[tokio::test] +async fn applicability_requires_series_read() { + let (db, _temp) = setup_test_db().await; + make_release_source_plugin(&db, "release-nyaa", "Nyaa", vec![]).await; + + // A user with no role at all (not even reader) — but our `create_reader_and_token` + // creates a regular non-admin user who 
DOES have SeriesRead, so the check + // would pass. Instead we exercise the unauthenticated path here, which is + // the only realistic 401/403 surface — every authenticated user has + // SeriesRead. This still proves the route enforces auth. + let state = create_test_auth_state(db.clone()).await; + let app = create_test_router(state).await; + + let req = get_request("/api/v1/release-sources/applicability"); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::UNAUTHORIZED); +} + +#[tokio::test] +async fn applicability_skips_disabled_plugins() { + use codex::db::repositories::PluginsRepository; + + let (db, _temp) = setup_test_db().await; + let plugin_id = make_release_source_plugin(&db, "release-nyaa", "Nyaa", vec![]).await; + // Disable it — should drop out of the applicability list. + PluginsRepository::disable(&db, plugin_id, None) + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/release-sources/applicability", &token); + let (status, body): (StatusCode, Option<ApplicabilityResponseDto>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert!( + !body.applicable, + "disabled plugins must not contribute to applicability" + ); +} + +// ============================================================================= +// GET /releases (state=all + libraryId filter) +// ============================================================================= + +async fn make_series_in(db: &DatabaseConnection, library_name: &str, series_name: &str) -> Uuid { + let library = LibraryRepository::create( + db, + library_name, + &format!("/{}", library_name), + ScanningStrategy::Default, + ) + .await + .unwrap(); + let series = SeriesRepository::create(db, library.id, series_name, 
None) + .await + .unwrap(); + series.id +} + +async fn library_id_for_series(db: &DatabaseConnection, series_id: Uuid) -> Uuid { + SeriesRepository::get_by_id(db, series_id) + .await + .unwrap() + .unwrap() + .library_id +} + +#[tokio::test] +async fn inbox_state_all_returns_all_states() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let r1 = record_announced(&db, series, source, "rel-1").await; + record_announced(&db, series, source, "rel-2").await; + ReleaseLedgerRepository::set_state(&db, r1, "dismissed") + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/releases?state=all", &token); + let (status, body): ( + StatusCode, + Option<PaginatedDtoResponse<ReleaseLedgerEntryDto>>, + ) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!( + body.total, 2, + "state=all must return both announced and dismissed rows" + ); +} + +#[tokio::test] +async fn inbox_filters_by_library_id() { + let (db, _temp) = setup_test_db().await; + let s_manga = make_series_in(&db, "Manga", "Manga Series").await; + let s_books = make_series_in(&db, "Books", "Book Series").await; + let manga_lib = library_id_for_series(&db, s_manga).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + record_announced(&db, s_manga, source, "rel-manga").await; + record_announced(&db, s_books, source, "rel-book").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth(&format!("/api/v1/releases?libraryId={}", manga_lib), &token); + let (status, body): ( + StatusCode, + 
Option<PaginatedDtoResponse<ReleaseLedgerEntryDto>>, + ) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!(body.total, 1); + assert_eq!(body.data[0].external_release_id, "rel-manga"); +} + +// ============================================================================= +// GET /releases/facets +// ============================================================================= + +#[tokio::test] +async fn facets_returns_distinct_languages_libraries_series() { + let (db, _temp) = setup_test_db().await; + let s_manga = make_series_in(&db, "Manga", "Manga Series").await; + let s_books = make_series_in(&db, "Books", "Book Series").await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + record_announced(&db, s_manga, source, "rel-1").await; + record_announced(&db, s_manga, source, "rel-2").await; + record_announced(&db, s_books, source, "rel-3").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth("/api/v1/releases/facets", &token); + let (status, body): (StatusCode, Option<ReleaseFacetsResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + + // One language ("en"), two libraries, two series. 
+ assert_eq!(body.languages.len(), 1); + assert_eq!(body.languages[0].language, "en"); + assert_eq!(body.languages[0].count, 3); + + assert_eq!(body.libraries.len(), 2); + let manga_lib = body + .libraries + .iter() + .find(|l| l.library_name == "Manga") + .expect("Manga library facet present"); + assert_eq!(manga_lib.count, 2); + + assert_eq!(body.series.len(), 2); + let manga_series = body + .series + .iter() + .find(|s| s.series_title == "Manga Series") + .expect("Manga series facet present"); + assert_eq!(manga_series.library_name, "Manga"); + assert_eq!(manga_series.count, 2); +} + +#[tokio::test] +async fn facets_excludes_self_dimension_so_dropdowns_dont_collapse() { + // When the caller passes seriesId=X, the *series* facet should still + // list all series (not just X). Otherwise the dropdown would collapse + // to the active selection and the user couldn't switch series. + let (db, _temp) = setup_test_db().await; + let s1 = make_series_in(&db, "Manga", "S1").await; + let s2 = make_series_in(&db, "Manga", "S2").await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + record_announced(&db, s1, source, "rel-1").await; + record_announced(&db, s2, source, "rel-2").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth(&format!("/api/v1/releases/facets?seriesId={}", s1), &token); + let (status, body): (StatusCode, Option<ReleaseFacetsResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let body = body.unwrap(); + assert_eq!( + body.series.len(), + 2, + "series facet must not be filtered by the active seriesId" + ); +} + +#[tokio::test] +async fn facets_requires_auth() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let app = create_test_router(state).await; + + let req = get_request("/api/v1/releases/facets"); + 
let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::UNAUTHORIZED); +} + +// ============================================================================= +// DELETE /releases/{id} +// ============================================================================= + +#[tokio::test] +async fn delete_release_removes_row_and_clears_source_etag() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, source, "rel-1").await; + + // Seed an etag so we can verify it gets cleared. + ReleaseSourceRepository::record_poll_success( + &db, + source, + chrono::Utc::now(), + Some("\"abc123\"".to_string()), + None, + ) + .await + .unwrap(); + let pre = ReleaseSourceRepository::get_by_id(&db, source) + .await + .unwrap() + .unwrap(); + assert!(pre.etag.is_some(), "test setup: etag should be set"); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = delete_request_with_auth(&format!("/api/v1/releases/{}", id), &token); + let (status, body): (StatusCode, Option<DeleteReleaseResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert!(body.unwrap().deleted); + + // Row gone. + assert!( + ReleaseLedgerRepository::get_by_id(&db, id) + .await + .unwrap() + .is_none() + ); + // Etag cleared. 
+ let post = ReleaseSourceRepository::get_by_id(&db, source) + .await + .unwrap() + .unwrap(); + assert!( + post.etag.is_none(), + "delete must clear the source's etag so the next poll re-fetches" + ); +} + +#[tokio::test] +async fn delete_release_404_for_missing() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = delete_request_with_auth(&format!("/api/v1/releases/{}", Uuid::new_v4()), &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn delete_release_forbidden_for_reader() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, source, "rel-1").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_reader_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = delete_request_with_auth(&format!("/api/v1/releases/{}", id), &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} + +// ============================================================================= +// POST /releases/bulk +// ============================================================================= + +#[tokio::test] +async fn bulk_dismiss_updates_state_for_listed_ids() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id1 = record_announced(&db, series, source, "rel-1").await; + let id2 = record_announced(&db, series, source, "rel-2").await; + let id3 = record_announced(&db, series, source, "rel-3").await; + + let state = 
create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = BulkReleaseActionRequest { + ids: vec![id1, id2], + action: BulkReleaseAction::Dismiss, + }; + let req = post_json_request_with_auth("/api/v1/releases/bulk", &body, &token); + let (status, resp): (StatusCode, Option<BulkReleaseActionResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let resp = resp.unwrap(); + assert_eq!(resp.affected, 2); + assert_eq!(resp.action, BulkReleaseAction::Dismiss); + + // Selected rows were dismissed; the third stays announced. + assert_eq!( + ReleaseLedgerRepository::get_by_id(&db, id1) + .await + .unwrap() + .unwrap() + .state, + "dismissed" + ); + assert_eq!( + ReleaseLedgerRepository::get_by_id(&db, id3) + .await + .unwrap() + .unwrap() + .state, + "announced" + ); +} + +#[tokio::test] +async fn bulk_ignore_sets_state_to_ignored() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, source, "rel-i").await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = BulkReleaseActionRequest { + ids: vec![id], + action: BulkReleaseAction::Ignore, + }; + let req = post_json_request_with_auth("/api/v1/releases/bulk", &body, &token); + let (status, resp): (StatusCode, Option<BulkReleaseActionResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(resp.unwrap().affected, 1); + + assert_eq!( + ReleaseLedgerRepository::get_by_id(&db, id) + .await + .unwrap() + .unwrap() + .state, + "ignored" + ); +} + +#[tokio::test] +async fn bulk_reset_returns_state_to_announced_from_any_state() { + let (db, _temp) = setup_test_db().await; + let series = 
make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id_d = record_announced(&db, series, source, "rel-d").await; + let id_a = record_announced(&db, series, source, "rel-a").await; + let id_i = record_announced(&db, series, source, "rel-i").await; + + // Move each into a different non-announced state via direct repo call. + ReleaseLedgerRepository::set_state(&db, id_d, "dismissed") + .await + .unwrap(); + ReleaseLedgerRepository::set_state(&db, id_a, "marked_acquired") + .await + .unwrap(); + ReleaseLedgerRepository::set_state(&db, id_i, "ignored") + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = BulkReleaseActionRequest { + ids: vec![id_d, id_a, id_i], + action: BulkReleaseAction::Reset, + }; + let req = post_json_request_with_auth("/api/v1/releases/bulk", &body, &token); + let (status, resp): (StatusCode, Option<BulkReleaseActionResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(resp.unwrap().affected, 3); + + for id in [id_d, id_a, id_i] { + assert_eq!( + ReleaseLedgerRepository::get_by_id(&db, id) + .await + .unwrap() + .unwrap() + .state, + "announced", + ); + } +} + +#[tokio::test] +async fn bulk_delete_clears_etags_on_affected_sources_only() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let src_a = make_source(&db, "nyaa:user:a").await; + let src_b = make_source(&db, "nyaa:user:b").await; + let id_a = record_announced(&db, series, src_a, "rel-a").await; + let _id_b = record_announced(&db, series, src_b, "rel-b").await; + + // Seed etags on both sources. 
+ for src in [src_a, src_b] { + ReleaseSourceRepository::record_poll_success( + &db, + src, + chrono::Utc::now(), + Some("\"etag\"".to_string()), + None, + ) + .await + .unwrap(); + } + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = BulkReleaseActionRequest { + ids: vec![id_a], + action: BulkReleaseAction::Delete, + }; + let req = post_json_request_with_auth("/api/v1/releases/bulk", &body, &token); + let (status, resp): (StatusCode, Option<BulkReleaseActionResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(resp.unwrap().affected, 1); + + // src_a touched (etag cleared), src_b untouched (etag preserved). + assert!( + ReleaseSourceRepository::get_by_id(&db, src_a) + .await + .unwrap() + .unwrap() + .etag + .is_none() + ); + assert!( + ReleaseSourceRepository::get_by_id(&db, src_b) + .await + .unwrap() + .unwrap() + .etag + .is_some(), + "untouched sources must keep their etag" + ); +} + +#[tokio::test] +async fn bulk_rejects_empty_ids() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = BulkReleaseActionRequest { + ids: vec![], + action: BulkReleaseAction::Dismiss, + }; + let req = post_json_request_with_auth("/api/v1/releases/bulk", &body, &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn bulk_forbidden_for_reader() { + let (db, _temp) = setup_test_db().await; + let series = make_series(&db).await; + let source = make_source(&db, "nyaa:user:tsuna69").await; + let id = record_announced(&db, series, source, "rel-1").await; + + let state = create_test_auth_state(db.clone()).await; + let token = 
create_reader_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = BulkReleaseActionRequest { + ids: vec![id], + action: BulkReleaseAction::Dismiss, + }; + let req = post_json_request_with_auth("/api/v1/releases/bulk", &body, &token); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} diff --git a/tests/api/series.rs b/tests/api/series.rs index 3a5d9427..bbf656e6 100644 --- a/tests/api/series.rs +++ b/tests/api/series.rs @@ -6,8 +6,9 @@ use codex::api::routes::v1::dto::book::BookDto; use codex::api::routes::v1::dto::series::{SearchSeriesRequest, SeriesDto, SeriesListResponse}; use codex::db::ScanningStrategy; use codex::db::repositories::{ - BookMetadataRepository, BookRepository, LibraryRepository, SeriesMetadataRepository, - SeriesRepository, UserRepository, + BookMetadataRepository, BookRepository, LibraryRepository, SeriesExternalIdRepository, + SeriesMetadataRepository, SeriesRepository, SeriesTrackingRepository, TrackingUpdate, + UserRepository, }; use codex::utils::password; use common::*; @@ -695,6 +696,260 @@ async fn test_get_series_classification_aggregates_absent_when_unclassified() { assert_eq!(dto.volumes_owned, Some(0)); } +// ============================================================================ +// Phase 5: Upstream-publication gap signal +// ============================================================================ + +/// Inputs for [`setup_tracked_series_with_gap`]. +struct TrackedSeriesGapSetup<'a> { + track_chapters: bool, + track_volumes: bool, + total_chapter_count: Option<f32>, + total_volume_count: Option<i32>, + local_max_chapter: Option<f32>, + local_max_volume: Option<i32>, + external_id_source: Option<&'a str>, +} + +/// Helper: seed a tracked series with locally-classified books and a metadata +/// provider's `total_*_count` so the upstream gap signal has something to +/// compute against. 
+async fn setup_tracked_series_with_gap( + db: &sea_orm::DatabaseConnection, + setup: TrackedSeriesGapSetup<'_>, +) -> uuid::Uuid { + use sea_orm::{ActiveModelTrait, Set}; + + let library = LibraryRepository::create(db, "Library", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(db, library.id, "Tracked Series", None) + .await + .unwrap(); + + // Local classification: a single book carrying the desired aggregates. + let mut book = create_test_book(series.id, library.id, "/v1.cbz", "v1.cbz", None); + book.file_hash = format!("hash_{}", uuid::Uuid::new_v4()); + let created = BookRepository::create(db, &book, None).await.unwrap(); + let meta = BookMetadataRepository::create_with_title_and_number(db, created.id, None, None) + .await + .unwrap(); + let mut active: codex::db::entities::book_metadata::ActiveModel = meta.into(); + active.volume = Set(setup.local_max_volume); + active.chapter = Set(setup.local_max_chapter); + active.update(db).await.unwrap(); + + // Provider counts on series_metadata. + if let Some(c) = setup.total_chapter_count { + SeriesMetadataRepository::update_total_chapter_count(db, series.id, Some(c)) + .await + .unwrap(); + } + if let Some(v) = setup.total_volume_count { + SeriesMetadataRepository::update_total_volume_count(db, series.id, Some(v)) + .await + .unwrap(); + } + + // Tracking row. 
+ SeriesTrackingRepository::upsert( + db, + series.id, + TrackingUpdate { + tracked: Some(true), + track_chapters: Some(setup.track_chapters), + track_volumes: Some(setup.track_volumes), + ..Default::default() + }, + ) + .await + .unwrap(); + + if let Some(source) = setup.external_id_source { + SeriesExternalIdRepository::create(db, series.id, source, "1234", None, None) + .await + .unwrap(); + } + + series.id +} + +#[tokio::test] +async fn test_get_series_upstream_gap_tracked_series_with_provider_ahead() { + let (db, _temp_dir) = setup_test_db().await; + + let series_id = setup_tracked_series_with_gap( + &db, + TrackedSeriesGapSetup { + track_chapters: true, + track_volumes: true, + total_chapter_count: Some(145.0), + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_id_source: Some("plugin:mangabaka"), + }, + ) + .await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request = get_request_with_auth(&format!("/api/v1/series/{}", series_id), &token); + let (status, response): (StatusCode, Option<SeriesDto>) = make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let dto = response.unwrap(); + assert_eq!(dto.upstream_chapter_gap, Some(3.0)); + assert_eq!(dto.upstream_volume_gap, Some(1)); + assert_eq!(dto.upstream_gap_provider.as_deref(), Some("MangaBaka")); +} + +#[tokio::test] +async fn test_get_series_upstream_gap_omitted_when_not_tracked() { + use sea_orm::{ActiveModelTrait, Set}; + + let (db, _temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Library", "/lib", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Untracked", None) + .await + .unwrap(); + // Local book. 
+ let mut book = create_test_book(series.id, library.id, "/v1.cbz", "v1.cbz", None); + book.file_hash = "hash_untracked_1".to_string(); + let created = BookRepository::create(&db, &book, None).await.unwrap(); + let meta = BookMetadataRepository::create_with_title_and_number(&db, created.id, None, None) + .await + .unwrap(); + let mut active: codex::db::entities::book_metadata::ActiveModel = meta.into(); + active.volume = Set(Some(14)); + active.chapter = Set(Some(142.0)); + active.update(&db).await.unwrap(); + // Provider counts populated, but series is not tracked. + SeriesMetadataRepository::update_total_chapter_count(&db, series.id, Some(145.0)) + .await + .unwrap(); + SeriesMetadataRepository::update_total_volume_count(&db, series.id, Some(15)) + .await + .unwrap(); + // Note: no tracking row set up; series defaults to untracked. + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request = get_request_with_auth(&format!("/api/v1/series/{}", series.id), &token); + let (status, response): (StatusCode, Option<SeriesDto>) = make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let dto = response.unwrap(); + assert_eq!(dto.upstream_chapter_gap, None); + assert_eq!(dto.upstream_volume_gap, None); + assert_eq!(dto.upstream_gap_provider, None); +} + +#[tokio::test] +async fn test_get_series_upstream_gap_track_chapters_false_suppresses_chapter_gap() { + let (db, _temp_dir) = setup_test_db().await; + + let series_id = setup_tracked_series_with_gap( + &db, + TrackedSeriesGapSetup { + track_chapters: false, + track_volumes: true, + total_chapter_count: Some(145.0), + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_id_source: Some("plugin:mangabaka"), + }, + ) + .await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, 
&state).await; + let app = create_test_router(state).await; + + let request = get_request_with_auth(&format!("/api/v1/series/{}", series_id), &token); + let (status, response): (StatusCode, Option<SeriesDto>) = make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let dto = response.unwrap(); + assert_eq!(dto.upstream_chapter_gap, None); + assert_eq!(dto.upstream_volume_gap, Some(1)); + // Provider is still attributed because at least one axis is populated. + assert_eq!(dto.upstream_gap_provider.as_deref(), Some("MangaBaka")); +} + +#[tokio::test] +async fn test_get_series_upstream_gap_provider_count_missing_suppresses_axis() { + let (db, _temp_dir) = setup_test_db().await; + + // total_chapter_count is None — chapter axis must be omitted. + let series_id = setup_tracked_series_with_gap( + &db, + TrackedSeriesGapSetup { + track_chapters: true, + track_volumes: true, + total_chapter_count: None, + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_id_source: Some("plugin:mangabaka"), + }, + ) + .await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request = get_request_with_auth(&format!("/api/v1/series/{}", series_id), &token); + let (status, response): (StatusCode, Option<SeriesDto>) = make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let dto = response.unwrap(); + assert_eq!(dto.upstream_chapter_gap, None); + assert_eq!(dto.upstream_volume_gap, Some(1)); +} + +#[tokio::test] +async fn test_get_series_upstream_gap_axes_independent() { + let (db, _temp_dir) = setup_test_db().await; + + // Volume gap exists, chapter gap is zero (provider matches local). 
+ let series_id = setup_tracked_series_with_gap( + &db, + TrackedSeriesGapSetup { + track_chapters: true, + track_volumes: true, + total_chapter_count: Some(142.0), + total_volume_count: Some(15), + local_max_chapter: Some(142.0), + local_max_volume: Some(14), + external_id_source: Some("plugin:anilist"), + }, + ) + .await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let request = get_request_with_auth(&format!("/api/v1/series/{}", series_id), &token); + let (status, response): (StatusCode, Option<SeriesDto>) = make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let dto = response.unwrap(); + assert_eq!(dto.upstream_chapter_gap, None); + assert_eq!(dto.upstream_volume_gap, Some(1)); + assert_eq!(dto.upstream_gap_provider.as_deref(), Some("AniList")); +} + #[tokio::test] async fn test_get_series_not_found() { let (db, _temp_dir) = setup_test_db().await; diff --git a/tests/api/tracking.rs b/tests/api/tracking.rs new file mode 100644 index 00000000..41080b3a --- /dev/null +++ b/tests/api/tracking.rs @@ -0,0 +1,622 @@ +//! Integration tests for release-tracking config + alias endpoints. 
+ +#[path = "../common/mod.rs"] +mod common; + +use codex::api::error::ErrorResponse; +use codex::api::routes::v1::dto::tracking::{ + CreateSeriesAliasRequest, SeriesAliasDto, SeriesAliasListResponse, SeriesTrackingDto, + UpdateSeriesTrackingRequest, +}; +use codex::db::ScanningStrategy; +use codex::db::repositories::{LibraryRepository, SeriesRepository, UserRepository}; +use codex::utils::password; +use common::*; +use hyper::StatusCode; +use uuid::Uuid; + +async fn create_admin_and_token( + db: &sea_orm::DatabaseConnection, + state: &codex::api::extractors::AuthState, +) -> String { + let password_hash = password::hash_password("admin123").unwrap(); + let user = create_test_user("admin", "admin@example.com", &password_hash, true); + let created = UserRepository::create(db, &user).await.unwrap(); + state + .jwt_service + .generate_token(created.id, created.username.clone(), created.get_role()) + .unwrap() +} + +async fn create_regular_user_and_token( + db: &sea_orm::DatabaseConnection, + state: &codex::api::extractors::AuthState, +) -> String { + let password_hash = password::hash_password("user123").unwrap(); + let user = create_test_user("regular", "user@example.com", &password_hash, false); + let created = UserRepository::create(db, &user).await.unwrap(); + state + .jwt_service + .generate_token(created.id, created.username.clone(), created.get_role()) + .unwrap() +} + +async fn create_test_series(db: &sea_orm::DatabaseConnection) -> (Uuid, Uuid) { + let library = + LibraryRepository::create(db, "Test Library", "/test/path", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(db, library.id, "Test Series", None) + .await + .unwrap(); + (library.id, series.id) +} + +// ============================================================================= +// GET /tracking +// ============================================================================= + +#[tokio::test] +async fn get_tracking_returns_virtual_default_when_no_row() { + 
let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth(&format!("/api/v1/series/{}/tracking", series_id), &token); + let (status, dto): (StatusCode, Option<SeriesTrackingDto>) = make_json_request(app, req).await; + + assert_eq!(status, StatusCode::OK); + let dto = dto.unwrap(); + assert_eq!(dto.series_id, series_id); + assert!(!dto.tracked); + assert!(dto.track_chapters); + assert!(dto.track_volumes); +} + +#[tokio::test] +async fn get_tracking_404_when_series_missing() { + let (db, _temp) = setup_test_db().await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let fake = Uuid::new_v4(); + let req = get_request_with_auth(&format!("/api/v1/series/{}/tracking", fake), &token); + let (status, _err): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); +} + +// ============================================================================= +// PATCH /tracking +// ============================================================================= + +#[tokio::test] +async fn patch_tracking_creates_then_updates() { + let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + + // First PATCH: insert. 
+ let app1 = create_test_router(state.clone()).await; + let body = UpdateSeriesTrackingRequest { + tracked: Some(true), + latest_known_chapter: Some(Some(142.5)), + ..Default::default() + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/series/{}/tracking", series_id), + &body, + &token, + ); + let (status, dto): (StatusCode, Option<SeriesTrackingDto>) = make_json_request(app1, req).await; + assert_eq!(status, StatusCode::OK); + let dto = dto.unwrap(); + assert!(dto.tracked); + assert_eq!(dto.latest_known_chapter, Some(142.5)); + + // Second PATCH: only update one field; others persist. + let app2 = create_test_router(state).await; + let body = UpdateSeriesTrackingRequest { + latest_known_chapter: Some(Some(143.0)), + ..Default::default() + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/series/{}/tracking", series_id), + &body, + &token, + ); + let (status, dto): (StatusCode, Option<SeriesTrackingDto>) = make_json_request(app2, req).await; + assert_eq!(status, StatusCode::OK); + let dto = dto.unwrap(); + assert!(dto.tracked, "tracked should persist"); + assert_eq!(dto.latest_known_chapter, Some(143.0)); +} + +#[tokio::test] +async fn patch_tracking_requires_auth() { + let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let app = create_test_router(state).await; + + let body = UpdateSeriesTrackingRequest { + tracked: Some(true), + ..Default::default() + }; + let req = patch_json_request(&format!("/api/v1/series/{}/tracking", series_id), &body); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::UNAUTHORIZED); +} + +// ============================================================================= +// Aliases +// ============================================================================= + +#[tokio::test] +async fn list_aliases_empty_for_new_series() { + let 
(db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = get_request_with_auth(&format!("/api/v1/series/{}/aliases", series_id), &token); + let (status, body): (StatusCode, Option<SeriesAliasListResponse>) = + make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + assert!(body.unwrap().aliases.is_empty()); +} + +#[tokio::test] +async fn create_alias_inserts_then_idempotent() { + let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + + let app1 = create_test_router(state.clone()).await; + let body = CreateSeriesAliasRequest { + alias: "My Hero Academia".to_string(), + source: None, + }; + let req = post_json_request_with_auth( + &format!("/api/v1/series/{}/aliases", series_id), + &body, + &token, + ); + let (status, dto): (StatusCode, Option<SeriesAliasDto>) = make_json_request(app1, req).await; + assert_eq!(status, StatusCode::CREATED); + let dto = dto.unwrap(); + assert_eq!(dto.series_id, series_id); + assert_eq!(dto.alias, "My Hero Academia"); + assert_eq!(dto.normalized, "my hero academia"); + assert_eq!(dto.source, "manual"); + + // Second call with same alias: idempotent OK (not CREATED), same id. 
+ let app2 = create_test_router(state).await; + let body = CreateSeriesAliasRequest { + alias: "My Hero Academia".to_string(), + source: None, + }; + let req = post_json_request_with_auth( + &format!("/api/v1/series/{}/aliases", series_id), + &body, + &token, + ); + let (status, dto2): (StatusCode, Option<SeriesAliasDto>) = make_json_request(app2, req).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(dto2.unwrap().id, dto.id); +} + +#[tokio::test] +async fn create_alias_rejects_blank() { + let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = CreateSeriesAliasRequest { + alias: " ".to_string(), + source: None, + }; + let req = post_json_request_with_auth( + &format!("/api/v1/series/{}/aliases", series_id), + &body, + &token, + ); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn create_alias_rejects_invalid_explicit_source() { + // An explicit invalid source falls back to `manual` (we filter via is_valid), + // so the create should succeed but with source = "manual". This guards + // against a 500: bad input shouldn't crash, even if we don't surface 400. 
+ let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = CreateSeriesAliasRequest { + alias: "Test".to_string(), + source: Some("garbage".to_string()), + }; + let req = post_json_request_with_auth( + &format!("/api/v1/series/{}/aliases", series_id), + &body, + &token, + ); + let (status, dto): (StatusCode, Option<SeriesAliasDto>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::CREATED); + assert_eq!(dto.unwrap().source, "manual"); +} + +#[tokio::test] +async fn delete_alias_removes_row() { + use codex::db::repositories::SeriesAliasRepository; + + let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let alias = SeriesAliasRepository::create(&db, series_id, "Manual Alias", "manual") + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let req = delete_request_with_auth( + &format!("/api/v1/series/{}/aliases/{}", series_id, alias.id), + &token, + ); + let (status, _bytes) = make_request(app, req).await; + assert_eq!(status, StatusCode::NO_CONTENT); + + let remaining = SeriesAliasRepository::get_for_series(&db, series_id) + .await + .unwrap(); + assert!(remaining.is_empty()); +} + +#[tokio::test] +async fn delete_alias_404_when_alias_missing() { + let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let fake = Uuid::new_v4(); + let req = delete_request_with_auth( + &format!("/api/v1/series/{}/aliases/{}", series_id, fake), + &token, + ); + let (status, _): 
(StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn delete_alias_404_when_belongs_to_other_series() { + use codex::db::repositories::SeriesAliasRepository; + + let (db, _temp) = setup_test_db().await; + let (lib_id, series_a) = create_test_series(&db).await; + let series_b = SeriesRepository::create(&db, lib_id, "Other", None) + .await + .unwrap(); + let alias_b = SeriesAliasRepository::create(&db, series_b.id, "Belongs To B", "manual") + .await + .unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + // Attempt to delete series_b's alias through series_a's URL path; the handler must scope the lookup to the series in the path and answer 404. + let req = delete_request_with_auth( + &format!("/api/v1/series/{}/aliases/{}", series_a, alias_b.id), + &token, + ); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::NOT_FOUND); + + // Confirm alias still exists. 
+ assert!( + SeriesAliasRepository::get_by_id(&db, alias_b.id) + .await + .unwrap() + .is_some() + ); +} + +#[tokio::test] +async fn aliases_require_write_permission_for_mutations() { + let (db, _temp) = setup_test_db().await; + let (_lib, series_id) = create_test_series(&db).await; + let state = create_test_auth_state(db.clone()).await; + let token = create_regular_user_and_token(&db, &state).await; + + let app = create_test_router(state).await; + let body = CreateSeriesAliasRequest { + alias: "X".to_string(), + source: None, + }; + let req = post_json_request_with_auth( + &format!("/api/v1/series/{}/aliases", series_id), + &body, + &token, + ); + let (status, _): (StatusCode, Option<ErrorResponse>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::FORBIDDEN); +} + +// ============================================================================= +// PATCH /tracking — seed on false -> true transition (round D) +// ============================================================================= + +/// Create a book with a `book_metadata` row carrying volume/chapter so seed +/// can derive `latest_known_*` and `track_*` flags. 
+async fn add_classified_book( + db: &sea_orm::DatabaseConnection, + series_id: Uuid, + library_id: Uuid, + path: &str, + volume: Option<i32>, + chapter: Option<f32>, +) { + use chrono::Utc; + use codex::db::entities::{book_metadata, books}; + use codex::db::repositories::{BookMetadataRepository, BookRepository}; + use sea_orm::{ActiveModelTrait, Set}; + + let book = books::Model { + id: Uuid::new_v4(), + series_id, + library_id, + file_path: path.to_string(), + file_name: path.rsplit('/').next().unwrap_or(path).to_string(), + file_size: 1024, + file_hash: format!("hash_{}", Uuid::new_v4()), + partial_hash: String::new(), + format: "cbz".to_string(), + page_count: 10, + deleted: false, + analyzed: false, + analysis_error: None, + analysis_errors: None, + modified_at: Utc::now(), + created_at: Utc::now(), + updated_at: Utc::now(), + thumbnail_path: None, + thumbnail_generated_at: None, + koreader_hash: None, + epub_positions: None, + epub_spine_items: None, + }; + let created = BookRepository::create(db, &book, None).await.unwrap(); + let meta = BookMetadataRepository::create_with_title_and_number(db, created.id, None, None) + .await + .unwrap(); + let mut active: book_metadata::ActiveModel = meta.into(); + active.volume = Set(volume); + active.chapter = Set(chapter); + active.update(db).await.unwrap(); +} + +#[tokio::test] +async fn patch_tracking_seeds_on_false_to_true_transition() { + use codex::db::repositories::SeriesAliasRepository; + + let (db, _temp) = setup_test_db().await; + let (lib_id, series_id) = create_test_series(&db).await; + + // Two volume-classified books, no chapters. Seed should: + // - insert "Test Series" as a metadata-source alias, + // - set latest_known_volume = 7, latest_known_chapter = None, + // - set track_volumes = true, track_chapters = false. 
+ add_classified_book(&db, series_id, lib_id, "/v1.cbz", Some(1), None).await; + add_classified_book(&db, series_id, lib_id, "/v7.cbz", Some(7), None).await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + let body = UpdateSeriesTrackingRequest { + tracked: Some(true), + ..Default::default() + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/series/{}/tracking", series_id), + &body, + &token, + ); + let (status, dto): (StatusCode, Option<SeriesTrackingDto>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let dto = dto.unwrap(); + + assert!(dto.tracked, "user-supplied tracked=true must win"); + assert_eq!( + dto.latest_known_volume, + Some(7), + "seed should derive latest_known_volume from local max" + ); + assert_eq!( + dto.latest_known_chapter, None, + "no books have chapters → latest_known_chapter stays None" + ); + assert!( + dto.track_volumes, + "volume-organized series should keep track_volumes on" + ); + assert!( + !dto.track_chapters, + "no chapter classification → track_chapters should default off" + ); + + // Aliases were seeded. + let aliases = SeriesAliasRepository::get_for_series(&db, series_id) + .await + .unwrap(); + assert!( + aliases.iter().any(|a| a.alias == "Test Series"), + "seed should insert the series name as a metadata-source alias; got {:?}", + aliases.iter().map(|a| &a.alias).collect::<Vec<_>>() + ); +} + +#[tokio::test] +async fn patch_tracking_user_value_wins_over_seed() { + let (db, _temp) = setup_test_db().await; + let (lib_id, series_id) = create_test_series(&db).await; + + // Books would seed latest_known_chapter = 50.0 ... + add_classified_book(&db, series_id, lib_id, "/c50.cbz", None, Some(50.0)).await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + + // ... 
but the user explicitly overrides to 100.0 in the same PATCH. + // Seed runs first, then the user's update is applied on top — so the + // user's value must win. This is the "explicit override beats seed" + // contract the seed-on-track design relies on. + let body = UpdateSeriesTrackingRequest { + tracked: Some(true), + latest_known_chapter: Some(Some(100.0)), + ..Default::default() + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/series/{}/tracking", series_id), + &body, + &token, + ); + let (status, dto): (StatusCode, Option<SeriesTrackingDto>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + let dto = dto.unwrap(); + assert!(dto.tracked); + assert_eq!( + dto.latest_known_chapter, + Some(100.0), + "explicit user override must beat the seeded value" + ); +} + +#[tokio::test] +async fn patch_tracking_does_not_re_seed_on_already_tracked_update() { + use codex::db::repositories::SeriesAliasRepository; + + let (db, _temp) = setup_test_db().await; + let (lib_id, series_id) = create_test_series(&db).await; + add_classified_book(&db, series_id, lib_id, "/v1.cbz", Some(1), None).await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + + // Initial flip -> seeds. + let app = create_test_router(state.clone()).await; + let body = UpdateSeriesTrackingRequest { + tracked: Some(true), + ..Default::default() + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/series/{}/tracking", series_id), + &body, + &token, + ); + let (s1, _): (StatusCode, Option<SeriesTrackingDto>) = make_json_request(app, req).await; + assert_eq!(s1, StatusCode::OK); + + // User deletes the metadata-seeded alias. 
+ let aliases = SeriesAliasRepository::get_for_series(&db, series_id) + .await + .unwrap(); + let seeded = aliases + .iter() + .find(|a| a.alias == "Test Series") + .expect("first PATCH should have seeded the series name"); + SeriesAliasRepository::delete(&db, seeded.id).await.unwrap(); + + // A subsequent PATCH that doesn't flip tracked false→true must NOT + // re-run the seed. If it did, the deleted alias would come back. + let app = create_test_router(state).await; + let body = UpdateSeriesTrackingRequest { + latest_known_chapter: Some(Some(5.0)), + ..Default::default() + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/series/{}/tracking", series_id), + &body, + &token, + ); + let (s2, _): (StatusCode, Option<SeriesTrackingDto>) = make_json_request(app, req).await; + assert_eq!(s2, StatusCode::OK); + + let after = SeriesAliasRepository::get_for_series(&db, series_id) + .await + .unwrap(); + assert!( + !after.iter().any(|a| a.alias == "Test Series"), + "seed must not re-run when tracked is not flipping false→true" + ); +} + +#[tokio::test] +async fn patch_tracking_skips_seed_when_already_tracked_and_re_setting_true() { + use codex::db::repositories::{ + SeriesAliasRepository, SeriesTrackingRepository, TrackingUpdate, + }; + + let (db, _temp) = setup_test_db().await; + let (lib_id, series_id) = create_test_series(&db).await; + add_classified_book(&db, series_id, lib_id, "/v1.cbz", Some(1), None).await; + + // Pre-set tracked=true directly so the PATCH below sees was_tracked=true. + SeriesTrackingRepository::upsert( + &db, + series_id, + TrackingUpdate { + tracked: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + // Confirm no aliases seeded yet (we bypassed the handler). + let before = SeriesAliasRepository::get_for_series(&db, series_id) + .await + .unwrap(); + assert!(before.is_empty()); + + // Now PATCH with tracked=true again. 
Since was_tracked is already true, + // the false→true gate should NOT trigger and seed must not run. + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router(state).await; + let body = UpdateSeriesTrackingRequest { + tracked: Some(true), + ..Default::default() + }; + let req = patch_json_request_with_auth( + &format!("/api/v1/series/{}/tracking", series_id), + &body, + &token, + ); + let (status, _): (StatusCode, Option<SeriesTrackingDto>) = make_json_request(app, req).await; + assert_eq!(status, StatusCode::OK); + + let after = SeriesAliasRepository::get_for_series(&db, series_id) + .await + .unwrap(); + assert!( + after.is_empty(), + "seed must not run when tracked is already true; got {:?}", + after.iter().map(|a| &a.alias).collect::<Vec<_>>() + ); +} diff --git a/tests/task_recovery_integration.rs b/tests/task_recovery_integration.rs index 2bc2238a..2f639cf6 100644 --- a/tests/task_recovery_integration.rs +++ b/tests/task_recovery_integration.rs @@ -627,3 +627,73 @@ async fn test_completed_task_allows_new_task() { "New task should have different ID after previous task completed" ); } + +/// Regression test: enqueueing `poll_release_source` for two different +/// `source_id`s in quick succession must yield two distinct tasks. The dedup +/// path used to match by `task_type` alone for tasks whose identity lives in +/// JSON params (no FK columns), causing the second click on "Poll now" to be +/// silently coalesced onto the first source's in-flight poll. 
+#[tokio::test] +async fn test_poll_release_source_dedup_is_per_source() { + let (db, _temp_dir) = setup_test_db().await; + + let source_a = Uuid::new_v4(); + let source_b = Uuid::new_v4(); + + let task_a = TaskRepository::enqueue( + &db, + TaskType::PollReleaseSource { + source_id: source_a, + }, + None, + ) + .await + .expect("Failed to enqueue poll for source A"); + + let task_b = TaskRepository::enqueue( + &db, + TaskType::PollReleaseSource { + source_id: source_b, + }, + None, + ) + .await + .expect("Failed to enqueue poll for source B"); + + assert_ne!( + task_a, task_b, + "Polls for distinct release sources must not be deduplicated against each other" + ); + + let stats = TaskRepository::get_stats(&db) + .await + .expect("Failed to get stats"); + assert_eq!(stats.pending, 2, "Both polls should be pending"); +} + +/// Re-enqueueing a poll for the *same* source must still coalesce onto the +/// in-flight task. This is the inverse of the per-source guarantee above and +/// matches the documented `enqueue_poll_now` UX. 
+#[tokio::test] +async fn test_poll_release_source_dedup_same_source_coalesces() { + let (db, _temp_dir) = setup_test_db().await; + let source_id = Uuid::new_v4(); + + let first = TaskRepository::enqueue(&db, TaskType::PollReleaseSource { source_id }, None) + .await + .expect("Failed to enqueue first poll"); + + let second = TaskRepository::enqueue(&db, TaskType::PollReleaseSource { source_id }, None) + .await + .expect("Failed to enqueue duplicate poll"); + + assert_eq!( + first, second, + "Polls for the same source should coalesce onto the in-flight task" + ); + + let stats = TaskRepository::get_stats(&db) + .await + .expect("Failed to get stats"); + assert_eq!(stats.pending, 1, "Only one pending poll task should exist"); +} diff --git a/web/openapi.json b/web/openapi.json index b2e200e7..1504fb23 100644 --- a/web/openapi.json +++ b/web/openapi.json @@ -7107,35 +7107,31 @@ ] } }, - "/api/v1/scans/active": { + "/api/v1/release-sources": { "get": { "tags": [ - "Scans" + "Releases" ], - "summary": "List all active scans", - "description": "# Permission Required\n- `libraries:read`", - "operationId": "list_active_scans", + "summary": "List all configured release sources (admin-only).", + "operationId": "list_release_sources", "responses": { "200": { - "description": "List of active scans", + "description": "Source list", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/ScanStatusDto" - } + "$ref": "#/components/schemas/ReleaseSourceListResponse" } } } }, "403": { - "description": "Permission denied" + "description": "PluginsManage permission required" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7143,25 +7139,47 @@ ] } }, - "/api/v1/scans/stream": { + "/api/v1/release-sources/applicability": { "get": { "tags": [ - "Scans" + "Releases" + ], + "summary": "Whether release tracking is available for a given library.", + "description": "Read-only, requires only `SeriesRead`: 
the response carries no\nadmin-sensitive data (no plugin IDs, no configs, no library\nallowlists), just the boolean and friendly display names. Used by the\nfrontend to:\n\n- hide the per-series Tracking panel + Releases tab on libraries with\n no applicable plugin (cleaner UX);\n- decide whether to show the \"Track for releases\" / \"Don't track for\n releases\" entries in the bulk-selection menu.", + "operationId": "get_release_tracking_applicability", + "parameters": [ + { + "name": "libraryId", + "in": "query", + "description": "Optional library scope. When provided, only plugins that apply to\nthis library are considered (a plugin's `library_ids` field is\neither empty = all, or contains this UUID).", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } ], - "summary": "Stream scan progress updates via Server-Sent Events", - "description": "# Permission Required\n- `libraries:read`\n\n**DEPRECATED**: This endpoint is replaced by `/api/v1/tasks/stream` which provides\nreal-time updates for all task types including scans. 
This endpoint now filters\nthe task stream to only show scan_library tasks for backwards compatibility.", - "operationId": "scan_progress_stream", "responses": { "200": { - "description": "SSE stream of scan progress updates" + "description": "Applicability info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApplicabilityResponse" + } + } + } }, "403": { - "description": "Permission denied" + "description": "SeriesRead permission required" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7169,64 +7187,213 @@ ] } }, - "/api/v1/series": { - "get": { + "/api/v1/release-sources/{source_id}": { + "patch": { "tags": [ - "Series" + "Releases" ], - "summary": "List series with optional library filter and pagination", - "operationId": "list_series", + "summary": "PATCH a release source (admin-only).", + "description": "Toggle `enabled`, override `cronSchedule`, or rename `displayName`.\nSending `cronSchedule: null` clears the override and reverts the row to\ninheriting the server-wide `release_tracking.default_cron_schedule`.", + "operationId": "update_release_source", "parameters": [ { - "name": "page", - "in": "query", - "description": "Page number (1-indexed, default 1)", - "required": false, + "name": "source_id", + "in": "path", + "description": "Source ID", + "required": true, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateReleaseSourceRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Source updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReleaseSourceDto" + } + } } }, + "400": { + "description": "Invalid update payload" + }, + "403": { + "description": "PluginsManage permission required" + }, + "404": { + "description": 
"Source not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, { - "name": "pageSize", + "api_key": [] + } + ] + } + }, + "/api/v1/release-sources/{source_id}/poll-now": { + "post": { + "tags": [ + "Releases" + ], + "summary": "Trigger a manual poll for a source.", + "description": "Enqueues a `PollReleaseSource` task immediately. The task runs\nasynchronously via the worker pool; the response confirms the enqueue,\nnot the poll outcome.", + "operationId": "poll_release_source_now", + "parameters": [ + { + "name": "source_id", + "in": "path", + "description": "Source ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "202": { + "description": "Poll task enqueued", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PollNowResponse" + } + } + } + }, + "403": { + "description": "PluginsManage permission required" + }, + "404": { + "description": "Source not found" + }, + "409": { + "description": "Source disabled" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/api/v1/release-sources/{source_id}/reset": { + "post": { + "tags": [ + "Releases" + ], + "summary": "Reset a release source to a clean slate.", + "description": "Deletes every `release_ledger` row owned by the source and clears the\nsource's transient poll state (`etag`, `last_polled_at`, `last_error`,\n`last_error_at`, `last_summary`). User-managed fields (`enabled`,\n`cron_schedule`, `display_name`, `config`) are preserved.\n\nIntended for testing/troubleshooting: after a reset, the next poll\nfetches the upstream feed without an `If-None-Match` header (so no 304\nshort-circuit) and re-records every release as `announced`. 
Does NOT\nauto-enqueue a poll — call `POST /release-sources/{id}/poll-now` after\nresetting if you want immediate re-fetch.", + "operationId": "reset_release_source", + "parameters": [ + { + "name": "source_id", + "in": "path", + "description": "Source ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Source reset", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ResetReleaseSourceResponse" + } + } + } + }, + "403": { + "description": "PluginsManage permission required" + }, + "404": { + "description": "Source not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/api/v1/releases": { + "get": { + "tags": [ + "Releases" + ], + "summary": "Cross-series inbox: announced (or filtered) ledger entries, paginated.", + "operationId": "list_release_inbox", + "parameters": [ + { + "name": "state", "in": "query", - "description": "Number of items per page (max 100, default 50)", + "description": "Filter by state. Defaults to `announced`. Pass `all` to disable\nstate filtering entirely (returns rows in every state).", "required": false, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": [ + "string", + "null" + ] } }, { - "name": "sort", + "name": "seriesId", "in": "query", - "description": "Sort parameter (format: \"field,direction\" e.g. 
\"name,asc\")", "required": false, "schema": { "type": [ "string", "null" - ] + ], + "format": "uuid" } }, { - "name": "genres", + "name": "sourceId", "in": "query", - "description": "Filter by genres (comma-separated, AND logic - series must have ALL specified genres)", "required": false, "schema": { "type": [ "string", "null" - ] + ], + "format": "uuid" } }, { - "name": "tags", + "name": "language", "in": "query", - "description": "Filter by tags (comma-separated, AND logic - series must have ALL specified tags)", "required": false, "schema": { "type": [ @@ -7238,7 +7405,7 @@ { "name": "libraryId", "in": "query", - "description": "Filter by library ID", + "description": "Restrict to series belonging to this library.", "required": false, "schema": { "type": [ @@ -7249,22 +7416,33 @@ } }, { - "name": "full", + "name": "page", "in": "query", - "description": "Return full series data including metadata, locks, genres, tags, alternate titles,\nexternal ratings, and external links. Default is false for backward compatibility.", "required": false, "schema": { - "type": "boolean" + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 } } ], "responses": { "200": { - "description": "Paginated list of series (returns FullSeriesListResponse when full=true)", + "description": "Paginated inbox entries", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PaginatedResponse" + "$ref": "#/components/schemas/PaginatedResponse_ReleaseLedgerEntryDto" } } } @@ -7283,19 +7461,19 @@ ] } }, - "/api/v1/series/bulk/analyze": { + "/api/v1/releases/bulk": { "post": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk analyze multiple series", - "description": "Enqueues analysis tasks for all books in the specified series.\nSeries that don't exist are silently skipped.", - "operationId": 
"bulk_analyze_series", + "summary": "Apply an action to a batch of ledger rows.", + "description": "`dismiss`, `mark-acquired`, `ignore`, and `reset` all set state\nin-place. `delete` removes the rows and clears the affected sources'\netags so the next poll re-fetches without `If-None-Match`. All run\nas bulk SQL (no per-row round trips), so this scales to thousands of\nrows in one call.", + "operationId": "bulk_release_action", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkAnalyzeSeriesRequest" + "$ref": "#/components/schemas/BulkReleaseActionRequest" } } }, @@ -7303,17 +7481,17 @@ }, "responses": { "200": { - "description": "Analysis tasks enqueued", + "description": "Bulk action applied", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkAnalyzeResponse" + "$ref": "#/components/schemas/BulkReleaseActionResponse" } } } }, - "401": { - "description": "Unauthorized" + "400": { + "description": "Empty ID list or invalid action" }, "403": { "description": "Forbidden" @@ -7321,7 +7499,7 @@ }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7329,47 +7507,95 @@ ] } }, - "/api/v1/series/bulk/genres": { - "post": { + "/api/v1/releases/facets": { + "get": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk add/remove genres for multiple series", - "operationId": "bulk_modify_series_genres", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkModifySeriesGenresRequest" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Genres modified", - "content": { - "application/json": { + "summary": "Distinct values present in the inbox under the given filters.", + "description": "Returns the languages, libraries, and series that have at least one\nmatching ledger row. 
The frontend uses this to populate cascading\nSelect dropdowns so users never have to type a UUID and never see\ndropdown options that would yield zero results.", + "operationId": "list_release_facets", + "parameters": [ + { + "name": "state", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "seriesId", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "sourceId", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "language", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "libraryId", + "in": "query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Facets for the inbox view", + "content": { + "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataUpdateResponse" + "$ref": "#/components/schemas/ReleaseFacetsResponse" } } } }, "400": { - "description": "Bad request" - }, - "401": { - "description": "Unauthorized" + "description": "Invalid state filter" }, "403": { - "description": "Forbidden" + "description": "SeriesRead permission required" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7377,67 +7603,76 @@ ] } }, - "/api/v1/series/bulk/metadata": { - "patch": { + "/api/v1/releases/{release_id}": { + "delete": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk patch series metadata", - "description": "Applies the same partial metadata update to multiple series at once.\nOnly provided fields will be updated. 
Changed fields are auto-locked.\nNon-existent series are silently skipped.", - "operationId": "bulk_patch_series_metadata", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkPatchSeriesMetadataRequest" - } + "summary": "Hard-delete a single ledger row.", + "description": "Also clears the source's `etag` so the next poll bypasses\n`If-None-Match` and re-records the deleted row in `announced` state\n(assuming the upstream still lists it). This is the lever users want\nwhen they marked something incorrectly and need to \"get it back\".", + "operationId": "delete_release", + "parameters": [ + { + "name": "release_id", + "in": "path", + "description": "Ledger entry ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } - }, - "required": true - }, + } + ], "responses": { "200": { - "description": "Metadata updated", + "description": "Release deleted", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataUpdateResponse" + "$ref": "#/components/schemas/DeleteReleaseResponse" } } } }, - "400": { - "description": "Bad request" - }, - "401": { - "description": "Unauthorized" - }, "403": { "description": "Forbidden" + }, + "404": { + "description": "Ledger entry not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] - } - }, - "/api/v1/series/bulk/metadata/locks": { - "put": { + }, + "patch": { "tags": [ - "Bulk Operations" + "Releases" + ], + "summary": "PATCH a ledger entry's state (general-purpose state transition).", + "operationId": "update_release_entry", + "parameters": [ + { + "name": "release_id", + "in": "path", + "description": "Ledger entry ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Bulk update metadata locks for multiple series", - "operationId": "bulk_update_series_locks", "requestBody": { "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/BulkUpdateSeriesLocksRequest" + "$ref": "#/components/schemas/UpdateReleaseLedgerEntryRequest" } } }, @@ -7445,28 +7680,28 @@ }, "responses": { "200": { - "description": "Locks updated", + "description": "Updated ledger entry", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataUpdateResponse" + "$ref": "#/components/schemas/ReleaseLedgerEntryDto" } } } }, "400": { - "description": "Bad request" - }, - "401": { - "description": "Unauthorized" + "description": "Invalid state" }, "403": { "description": "Forbidden" + }, + "404": { + "description": "Ledger entry not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7474,45 +7709,46 @@ ] } }, - "/api/v1/series/bulk/metadata/reset": { + "/api/v1/releases/{release_id}/dismiss": { "post": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk reset metadata for multiple series", - "description": "Resets all metadata for the specified series back to filesystem-derived defaults.\nEach series has its metadata row deleted and recreated, and all associated data\n(genres, tags, alternate titles, external IDs/ratings/links, covers, metadata sources,\nsharing tags) is cleared. 
User ratings, read progress, and book data are preserved.\n\nSeries that don't exist are silently skipped.", - "operationId": "bulk_reset_series_metadata", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkSeriesRequest" - } + "summary": "Convenience POST: dismiss a release.", + "operationId": "dismiss_release", + "parameters": [ + { + "name": "release_id", + "in": "path", + "description": "Ledger entry ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } - }, - "required": true - }, + } + ], "responses": { "200": { - "description": "Metadata reset for specified series", + "description": "Release dismissed", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataResetResponse" + "$ref": "#/components/schemas/ReleaseLedgerEntryDto" } } } }, - "401": { - "description": "Unauthorized" - }, "403": { "description": "Forbidden" + }, + "404": { + "description": "Ledger entry not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7520,45 +7756,46 @@ ] } }, - "/api/v1/series/bulk/read": { + "/api/v1/releases/{release_id}/mark-acquired": { "post": { "tags": [ - "Bulk Operations" + "Releases" ], - "summary": "Bulk mark multiple series as read", - "description": "Marks all books in the specified series as read for the authenticated user.\nSeries that don't exist are silently skipped.", - "operationId": "bulk_mark_series_as_read", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkSeriesRequest" - } + "summary": "Convenience POST: mark a release acquired.", + "operationId": "mark_release_acquired", + "parameters": [ + { + "name": "release_id", + "in": "path", + "description": "Ledger entry ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } - }, - "required": true - }, + } + ], "responses": { "200": { - "description": "Series marked as 
read", + "description": "Release marked acquired", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MarkReadResponse" + "$ref": "#/components/schemas/ReleaseLedgerEntryDto" } } } }, - "401": { - "description": "Unauthorized" - }, "403": { "description": "Forbidden" + }, + "404": { + "description": "Ledger entry not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7566,40 +7803,30 @@ ] } }, - "/api/v1/series/bulk/renumber": { - "post": { + "/api/v1/scans/active": { + "get": { "tags": [ - "Bulk Operations" + "Scans" ], - "summary": "Bulk renumber books in multiple series", - "description": "Enqueues a fan-out task that will renumber books in the specified series\nusing each library's number strategy. Returns a task ID for tracking progress via SSE.", - "operationId": "bulk_renumber_series", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkRenumberSeriesRequest" - } - } - }, - "required": true - }, + "summary": "List all active scans", + "description": "# Permission Required\n- `libraries:read`", + "operationId": "list_active_scans", "responses": { "200": { - "description": "Renumber task queued", + "description": "List of active scans", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkTaskResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/ScanStatusDto" + } } } } }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "Forbidden" + "description": "Permission denied" } }, "security": [ @@ -7612,47 +7839,139 @@ ] } }, - "/api/v1/series/bulk/tags": { - "post": { + "/api/v1/scans/stream": { + "get": { "tags": [ - "Bulk Operations" + "Scans" ], - "summary": "Bulk add/remove tags for multiple series", - "operationId": "bulk_modify_series_tags", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/BulkModifySeriesTagsRequest" - } - } - }, - "required": true - }, + "summary": "Stream scan progress updates via Server-Sent Events", + "description": "# Permission Required\n- `libraries:read`\n\n**DEPRECATED**: This endpoint is replaced by `/api/v1/tasks/stream` which provides\nreal-time updates for all task types including scans. This endpoint now filters\nthe task stream to only show scan_library tasks for backwards compatibility.", + "operationId": "scan_progress_stream", "responses": { "200": { - "description": "Tags modified", + "description": "SSE stream of scan progress updates" + }, + "403": { + "description": "Permission denied" + } + }, + "security": [ + { + "bearer_auth": [] + }, + { + "api_key": [] + } + ] + } + }, + "/api/v1/series": { + "get": { + "tags": [ + "Series" + ], + "summary": "List series with optional library filter and pagination", + "operationId": "list_series", + "parameters": [ + { + "name": "page", + "in": "query", + "description": "Page number (1-indexed, default 1)", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "description": "Number of items per page (max 100, default 50)", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (format: \"field,direction\" e.g. 
\"name,asc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "genres", + "in": "query", + "description": "Filter by genres (comma-separated, AND logic - series must have ALL specified genres)", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "tags", + "in": "query", + "description": "Filter by tags (comma-separated, AND logic - series must have ALL specified tags)", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "libraryId", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "full", + "in": "query", + "description": "Return full series data including metadata, locks, genres, tags, alternate titles,\nexternal ratings, and external links. Default is false for backward compatibility.", + "required": false, + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "description": "Paginated list of series (returns FullSeriesListResponse when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkMetadataUpdateResponse" + "$ref": "#/components/schemas/PaginatedResponse" } } } }, - "400": { - "description": "Bad request" - }, - "401": { - "description": "Unauthorized" - }, "403": { "description": "Forbidden" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -7660,19 +7979,19 @@ ] } }, - "/api/v1/series/bulk/thumbnails/books/generate": { + "/api/v1/series/bulk/analyze": { "post": { "tags": [ "Bulk Operations" ], - "summary": "Bulk generate thumbnails for books in multiple series", - "description": "Enqueues a fan-out task that will generate thumbnails for all books in the specified series.\nThis is useful for regenerating thumbnails after changing thumbnail settings or fixing\ncorrupt thumbnails.", - "operationId": 
"bulk_generate_series_book_thumbnails", + "summary": "Bulk analyze multiple series", + "description": "Enqueues analysis tasks for all books in the specified series.\nSeries that don't exist are silently skipped.", + "operationId": "bulk_analyze_series", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkGenerateSeriesBookThumbnailsRequest" + "$ref": "#/components/schemas/BulkAnalyzeSeriesRequest" } } }, @@ -7680,11 +7999,11 @@ }, "responses": { "200": { - "description": "Thumbnail generation task queued", + "description": "Analysis tasks enqueued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkTaskResponse" + "$ref": "#/components/schemas/BulkAnalyzeResponse" } } } @@ -7706,19 +8025,18 @@ ] } }, - "/api/v1/series/bulk/thumbnails/generate": { + "/api/v1/series/bulk/genres": { "post": { "tags": [ "Bulk Operations" ], - "summary": "Bulk generate series thumbnails", - "description": "Enqueues a fan-out task that will generate thumbnails for the specified series.\nSeries thumbnails are derived from the first book's cover in each series.", - "operationId": "bulk_generate_series_thumbnails", + "summary": "Bulk add/remove genres for multiple series", + "operationId": "bulk_modify_series_genres", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkGenerateSeriesThumbnailsRequest" + "$ref": "#/components/schemas/BulkModifySeriesGenresRequest" } } }, @@ -7726,15 +8044,18 @@ }, "responses": { "200": { - "description": "Series thumbnail generation task queued", + "description": "Genres modified", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkTaskResponse" + "$ref": "#/components/schemas/BulkMetadataUpdateResponse" } } } }, + "400": { + "description": "Bad request" + }, "401": { "description": "Unauthorized" }, @@ -7752,19 +8073,19 @@ ] } }, - "/api/v1/series/bulk/titles/reprocess": { - "post": { + 
"/api/v1/series/bulk/metadata": { + "patch": { "tags": [ "Bulk Operations" ], - "summary": "Bulk reprocess series titles", - "description": "Enqueues a fan-out task that will reprocess titles for the specified series\nusing their library's preprocessing rules. This is useful when preprocessing\nrules are added or changed after series have already been created.", - "operationId": "bulk_reprocess_series_titles", + "summary": "Bulk patch series metadata", + "description": "Applies the same partial metadata update to multiple series at once.\nOnly provided fields will be updated. Changed fields are auto-locked.\nNon-existent series are silently skipped.", + "operationId": "bulk_patch_series_metadata", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkReprocessSeriesTitlesRequest" + "$ref": "#/components/schemas/BulkPatchSeriesMetadataRequest" } } }, @@ -7772,15 +8093,18 @@ }, "responses": { "200": { - "description": "Title reprocessing task queued", + "description": "Metadata updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkTaskResponse" + "$ref": "#/components/schemas/BulkMetadataUpdateResponse" } } } }, + "400": { + "description": "Bad request" + }, "401": { "description": "Unauthorized" }, @@ -7798,19 +8122,18 @@ ] } }, - "/api/v1/series/bulk/unread": { - "post": { + "/api/v1/series/bulk/metadata/locks": { + "put": { "tags": [ "Bulk Operations" ], - "summary": "Bulk mark multiple series as unread", - "description": "Marks all books in the specified series as unread for the authenticated user.\nSeries that don't exist are silently skipped.", - "operationId": "bulk_mark_series_as_unread", + "summary": "Bulk update metadata locks for multiple series", + "operationId": "bulk_update_series_locks", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkSeriesRequest" + "$ref": "#/components/schemas/BulkUpdateSeriesLocksRequest" } } }, 
@@ -7818,15 +8141,18 @@ }, "responses": { "200": { - "description": "Series marked as unread", + "description": "Locks updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MarkReadResponse" + "$ref": "#/components/schemas/BulkMetadataUpdateResponse" } } } }, + "400": { + "description": "Bad request" + }, "401": { "description": "Unauthorized" }, @@ -7844,58 +8170,45 @@ ] } }, - "/api/v1/series/in-progress": { - "get": { + "/api/v1/series/bulk/metadata/reset": { + "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "List series with in-progress books (series that have at least one book with reading progress that is not completed)", - "operationId": "list_in_progress_series", - "parameters": [ - { - "name": "libraryId", - "in": "query", - "description": "Filter by library ID (optional)", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" + "summary": "Bulk reset metadata for multiple series", + "description": "Resets all metadata for the specified series back to filesystem-derived defaults.\nEach series has its metadata row deleted and recreated, and all associated data\n(genres, tags, alternate titles, external IDs/ratings/links, covers, metadata sources,\nsharing tags) is cleared. 
User ratings, read progress, and book data are preserved.\n\nSeries that don't exist are silently skipped.", + "operationId": "bulk_reset_series_metadata", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BulkSeriesRequest" + } } }, - { - "name": "full", - "in": "query", - "description": "Return full series data including metadata, locks, genres, tags, etc.", - "required": false, - "schema": { - "type": "boolean" - } - } - ], + "required": true + }, "responses": { "200": { - "description": "List of in-progress series (returns Vec<FullSeriesResponse> when full=true)", + "description": "Metadata reset for specified series", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SeriesDto" - } + "$ref": "#/components/schemas/BulkMetadataResetResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -7903,67 +8216,19 @@ ] } }, - "/api/v1/series/list": { + "/api/v1/series/bulk/read": { "post": { "tags": [ - "Series" - ], - "summary": "List series with advanced filtering", - "description": "Supports complex filter conditions including nested AllOf/AnyOf logic,\ngenre/tag filtering with include/exclude, and more.\n\nPagination parameters (page, pageSize, sort) are passed as query parameters.\nFilter conditions are passed in the request body.", - "operationId": "list_series_filtered", - "parameters": [ - { - "name": "page", - "in": "query", - "description": "Page number (1-indexed, minimum 1)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "default": 1, - "minimum": 1 - } - }, - { - "name": "pageSize", - "in": "query", - "description": "Number of items per page (max 500, default 50)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "default": 50, - "maximum": 500, - "minimum": 
1 - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort field and direction (e.g., \"name,asc\" or \"createdAt,desc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "full", - "in": "query", - "description": "Return full data including metadata, locks, and related entities.\nDefault is false for backward compatibility.", - "required": false, - "schema": { - "type": "boolean" - } - } + "Bulk Operations" ], + "summary": "Bulk mark multiple series as read", + "description": "Marks all books in the specified series as read for the authenticated user.\nSeries that don't exist are silently skipped.", + "operationId": "bulk_mark_series_as_read", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesListRequest" + "$ref": "#/components/schemas/BulkSeriesRequest" } } }, @@ -7971,22 +8236,25 @@ }, "responses": { "200": { - "description": "Paginated list of filtered series (returns FullSeriesListResponse when full=true)", + "description": "Series marked as read", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PaginatedResponse" + "$ref": "#/components/schemas/MarkReadResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -7994,19 +8262,19 @@ ] } }, - "/api/v1/series/list/alphabetical-groups": { + "/api/v1/series/bulk/renumber": { "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "Get alphabetical groups for series", - "description": "Returns a list of alphabetical groups with counts, showing how many series\nstart with each letter/character. 
This is useful for building A-Z navigation.\nThe same filters as list_series_filtered can be applied.", - "operationId": "list_series_alphabetical_groups", + "summary": "Bulk renumber books in multiple series", + "description": "Enqueues a fan-out task that will renumber books in the specified series\nusing each library's number strategy. Returns a task ID for tracking progress via SSE.", + "operationId": "bulk_renumber_series", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesListRequest" + "$ref": "#/components/schemas/BulkRenumberSeriesRequest" } } }, @@ -8014,25 +8282,25 @@ }, "responses": { "200": { - "description": "List of alphabetical groups with counts", + "description": "Renumber task queued", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/AlphabeticalGroupDto" - } + "$ref": "#/components/schemas/BulkTaskResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8040,19 +8308,18 @@ ] } }, - "/api/v1/series/metadata/auto-match/task/bulk": { + "/api/v1/series/bulk/tags": { "post": { "tags": [ - "Plugin Actions" + "Bulk Operations" ], - "summary": "Enqueue plugin auto-match tasks for multiple series (bulk operation)", - "description": "Creates background tasks to auto-match metadata for multiple series using the specified plugin.\nEach series gets its own task that runs asynchronously in a worker process.", - "operationId": "enqueue_bulk_auto_match_tasks", + "summary": "Bulk add/remove tags for multiple series", + "operationId": "bulk_modify_series_tags", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueBulkAutoMatchRequest" + "$ref": "#/components/schemas/BulkModifySeriesTagsRequest" } } }, @@ -8060,26 +8327,23 @@ }, "responses": { "200": { - "description": "Tasks 
enqueued", + "description": "Tags modified", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueAutoMatchResponse" + "$ref": "#/components/schemas/BulkMetadataUpdateResponse" } } } }, "400": { - "description": "Invalid request" + "description": "Bad request" }, "401": { "description": "Unauthorized" }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Plugin not found" + "description": "Forbidden" } }, "security": [ @@ -8092,69 +8356,45 @@ ] } }, - "/api/v1/series/recently-added": { - "get": { + "/api/v1/series/bulk/thumbnails/books/generate": { + "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "List recently added series", - "operationId": "list_recently_added_series", - "parameters": [ - { - "name": "limit", - "in": "query", - "description": "Maximum number of series to return (default: 50)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - }, - { - "name": "libraryId", - "in": "query", - "description": "Filter by library ID (optional)", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" + "summary": "Bulk generate thumbnails for books in multiple series", + "description": "Enqueues a fan-out task that will generate thumbnails for all books in the specified series.\nThis is useful for regenerating thumbnails after changing thumbnail settings or fixing\ncorrupt thumbnails.", + "operationId": "bulk_generate_series_book_thumbnails", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BulkGenerateSeriesBookThumbnailsRequest" + } } }, - { - "name": "full", - "in": "query", - "description": "Return full series data including metadata, locks, genres, tags, etc.", - "required": false, - "schema": { - "type": "boolean" - } - } - ], + "required": true + }, "responses": { "200": { - "description": "List of recently added series (returns 
Vec<FullSeriesResponse> when full=true)", + "description": "Thumbnail generation task queued", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SeriesDto" - } + "$ref": "#/components/schemas/BulkTaskResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8162,69 +8402,45 @@ ] } }, - "/api/v1/series/recently-updated": { - "get": { + "/api/v1/series/bulk/thumbnails/generate": { + "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "List recently updated series", - "operationId": "list_recently_updated_series", - "parameters": [ - { - "name": "limit", - "in": "query", - "description": "Maximum number of series to return (default: 50)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - }, - { - "name": "libraryId", - "in": "query", - "description": "Filter by library ID (optional)", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" + "summary": "Bulk generate series thumbnails", + "description": "Enqueues a fan-out task that will generate thumbnails for the specified series.\nSeries thumbnails are derived from the first book's cover in each series.", + "operationId": "bulk_generate_series_thumbnails", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BulkGenerateSeriesThumbnailsRequest" + } } }, - { - "name": "full", - "in": "query", - "description": "Return full series data including metadata, locks, genres, tags, etc.", - "required": false, - "schema": { - "type": "boolean" - } - } - ], + "required": true + }, "responses": { "200": { - "description": "List of recently updated series (returns Vec<FullSeriesResponse> when full=true)", + "description": "Series thumbnail generation task queued", "content": { 
"application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SeriesDto" - } + "$ref": "#/components/schemas/BulkTaskResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8232,18 +8448,19 @@ ] } }, - "/api/v1/series/search": { + "/api/v1/series/bulk/titles/reprocess": { "post": { "tags": [ - "Series" + "Bulk Operations" ], - "summary": "Search series by name", - "operationId": "search_series", + "summary": "Bulk reprocess series titles", + "description": "Enqueues a fan-out task that will reprocess titles for the specified series\nusing their library's preprocessing rules. This is useful when preprocessing\nrules are added or changed after series have already been created.", + "operationId": "bulk_reprocess_series_titles", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SearchSeriesRequest" + "$ref": "#/components/schemas/BulkReprocessSeriesTitlesRequest" } } }, @@ -8251,25 +8468,25 @@ }, "responses": { "200": { - "description": "Search results (returns Vec<FullSeriesResponse> when full=true)", + "description": "Title reprocessing task queued", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SeriesDto" - } + "$ref": "#/components/schemas/BulkTaskResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { "description": "Forbidden" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8277,19 +8494,19 @@ ] } }, - "/api/v1/series/thumbnails/generate": { + "/api/v1/series/bulk/track-for-releases": { "post": { "tags": [ - "Thumbnails" + "Bulk Operations" ], - "summary": "Generate thumbnails for series in a scope", - "description": "This queues a fan-out task that enqueues individual series thumbnail generation tasks.\nSeries thumbnails are the cover 
images displayed for each series (derived from the first book's cover).\n\n**Scope priority:**\n1. If `series_ids` is provided, only those specific series\n2. If `library_id` is provided, only series in that library\n3. If neither provided, all series in all libraries\n\n**Force behavior:**\n- `force: false` (default): Only generates thumbnails for series that don't have one\n- `force: true`: Regenerates all thumbnails, replacing existing ones\n\n# Permission Required\n- `tasks:write`", - "operationId": "generate_series_thumbnails", + "summary": "Bulk-enable release tracking for multiple series.", + "description": "For each `series_id` in the request, flips `series_tracking.tracked` to\n`true` and runs the seed pass (auto-derives aliases, `latest_known_*`,\n`track_chapters` / `track_volumes` from existing data). Series that don't\nexist are reported as `outcome: skipped`. Series already tracked are\nreported as `outcome: skipped, detail: \"already tracked\"` and the seed is\nnot re-run (idempotent — a re-run would simply re-derive identical\nvalues, but we skip the work).\n\nMirrors the per-series PATCH `false -> true` transition: same seed\nfunction, same idempotency guarantees.", + "operationId": "bulk_track_series_for_releases", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenerateSeriesThumbnailsRequest" + "$ref": "#/components/schemas/BulkSeriesRequest" } } }, @@ -8297,17 +8514,20 @@ }, "responses": { "200": { - "description": "Series thumbnail generation task queued", + "description": "Bulk-tracked series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/BulkTrackForReleasesResponse" } } } }, + "401": { + "description": "Unauthorized" + }, "403": { - "description": "Permission denied" + "description": "Forbidden" } }, "security": [ @@ -8320,19 +8540,19 @@ ] } }, - "/api/v1/series/titles/reprocess": { + 
"/api/v1/series/bulk/unread": { "post": { "tags": [ - "Tasks" + "Bulk Operations" ], - "summary": "Reprocess series titles in a scope", - "description": "This queues a fan-out task that enqueues individual series title reprocessing tasks.\nApplies the library's preprocessing rules to regenerate display titles.\n\n**Scope priority:**\n1. If `series_ids` is provided, only those specific series\n2. If `library_id` is provided, only series in that library\n3. If neither provided, all series in all libraries\n\n**Lock behavior:**\n- Series with `title_lock: true` are skipped\n- If title changes and `title_sort_lock` is false, `title_sort` is cleared\n\n# Permission Required\n- `series:write`", - "operationId": "reprocess_series_titles", + "summary": "Bulk mark multiple series as unread", + "description": "Marks all books in the specified series as unread for the authenticated user.\nSeries that don't exist are silently skipped.", + "operationId": "bulk_mark_series_as_unread", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ReprocessSeriesTitlesRequest" + "$ref": "#/components/schemas/BulkSeriesRequest" } } }, @@ -8340,17 +8560,17 @@ }, "responses": { "200": { - "description": "Task enqueued", + "description": "Series marked as unread", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueReprocessTitleResponse" + "$ref": "#/components/schemas/MarkReadResponse" } } } }, - "400": { - "description": "Invalid request" + "401": { + "description": "Unauthorized" }, "403": { "description": "Forbidden" @@ -8366,31 +8586,19 @@ ] } }, - "/api/v1/series/{id}/metadata/apply": { + "/api/v1/series/bulk/untrack-for-releases": { "post": { "tags": [ - "Plugin Actions" - ], - "summary": "Apply metadata from a plugin to a series", - "description": "Fetches metadata from a plugin and applies it to the series, respecting\nRBAC permissions and field locks.", - "operationId": "apply_series_metadata", - 
"parameters": [ - { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Bulk Operations" ], + "summary": "Bulk-disable release tracking for multiple series.", + "description": "Flips `series_tracking.tracked` to `false`. Does not delete aliases,\n`latest_known_*`, or other tracking config — the user can re-track\nwithout losing customizations, and the seed will re-derive any\nauto-derived fields on the next track-on transition.", + "operationId": "bulk_untrack_series_for_releases", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataApplyRequest" + "$ref": "#/components/schemas/BulkSeriesRequest" } } }, @@ -8398,26 +8606,20 @@ }, "responses": { "200": { - "description": "Metadata applied", + "description": "Bulk-untracked series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataApplyResponse" + "$ref": "#/components/schemas/BulkTrackForReleasesResponse" } } } }, - "400": { - "description": "Invalid request" - }, "401": { "description": "Unauthorized" }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Series or plugin not found" + "description": "Forbidden" } }, "security": [ @@ -8430,63 +8632,58 @@ ] } }, - "/api/v1/series/{id}/metadata/auto-match": { - "post": { + "/api/v1/series/in-progress": { + "get": { "tags": [ - "Plugin Actions" + "Series" ], - "summary": "Auto-match and apply metadata from a plugin to a series", - "description": "Searches for the series using the plugin's metadata search, picks the best match,\nand applies the metadata in one step. 
This is a convenience endpoint for quick\nmetadata updates without user intervention.", - "operationId": "auto_match_series_metadata", + "summary": "List series with in-progress books (series that have at least one book with reading progress that is not completed)", + "operationId": "list_in_progress_series", "parameters": [ { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, + "name": "libraryId", + "in": "query", + "description": "Filter by library ID (optional)", + "required": false, "schema": { - "type": "string", + "type": [ + "string", + "null" + ], "format": "uuid" } + }, + { + "name": "full", + "in": "query", + "description": "Return full series data including metadata, locks, genres, tags, etc.", + "required": false, + "schema": { + "type": "boolean" + } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MetadataAutoMatchRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Auto-match completed", + "description": "List of in-progress series (returns Vec<FullSeriesResponse> when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataAutoMatchResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesDto" + } } } } }, - "400": { - "description": "Invalid request" - }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Series or plugin not found or no match found" + "description": "Forbidden" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -8494,23 +8691,59 @@ ] } }, - "/api/v1/series/{id}/metadata/auto-match/task": { + "/api/v1/series/list": { "post": { "tags": [ - "Plugin Actions" + "Series" ], - "summary": "Enqueue a plugin auto-match task for a single series", - "description": "Creates a background task to auto-match metadata for a series using the 
specified plugin.\nThe task runs asynchronously in a worker process and emits a SeriesMetadataUpdated event\nwhen complete.", - "operationId": "enqueue_auto_match_task", + "summary": "List series with advanced filtering", + "description": "Supports complex filter conditions including nested AllOf/AnyOf logic,\ngenre/tag filtering with include/exclude, and more.\n\nPagination parameters (page, pageSize, sort) are passed as query parameters.\nFilter conditions are passed in the request body.", + "operationId": "list_series_filtered", "parameters": [ { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, + "name": "page", + "in": "query", + "description": "Page number (1-indexed, minimum 1)", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int64", + "default": 1, + "minimum": 1 + } + }, + { + "name": "pageSize", + "in": "query", + "description": "Number of items per page (max 500, default 50)", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "default": 50, + "maximum": 500, + "minimum": 1 + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort field and direction (e.g., \"name,asc\" or \"createdAt,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "full", + "in": "query", + "description": "Return full data including metadata, locks, and related entities.\nDefault is false for backward compatibility.", + "required": false, + "schema": { + "type": "boolean" } } ], @@ -8518,7 +8751,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueAutoMatchRequest" + "$ref": "#/components/schemas/SeriesListRequest" } } }, @@ -8526,31 +8759,22 @@ }, "responses": { "200": { - "description": "Task enqueued", + "description": "Paginated list of filtered series (returns FullSeriesListResponse when full=true)", "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/EnqueueAutoMatchResponse" + "$ref": "#/components/schemas/PaginatedResponse" } } } }, - "400": { - "description": "Invalid request" - }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Series or plugin not found" + "description": "Forbidden" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -8558,31 +8782,19 @@ ] } }, - "/api/v1/series/{id}/metadata/preview": { + "/api/v1/series/list/alphabetical-groups": { "post": { "tags": [ - "Plugin Actions" - ], - "summary": "Preview metadata from a plugin for a series", - "description": "Fetches metadata from a plugin and computes a field-by-field diff with the current\nseries metadata, showing which fields will be applied, locked, or denied by RBAC.", - "operationId": "preview_series_metadata", - "parameters": [ - { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Series" ], + "summary": "Get alphabetical groups for series", + "description": "Returns a list of alphabetical groups with counts, showing how many series\nstart with each letter/character. 
This is useful for building A-Z navigation.\nThe same filters as list_series_filtered can be applied.", + "operationId": "list_series_alphabetical_groups", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataPreviewRequest" + "$ref": "#/components/schemas/SeriesListRequest" } } }, @@ -8590,31 +8802,25 @@ }, "responses": { "200": { - "description": "Preview computed", + "description": "List of alphabetical groups with counts", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataPreviewResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/AlphabeticalGroupDto" + } } } } }, - "400": { - "description": "Invalid request" - }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "No permission to edit series" - }, - "404": { - "description": "Series or plugin not found" + "description": "Forbidden" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -8622,52 +8828,46 @@ ] } }, - "/api/v1/series/{id}/metadata/search-title": { - "get": { + "/api/v1/series/metadata/auto-match/task/bulk": { + "post": { "tags": [ "Plugin Actions" ], - "summary": "Get the preprocessed search title for a series", - "description": "Returns the series title after applying plugin and library preprocessing rules.\nUse this to get the correct search query before opening the metadata search modal.", - "operationId": "get_series_search_title", - "parameters": [ - { - "name": "id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" + "summary": "Enqueue plugin auto-match tasks for multiple series (bulk operation)", + "description": "Creates background tasks to auto-match metadata for multiple series using the specified plugin.\nEach series gets its own task that runs asynchronously in a worker process.", + "operationId": "enqueue_bulk_auto_match_tasks", + "requestBody": { + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EnqueueBulkAutoMatchRequest" + } } }, - { - "name": "pluginId", - "in": "query", - "description": "Plugin ID to get preprocessing rules from", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } - ], + "required": true + }, "responses": { "200": { - "description": "Preprocessed search title", + "description": "Tasks enqueued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SearchTitleResponse" + "$ref": "#/components/schemas/EnqueueAutoMatchResponse" } } } }, + "400": { + "description": "Invalid request" + }, "401": { "description": "Unauthorized" }, + "403": { + "description": "No permission to edit series" + }, "404": { - "description": "Series or plugin not found" + "description": "Plugin not found" } }, "security": [ @@ -8680,21 +8880,35 @@ ] } }, - "/api/v1/series/{series_id}": { + "/api/v1/series/recently-added": { "get": { "tags": [ "Series" ], - "summary": "Get series by ID", - "operationId": "get_series", + "summary": "List recently added series", + "operationId": "list_recently_added_series", "parameters": [ { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, + "name": "limit", + "in": "query", + "description": "Maximum number of series to return (default: 50)", + "required": false, "schema": { - "type": "string", + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "libraryId", + "in": "query", + "description": "Filter by library ID (optional)", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], "format": "uuid" } }, @@ -8710,17 +8924,20 @@ ], "responses": { "200": { - "description": "Series details (returns FullSeriesResponse when full=true)", + "description": "List of recently added series (returns Vec<FullSeriesResponse> when full=true)", "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/SeriesDto" + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesDto" + } } } } }, - "404": { - "description": "Series not found" + "403": { + "description": "Forbidden" } }, "security": [ @@ -8731,31 +8948,90 @@ "api_key": [] } ] - }, - "patch": { + } + }, + "/api/v1/series/recently-updated": { + "get": { "tags": [ "Series" ], - "summary": "Update series core fields (name/title)", - "description": "Partially updates series_metadata fields. Only provided fields will be updated.\nAbsent fields are unchanged. When name is set to a non-null value, it is automatically locked.", - "operationId": "patch_series", + "summary": "List recently updated series", + "operationId": "list_recently_updated_series", "parameters": [ { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, + "name": "limit", + "in": "query", + "description": "Maximum number of series to return (default: 50)", + "required": false, "schema": { - "type": "string", + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "libraryId", + "in": "query", + "description": "Filter by library ID (optional)", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], "format": "uuid" } + }, + { + "name": "full", + "in": "query", + "description": "Return full series data including metadata, locks, genres, tags, etc.", + "required": false, + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "description": "List of recently updated series (returns Vec<FullSeriesResponse> when full=true)", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesDto" + } + } + } + } + }, + "403": { + "description": "Forbidden" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] } + ] + } + }, + "/api/v1/series/search": { + "post": { + "tags": [ + "Series" ], + "summary": "Search series by name", + "operationId": 
"search_series", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PatchSeriesRequest" + "$ref": "#/components/schemas/SearchSeriesRequest" } } }, @@ -8763,20 +9039,20 @@ }, "responses": { "200": { - "description": "Series updated successfully", + "description": "Search results (returns Vec<FullSeriesResponse> when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesUpdateResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesDto" + } } } } }, "403": { "description": "Forbidden" - }, - "404": { - "description": "Series not found" } }, "security": [ @@ -8789,101 +9065,88 @@ ] } }, - "/api/v1/series/{series_id}/alternate-titles": { - "get": { + "/api/v1/series/thumbnails/generate": { + "post": { "tags": [ - "Series" + "Thumbnails" ], - "summary": "Get alternate titles for a series", - "operationId": "get_series_alternate_titles", - "parameters": [ - { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" + "summary": "Generate thumbnails for series in a scope", + "description": "This queues a fan-out task that enqueues individual series thumbnail generation tasks.\nSeries thumbnails are the cover images displayed for each series (derived from the first book's cover).\n\n**Scope priority:**\n1. If `series_ids` is provided, only those specific series\n2. If `library_id` is provided, only series in that library\n3. 
If neither provided, all series in all libraries\n\n**Force behavior:**\n- `force: false` (default): Only generates thumbnails for series that don't have one\n- `force: true`: Regenerates all thumbnails, replacing existing ones\n\n# Permission Required\n- `tasks:write`", + "operationId": "generate_series_thumbnails", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GenerateSeriesThumbnailsRequest" + } } - } - ], + }, + "required": true + }, "responses": { "200": { - "description": "List of alternate titles for the series", + "description": "Series thumbnail generation task queued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AlternateTitleListResponse" + "$ref": "#/components/schemas/CreateTaskResponse" } } } }, "403": { - "description": "Forbidden" - }, - "404": { - "description": "Series not found" + "description": "Permission denied" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - }, + } + }, + "/api/v1/series/titles/reprocess": { "post": { "tags": [ - "Series" - ], - "summary": "Add an alternate title to a series", - "operationId": "create_alternate_title", - "parameters": [ - { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Tasks" ], + "summary": "Reprocess series titles in a scope", + "description": "This queues a fan-out task that enqueues individual series title reprocessing tasks.\nApplies the library's preprocessing rules to regenerate display titles.\n\n**Scope priority:**\n1. If `series_ids` is provided, only those specific series\n2. If `library_id` is provided, only series in that library\n3. 
If neither provided, all series in all libraries\n\n**Lock behavior:**\n- Series with `title_lock: true` are skipped\n- If title changes and `title_sort_lock` is false, `title_sort` is cleared\n\n# Permission Required\n- `series:write`", + "operationId": "reprocess_series_titles", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateAlternateTitleRequest" + "$ref": "#/components/schemas/ReprocessSeriesTitlesRequest" } } }, "required": true }, "responses": { - "201": { - "description": "Alternate title created", + "200": { + "description": "Task enqueued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AlternateTitleDto" + "$ref": "#/components/schemas/EnqueueReprocessTitleResponse" } } } }, + "400": { + "description": "Invalid request" + }, "403": { "description": "Forbidden" - }, - "404": { - "description": "Series not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -8891,16 +9154,17 @@ ] } }, - "/api/v1/series/{series_id}/alternate-titles/{title_id}": { - "delete": { + "/api/v1/series/{id}/metadata/apply": { + "post": { "tags": [ - "Series" + "Plugin Actions" ], - "summary": "Delete an alternate title", - "operationId": "delete_alternate_title", + "summary": "Apply metadata from a plugin to a series", + "description": "Fetches metadata from a plugin and applies it to the series, respecting\nRBAC permissions and field locks.", + "operationId": "apply_series_metadata", "parameters": [ { - "name": "series_id", + "name": "id", "in": "path", "description": "Series ID", "required": true, @@ -8908,47 +9172,63 @@ "type": "string", "format": "uuid" } - }, - { - "name": "title_id", - "in": "path", - "description": "Alternate title ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataApplyRequest" + } + } + }, 
+ "required": true + }, "responses": { - "204": { - "description": "Alternate title deleted" + "200": { + "description": "Metadata applied", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataApplyResponse" + } + } + } + }, + "400": { + "description": "Invalid request" + }, + "401": { + "description": "Unauthorized" }, "403": { - "description": "Forbidden" + "description": "No permission to edit series" }, "404": { - "description": "Series or title not found" + "description": "Series or plugin not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - }, - "patch": { + } + }, + "/api/v1/series/{id}/metadata/auto-match": { + "post": { "tags": [ - "Series" + "Plugin Actions" ], - "summary": "Update an alternate title", - "operationId": "update_alternate_title", + "summary": "Auto-match and apply metadata from a plugin to a series", + "description": "Searches for the series using the plugin's metadata search, picks the best match,\nand applies the metadata in one step. 
This is a convenience endpoint for quick\nmetadata updates without user intervention.", + "operationId": "auto_match_series_metadata", "parameters": [ { - "name": "series_id", + "name": "id", "in": "path", "description": "Series ID", "required": true, @@ -8956,11 +9236,65 @@ "type": "string", "format": "uuid" } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataAutoMatchRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Auto-match completed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataAutoMatchResponse" + } + } + } + }, + "400": { + "description": "Invalid request" + }, + "401": { + "description": "Unauthorized" + }, + "403": { + "description": "No permission to edit series" + }, + "404": { + "description": "Series or plugin not found or no match found" + } + }, + "security": [ + { + "bearer_auth": [] }, { - "name": "title_id", + "api_key": [] + } + ] + } + }, + "/api/v1/series/{id}/metadata/auto-match/task": { + "post": { + "tags": [ + "Plugin Actions" + ], + "summary": "Enqueue a plugin auto-match task for a single series", + "description": "Creates a background task to auto-match metadata for a series using the specified plugin.\nThe task runs asynchronously in a worker process and emits a SeriesMetadataUpdated event\nwhen complete.", + "operationId": "enqueue_auto_match_task", + "parameters": [ + { + "name": "id", "in": "path", - "description": "Alternate title ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -8972,7 +9306,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdateAlternateTitleRequest" + "$ref": "#/components/schemas/EnqueueAutoMatchRequest" } } }, @@ -8980,25 +9314,31 @@ }, "responses": { "200": { - "description": "Alternate title updated", + "description": "Task enqueued", "content": { "application/json": { "schema": 
{ - "$ref": "#/components/schemas/AlternateTitleDto" + "$ref": "#/components/schemas/EnqueueAutoMatchResponse" } } } }, + "400": { + "description": "Invalid request" + }, + "401": { + "description": "Unauthorized" + }, "403": { - "description": "Forbidden" + "description": "No permission to edit series" }, "404": { - "description": "Series or title not found" + "description": "Series or plugin not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -9006,17 +9346,17 @@ ] } }, - "/api/v1/series/{series_id}/analyze": { + "/api/v1/series/{id}/metadata/preview": { "post": { "tags": [ - "Scans" + "Plugin Actions" ], - "summary": "Trigger analysis of all books in a series", - "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues an AnalyzeSeries task which will create individual AnalyzeBook tasks\nfor each book in the series. All books are analyzed with force=true.\nReturns immediately with a task_id to track progress.", - "operationId": "trigger_series_analysis", + "summary": "Preview metadata from a plugin for a series", + "description": "Fetches metadata from a plugin and computes a field-by-field diff with the current\nseries metadata, showing which fields will be applied, locked, or denied by RBAC.", + "operationId": "preview_series_metadata", "parameters": [ { - "name": "series_id", + "name": "id", "in": "path", "description": "Series ID", "required": true, @@ -9026,22 +9366,38 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataPreviewRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Analysis task enqueued successfully", + "description": "Preview computed", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/MetadataPreviewResponse" } } } }, + "400": { + "description": "Invalid request" + }, + "401": { + "description": 
"Unauthorized" + }, "403": { - "description": "Permission denied" + "description": "No permission to edit series" }, "404": { - "description": "Series not found" + "description": "Series or plugin not found" } }, "security": [ @@ -9054,17 +9410,17 @@ ] } }, - "/api/v1/series/{series_id}/analyze-unanalyzed": { - "post": { + "/api/v1/series/{id}/metadata/search-title": { + "get": { "tags": [ - "Scans" + "Plugin Actions" ], - "summary": "Trigger analysis of unanalyzed books in a series", - "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues AnalyzeBook tasks (with force=false) for books in the series that have not been analyzed yet.\nThis is useful for recovering from failures or analyzing newly discovered books.\nReturns immediately with a task_id to track progress.", - "operationId": "trigger_series_unanalyzed_analysis", + "summary": "Get the preprocessed search title for a series", + "description": "Returns the series title after applying plugin and library preprocessing rules.\nUse this to get the correct search query before opening the metadata search modal.", + "operationId": "get_series_search_title", "parameters": [ { - "name": "series_id", + "name": "id", "in": "path", "description": "Series ID", "required": true, @@ -9072,24 +9428,34 @@ "type": "string", "format": "uuid" } + }, + { + "name": "pluginId", + "in": "query", + "description": "Plugin ID to get preprocessing rules from", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { "200": { - "description": "Analysis tasks enqueued successfully", + "description": "Preprocessed search title", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/SearchTitleResponse" } } } }, - "403": { - "description": "Permission denied" + "401": { + "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "Series or plugin not found" } }, 
"security": [ @@ -9102,13 +9468,13 @@ ] } }, - "/api/v1/series/{series_id}/books": { + "/api/v1/series/{series_id}": { "get": { "tags": [ "Series" ], - "summary": "Get books in a series", - "operationId": "get_series_books", + "summary": "Get series by ID", + "operationId": "get_series", "parameters": [ { "name": "series_id", @@ -9120,19 +9486,10 @@ "format": "uuid" } }, - { - "name": "includeDeleted", - "in": "query", - "description": "Include deleted books in the result", - "required": false, - "schema": { - "type": "boolean" - } - }, { "name": "full", "in": "query", - "description": "Return full data including metadata and locks.\nDefault is false for backward compatibility.", + "description": "Return full series data including metadata, locks, genres, tags, etc.", "required": false, "schema": { "type": "boolean" @@ -9141,21 +9498,15 @@ ], "responses": { "200": { - "description": "List of books in the series (returns Vec<FullBookResponse> when full=true)", + "description": "Series details (returns FullSeriesResponse when full=true)", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/BookDto" - } + "$ref": "#/components/schemas/SeriesDto" } } } }, - "403": { - "description": "Forbidden" - }, "404": { "description": "Series not found" } @@ -9168,15 +9519,14 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/cover": { - "post": { + }, + "patch": { "tags": [ "Series" ], - "summary": "Upload a custom cover/poster for a series", - "operationId": "upload_series_cover", + "summary": "Update series core fields (name/title)", + "description": "Partially updates series_metadata fields. Only provided fields will be updated.\nAbsent fields are unchanged. 
When name is set to a non-null value, it is automatically locked.", + "operationId": "patch_series", "parameters": [ { "name": "series_id", @@ -9190,11 +9540,10 @@ } ], "requestBody": { - "description": "Multipart form with image file", "content": { - "multipart/form-data": { + "application/json": { "schema": { - "type": "object" + "$ref": "#/components/schemas/PatchSeriesRequest" } } }, @@ -9202,10 +9551,14 @@ }, "responses": { "200": { - "description": "Cover uploaded successfully" - }, - "400": { - "description": "Invalid image or request" + "description": "Series updated successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SeriesUpdateResponse" + } + } + } }, "403": { "description": "Forbidden" @@ -9224,13 +9577,13 @@ ] } }, - "/api/v1/series/{series_id}/cover/source": { - "patch": { + "/api/v1/series/{series_id}/aliases": { + "get": { "tags": [ - "Series" + "Tracking" ], - "summary": "Set which cover source to use for a series (partial update)", - "operationId": "set_series_cover_source", + "summary": "List release-matching aliases for a series.", + "operationId": "list_series_aliases", "parameters": [ { "name": "series_id", @@ -9243,19 +9596,16 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SelectCoverSourceRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Cover source updated successfully" + "description": "List of aliases", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SeriesAliasListResponse" + } + } + } }, "403": { "description": "Forbidden" @@ -9272,15 +9622,14 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/covers": { - "get": { + }, + "post": { "tags": [ - "Series" + "Tracking" ], - "summary": "List all covers for a series", - "operationId": "list_series_covers", + "summary": "Create a release-matching alias for a series.", + "description": "Idempotent: 
if `(series_id, alias)` already exists, returns the existing\nrow with HTTP 200 instead of inserting a duplicate.", + "operationId": "create_series_alias", "parameters": [ { "name": "series_id", @@ -9293,17 +9642,40 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSeriesAliasRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "List of series covers", + "description": "Alias already existed (idempotent)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesCoverListResponse" + "$ref": "#/components/schemas/SeriesAliasDto" + } + } + } + }, + "201": { + "description": "Alias created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SeriesAliasDto" } } } }, + "400": { + "description": "Invalid alias (empty after normalization)" + }, "403": { "description": "Forbidden" }, @@ -9321,13 +9693,13 @@ ] } }, - "/api/v1/series/{series_id}/covers/selected": { + "/api/v1/series/{series_id}/aliases/{alias_id}": { "delete": { "tags": [ - "Series" + "Tracking" ], - "summary": "Reset series cover to default (deselect all custom covers)", - "operationId": "reset_series_cover", + "summary": "Delete a release-matching alias.", + "operationId": "delete_series_alias", "parameters": [ { "name": "series_id", @@ -9338,17 +9710,27 @@ "type": "string", "format": "uuid" } + }, + { + "name": "alias_id", + "in": "path", + "description": "Alias ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { "204": { - "description": "Reset to default cover successfully" + "description": "Alias deleted" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series or alias not found" } }, "security": [ @@ -9361,13 +9743,13 @@ ] } }, - "/api/v1/series/{series_id}/covers/{cover_id}": { - "delete": { + 
"/api/v1/series/{series_id}/alternate-titles": { + "get": { "tags": [ "Series" ], - "summary": "Delete a cover from a series", - "operationId": "delete_series_cover", + "summary": "Get alternate titles for a series", + "operationId": "get_series_alternate_titles", "parameters": [ { "name": "series_id", @@ -9378,11 +9760,46 @@ "type": "string", "format": "uuid" } + } + ], + "responses": { + "200": { + "description": "List of alternate titles for the series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AlternateTitleListResponse" + } + } + } }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ { - "name": "cover_id", + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + }, + "post": { + "tags": [ + "Series" + ], + "summary": "Add an alternate title to a series", + "operationId": "create_alternate_title", + "parameters": [ + { + "name": "series_id", "in": "path", - "description": "Cover ID to delete", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -9390,18 +9807,32 @@ } } ], - "responses": { - "204": { - "description": "Cover deleted successfully" + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateAlternateTitleRequest" + } + } }, - "400": { - "description": "Cannot delete the only selected cover" + "required": true + }, + "responses": { + "201": { + "description": "Alternate title created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AlternateTitleDto" + } + } + } }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or cover not found" + "description": "Series not found" } }, "security": [ @@ -9414,14 +9845,13 @@ ] } }, - "/api/v1/series/{series_id}/covers/{cover_id}/image": { - "get": { + "/api/v1/series/{series_id}/alternate-titles/{title_id}": { + "delete": { "tags": [ "Series" ], - "summary": "Get a 
specific cover image for a series", - "description": "Supports HTTP conditional caching with ETag and Last-Modified headers,\nreturning 304 Not Modified when the client has a valid cached copy.", - "operationId": "get_series_cover_image", + "summary": "Delete an alternate title", + "operationId": "delete_alternate_title", "parameters": [ { "name": "series_id", @@ -9434,9 +9864,9 @@ } }, { - "name": "cover_id", + "name": "title_id", "in": "path", - "description": "Cover ID", + "description": "Alternate title ID", "required": true, "schema": { "type": "string", @@ -9445,20 +9875,14 @@ } ], "responses": { - "200": { - "description": "Cover image", - "content": { - "image/jpeg": {} - } - }, - "304": { - "description": "Not modified (client cache is valid)" + "204": { + "description": "Alternate title deleted" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or cover not found" + "description": "Series or title not found" } }, "security": [ @@ -9469,15 +9893,13 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/covers/{cover_id}/select": { - "put": { + }, + "patch": { "tags": [ "Series" ], - "summary": "Select a cover as the primary cover for a series", - "operationId": "select_series_cover", + "summary": "Update an alternate title", + "operationId": "update_alternate_title", "parameters": [ { "name": "series_id", @@ -9490,9 +9912,9 @@ } }, { - "name": "cover_id", + "name": "title_id", "in": "path", - "description": "Cover ID to select", + "description": "Alternate title ID", "required": true, "schema": { "type": "string", @@ -9500,13 +9922,23 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateAlternateTitleRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Cover selected successfully", + "description": "Alternate title updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesCoverDto" + 
"$ref": "#/components/schemas/AlternateTitleDto" } } } @@ -9515,7 +9947,7 @@ "description": "Forbidden" }, "404": { - "description": "Series or cover not found" + "description": "Series or title not found" } }, "security": [ @@ -9528,14 +9960,14 @@ ] } }, - "/api/v1/series/{series_id}/download": { - "get": { + "/api/v1/series/{series_id}/analyze": { + "post": { "tags": [ - "Series" + "Scans" ], - "summary": "Download all books in a series as a zip file", - "description": "Creates a zip archive containing all detected books in the series.\nOnly includes books that were scanned and detected by the library scanner.", - "operationId": "download_series", + "summary": "Trigger analysis of all books in a series", + "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues an AnalyzeSeries task which will create individual AnalyzeBook tasks\nfor each book in the series. All books are analyzed with force=true.\nReturns immediately with a task_id to track progress.", + "operationId": "trigger_series_analysis", "parameters": [ { "name": "series_id", @@ -9550,21 +9982,25 @@ ], "responses": { "200": { - "description": "Zip file containing all books in the series", + "description": "Analysis task enqueued successfully", "content": { - "application/zip": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateTaskResponse" + } + } } }, "403": { - "description": "Forbidden" + "description": "Permission denied" }, "404": { - "description": "Series not found or has no books" + "description": "Series not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -9572,13 +10008,14 @@ ] } }, - "/api/v1/series/{series_id}/external-ids": { - "get": { + "/api/v1/series/{series_id}/analyze-unanalyzed": { + "post": { "tags": [ - "Series" + "Scans" ], - "summary": "List all external IDs for a series", - "operationId": "list_series_external_ids", + "summary": "Trigger analysis of unanalyzed books in a series", + 
"description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues AnalyzeBook tasks (with force=false) for books in the series that have not been analyzed yet.\nThis is useful for recovering from failures or analyzing newly discovered books.\nReturns immediately with a task_id to track progress.", + "operationId": "trigger_series_unanalyzed_analysis", "parameters": [ { "name": "series_id", @@ -9593,17 +10030,17 @@ ], "responses": { "200": { - "description": "List of external IDs", + "description": "Analysis tasks enqueued successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExternalIdListResponse" + "$ref": "#/components/schemas/CreateTaskResponse" } } } }, "403": { - "description": "Forbidden" + "description": "Permission denied" }, "404": { "description": "Series not found" @@ -9611,20 +10048,21 @@ }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - }, - "post": { + } + }, + "/api/v1/series/{series_id}/books": { + "get": { "tags": [ "Series" ], - "summary": "Create or update an external ID for a series", - "description": "Upserts by series_id + source: if an external ID with the same source already exists,\nit will be updated instead of creating a duplicate.", - "operationId": "create_series_external_id", + "summary": "Get books in a series", + "operationId": "get_series_books", "parameters": [ { "name": "series_id", @@ -9635,25 +10073,36 @@ "type": "string", "format": "uuid" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateSeriesExternalIdRequest" - } + }, + { + "name": "includeDeleted", + "in": "query", + "description": "Include deleted books in the result", + "required": false, + "schema": { + "type": "boolean" } }, - "required": true - }, + { + "name": "full", + "in": "query", + "description": "Return full data including metadata and locks.\nDefault is false for backward compatibility.", + 
"required": false, + "schema": { + "type": "boolean" + } + } + ], "responses": { "200": { - "description": "External ID created or updated", + "description": "List of books in the series (returns Vec<FullBookResponse> when full=true)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExternalIdDto" + "type": "array", + "items": { + "$ref": "#/components/schemas/BookDto" + } } } } @@ -9675,13 +10124,13 @@ ] } }, - "/api/v1/series/{series_id}/external-ids/{external_id_id}": { - "delete": { + "/api/v1/series/{series_id}/cover": { + "post": { "tags": [ "Series" ], - "summary": "Delete an external ID from a series", - "operationId": "delete_series_external_id", + "summary": "Upload a custom cover/poster for a series", + "operationId": "upload_series_cover", "parameters": [ { "name": "series_id", @@ -9692,11 +10141,55 @@ "type": "string", "format": "uuid" } + } + ], + "requestBody": { + "description": "Multipart form with image file", + "content": { + "multipart/form-data": { + "schema": { + "type": "object" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Cover uploaded successfully" + }, + "400": { + "description": "Invalid image or request" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] }, { - "name": "external_id_id", + "api_key": [] + } + ] + } + }, + "/api/v1/series/{series_id}/cover/source": { + "patch": { + "tags": [ + "Series" + ], + "summary": "Set which cover source to use for a series (partial update)", + "operationId": "set_series_cover_source", + "parameters": [ + { + "name": "series_id", "in": "path", - "description": "External ID record ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -9704,15 +10197,25 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SelectCoverSourceRequest" + } + } + }, 
+ "required": true + }, "responses": { - "204": { - "description": "External ID deleted" + "200": { + "description": "Cover source updated successfully" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or external ID not found" + "description": "Series not found" } }, "security": [ @@ -9725,13 +10228,13 @@ ] } }, - "/api/v1/series/{series_id}/external-links": { + "/api/v1/series/{series_id}/covers": { "get": { "tags": [ "Series" ], - "summary": "Get external links for a series", - "operationId": "get_series_external_links", + "summary": "List all covers for a series", + "operationId": "list_series_covers", "parameters": [ { "name": "series_id", @@ -9746,11 +10249,11 @@ ], "responses": { "200": { - "description": "List of external links for the series", + "description": "List of series covers", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ExternalLinkListResponse" + "$ref": "#/components/schemas/SeriesCoverListResponse" } } } @@ -9770,13 +10273,15 @@ "api_key": [] } ] - }, - "post": { + } + }, + "/api/v1/series/{series_id}/covers/selected": { + "delete": { "tags": [ "Series" ], - "summary": "Add or update an external link for a series", - "operationId": "create_external_link", + "summary": "Reset series cover to default (deselect all custom covers)", + "operationId": "reset_series_cover", "parameters": [ { "name": "series_id", @@ -9789,29 +10294,12 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateExternalLinkRequest" - } - } - }, - "required": true - }, "responses": { - "200": { - "description": "External link created or updated", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExternalLinkDto" - } - } - } + "204": { + "description": "Reset to default cover successfully" }, "403": { - "description": "Forbidden - admin only" + "description": "Forbidden" }, "404": { "description": "Series not found" @@ 
-9827,13 +10315,13 @@ ] } }, - "/api/v1/series/{series_id}/external-links/{source}": { + "/api/v1/series/{series_id}/covers/{cover_id}": { "delete": { "tags": [ "Series" ], - "summary": "Delete an external link by source name", - "operationId": "delete_external_link", + "summary": "Delete a cover from a series", + "operationId": "delete_series_cover", "parameters": [ { "name": "series_id", @@ -9846,24 +10334,28 @@ } }, { - "name": "source", + "name": "cover_id", "in": "path", - "description": "Source name (e.g., 'myanimelist', 'mangadex')", + "description": "Cover ID to delete", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } } ], "responses": { "204": { - "description": "External link deleted" + "description": "Cover deleted successfully" + }, + "400": { + "description": "Cannot delete the only selected cover" }, "403": { - "description": "Forbidden - admin only" + "description": "Forbidden" }, "404": { - "description": "Series or link not found" + "description": "Series or cover not found" } }, "security": [ @@ -9876,13 +10368,14 @@ ] } }, - "/api/v1/series/{series_id}/external-ratings": { + "/api/v1/series/{series_id}/covers/{cover_id}/image": { "get": { "tags": [ "Series" ], - "summary": "Get external ratings for a series", - "operationId": "get_series_external_ratings", + "summary": "Get a specific cover image for a series", + "description": "Supports HTTP conditional caching with ETag and Last-Modified headers,\nreturning 304 Not Modified when the client has a valid cached copy.", + "operationId": "get_series_cover_image", "parameters": [ { "name": "series_id", @@ -9893,24 +10386,33 @@ "type": "string", "format": "uuid" } + }, + { + "name": "cover_id", + "in": "path", + "description": "Cover ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { "200": { - "description": "List of external ratings for the series", + "description": "Cover image", "content": { - 
"application/json": { - "schema": { - "$ref": "#/components/schemas/ExternalRatingListResponse" - } - } + "image/jpeg": {} } }, + "304": { + "description": "Not modified (client cache is valid)" + }, "403": { "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series or cover not found" } }, "security": [ @@ -9921,13 +10423,15 @@ "api_key": [] } ] - }, - "post": { + } + }, + "/api/v1/series/{series_id}/covers/{cover_id}/select": { + "put": { "tags": [ "Series" ], - "summary": "Add or update an external rating for a series", - "operationId": "create_external_rating", + "summary": "Select a cover as the primary cover for a series", + "operationId": "select_series_cover", "parameters": [ { "name": "series_id", @@ -9938,34 +10442,34 @@ "type": "string", "format": "uuid" } + }, + { + "name": "cover_id", + "in": "path", + "description": "Cover ID to select", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateExternalRatingRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "External rating created or updated", + "description": "Cover selected successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ExternalRatingDto" + "$ref": "#/components/schemas/SeriesCoverDto" } } } }, "403": { - "description": "Forbidden - admin only" + "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series or cover not found" } }, "security": [ @@ -9978,62 +10482,14 @@ ] } }, - "/api/v1/series/{series_id}/external-ratings/{source}": { - "delete": { + "/api/v1/series/{series_id}/download": { + "get": { "tags": [ "Series" ], - "summary": "Delete an external rating by source name", - "operationId": "delete_external_rating", - "parameters": [ - { - "name": "series_id", - "in": "path", - "description": "Series 
ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - }, - { - "name": "source", - "in": "path", - "description": "Source name (e.g., 'myanimelist', 'anilist')", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "204": { - "description": "External rating deleted" - }, - "403": { - "description": "Forbidden - admin only" - }, - "404": { - "description": "Series or rating not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - } - }, - "/api/v1/series/{series_id}/genres": { - "get": { - "tags": [ - "Genres" - ], - "summary": "Get genres for a series", - "operationId": "get_series_genres", + "summary": "Download all books in a series as a zip file", + "description": "Creates a zip archive containing all detected books in the series.\nOnly includes books that were scanned and detected by the library scanner.", + "operationId": "download_series", "parameters": [ { "name": "series_id", @@ -10048,20 +10504,16 @@ ], "responses": { "200": { - "description": "List of genres for the series", + "description": "Zip file containing all books in the series", "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GenreListResponse" - } - } + "application/zip": {} } }, "403": { "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series not found or has no books" } }, "security": [ @@ -10072,13 +10524,15 @@ "api_key": [] } ] - }, - "put": { + } + }, + "/api/v1/series/{series_id}/external-ids": { + "get": { "tags": [ - "Genres" + "Series" ], - "summary": "Set genres for a series (replaces existing)", - "operationId": "set_series_genres", + "summary": "List all external IDs for a series", + "operationId": "list_series_external_ids", "parameters": [ { "name": "series_id", @@ -10091,23 +10545,13 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/SetSeriesGenresRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Genres updated", + "description": "List of external IDs", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenreListResponse" + "$ref": "#/components/schemas/SeriesExternalIdListResponse" } } } @@ -10130,10 +10574,11 @@ }, "post": { "tags": [ - "Genres" + "Series" ], - "summary": "Add a single genre to a series", - "operationId": "add_series_genre", + "summary": "Create or update an external ID for a series", + "description": "Upserts by series_id + source: if an external ID with the same source already exists,\nit will be updated instead of creating a duplicate.", + "operationId": "create_series_external_id", "parameters": [ { "name": "series_id", @@ -10150,7 +10595,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AddSeriesGenreRequest" + "$ref": "#/components/schemas/CreateSeriesExternalIdRequest" } } }, @@ -10158,11 +10603,11 @@ }, "responses": { "200": { - "description": "Genre added", + "description": "External ID created or updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenreDto" + "$ref": "#/components/schemas/SeriesExternalIdDto" } } } @@ -10184,13 +10629,13 @@ ] } }, - "/api/v1/series/{series_id}/genres/{genre_id}": { + "/api/v1/series/{series_id}/external-ids/{external_id_id}": { "delete": { "tags": [ - "Genres" + "Series" ], - "summary": "Remove a genre from a series", - "operationId": "remove_series_genre", + "summary": "Delete an external ID from a series", + "operationId": "delete_series_external_id", "parameters": [ { "name": "series_id", @@ -10203,9 +10648,9 @@ } }, { - "name": "genre_id", + "name": "external_id_id", "in": "path", - "description": "Genre ID", + "description": "External ID record ID", "required": true, "schema": { "type": "string", @@ -10215,13 +10660,13 @@ ], "responses": { "204": { - "description": 
"Genre removed from series" + "description": "External ID deleted" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or genre link not found" + "description": "Series or external ID not found" } }, "security": [ @@ -10234,14 +10679,13 @@ ] } }, - "/api/v1/series/{series_id}/metadata": { + "/api/v1/series/{series_id}/external-links": { "get": { "tags": [ "Series" ], - "summary": "Get series metadata including all related data", - "description": "Returns comprehensive metadata with lock states, genres, tags, alternate titles,\nexternal ratings, and external links.", - "operationId": "get_series_metadata", + "summary": "Get external links for a series", + "operationId": "get_series_external_links", "parameters": [ { "name": "series_id", @@ -10256,11 +10700,11 @@ ], "responses": { "200": { - "description": "Series metadata with all related data", + "description": "List of external links for the series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FullSeriesMetadataResponse" + "$ref": "#/components/schemas/ExternalLinkListResponse" } } } @@ -10281,13 +10725,12 @@ } ] }, - "put": { + "post": { "tags": [ "Series" ], - "summary": "Replace all series metadata (PUT)", - "description": "Replaces all metadata fields with the values in the request.\nOmitting a field (or setting it to null) will clear that field.", - "operationId": "replace_series_metadata", + "summary": "Add or update an external link for a series", + "operationId": "create_external_link", "parameters": [ { "name": "series_id", @@ -10304,7 +10747,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ReplaceSeriesMetadataRequest" + "$ref": "#/components/schemas/CreateExternalLinkRequest" } } }, @@ -10312,17 +10755,17 @@ }, "responses": { "200": { - "description": "Metadata replaced successfully", + "description": "External link created or updated", "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/SeriesMetadataResponse" + "$ref": "#/components/schemas/ExternalLinkDto" } } } }, "403": { - "description": "Forbidden" + "description": "Forbidden - admin only" }, "404": { "description": "Series not found" @@ -10336,14 +10779,15 @@ "api_key": [] } ] - }, + } + }, + "/api/v1/series/{series_id}/external-links/{source}": { "delete": { "tags": [ "Series" ], - "summary": "Reset series metadata to filesystem-derived defaults", - "description": "Completely resets all series metadata back to original values derived from\nthe filesystem. This deletes and recreates the metadata row, clears all\nassociated data (genres, tags, alternate titles, external IDs, external\nratings, external links, covers, metadata sources, sharing tags), and\nunlocks all fields. The title is reset to the series directory name.\n\nUser ratings, read progress, book records, and book metadata are preserved.", - "operationId": "reset_series_metadata", + "summary": "Delete an external link by source name", + "operationId": "delete_external_link", "parameters": [ { "name": "series_id", @@ -10354,80 +10798,26 @@ "type": "string", "format": "uuid" } - } - ], - "responses": { - "200": { - "description": "Metadata reset successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FullSeriesMetadataResponse" - } - } - } - }, - "403": { - "description": "Forbidden" - }, - "404": { - "description": "Series not found" - } - }, - "security": [ - { - "jwt_bearer": [] }, { - "api_key": [] - } - ] - }, - "patch": { - "tags": [ - "Series" - ], - "summary": "Partially update series metadata (PATCH)", - "description": "Only provided fields will be updated. 
Absent fields are unchanged.\nExplicitly null fields will be cleared.", - "operationId": "patch_series_metadata", - "parameters": [ - { - "name": "series_id", + "name": "source", "in": "path", - "description": "Series ID", + "description": "Source name (e.g., 'myanimelist', 'mangadex')", "required": true, "schema": { - "type": "string", - "format": "uuid" + "type": "string" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PatchSeriesMetadataRequest" - } - } - }, - "required": true - }, "responses": { - "200": { - "description": "Metadata updated successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SeriesMetadataResponse" - } - } - } + "204": { + "description": "External link deleted" }, "403": { - "description": "Forbidden" + "description": "Forbidden - admin only" }, "404": { - "description": "Series not found" + "description": "Series or link not found" } }, "security": [ @@ -10440,13 +10830,13 @@ ] } }, - "/api/v1/series/{series_id}/metadata/locks": { + "/api/v1/series/{series_id}/external-ratings": { "get": { "tags": [ "Series" ], - "summary": "Get metadata lock states", - "operationId": "get_metadata_locks", + "summary": "Get external ratings for a series", + "operationId": "get_series_external_ratings", "parameters": [ { "name": "series_id", @@ -10461,11 +10851,11 @@ ], "responses": { "200": { - "description": "Current lock states", + "description": "List of external ratings for the series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataLocks" + "$ref": "#/components/schemas/ExternalRatingListResponse" } } } @@ -10486,13 +10876,12 @@ } ] }, - "put": { + "post": { "tags": [ "Series" ], - "summary": "Update metadata lock states", - "description": "Sets which metadata fields are locked. 
Locked fields will not be overwritten\nby automatic metadata refresh from book analysis or external sources.", - "operationId": "update_metadata_locks", + "summary": "Add or update an external rating for a series", + "operationId": "create_external_rating", "parameters": [ { "name": "series_id", @@ -10509,7 +10898,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdateMetadataLocksRequest" + "$ref": "#/components/schemas/CreateExternalRatingRequest" } } }, @@ -10517,17 +10906,17 @@ }, "responses": { "200": { - "description": "Lock states updated", + "description": "External rating created or updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MetadataLocks" + "$ref": "#/components/schemas/ExternalRatingDto" } } } }, "403": { - "description": "Forbidden" + "description": "Forbidden - admin only" }, "404": { "description": "Series not found" @@ -10543,13 +10932,13 @@ ] } }, - "/api/v1/series/{series_id}/purge-deleted": { + "/api/v1/series/{series_id}/external-ratings/{source}": { "delete": { "tags": [ "Series" ], - "summary": "Purge deleted books from a series", - "operationId": "purge_series_deleted_books", + "summary": "Delete an external rating by source name", + "operationId": "delete_external_rating", "parameters": [ { "name": "series_id", @@ -10560,26 +10949,26 @@ "type": "string", "format": "uuid" } + }, + { + "name": "source", + "in": "path", + "description": "Source name (e.g., 'myanimelist', 'anilist')", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { - "200": { - "description": "Number of books purged", - "content": { - "text/plain": { - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - } + "204": { + "description": "External rating deleted" }, "403": { - "description": "Forbidden" + "description": "Forbidden - admin only" }, "404": { - "description": "Series not found" + "description": "Series or rating not found" } }, 
"security": [ @@ -10592,14 +10981,13 @@ ] } }, - "/api/v1/series/{series_id}/rating": { + "/api/v1/series/{series_id}/genres": { "get": { "tags": [ - "Ratings" + "Genres" ], - "summary": "Get the current user's rating for a series", - "description": "Returns null if no rating exists (not a 404, since the series exists but has no rating)", - "operationId": "get_series_rating", + "summary": "Get genres for a series", + "operationId": "get_series_genres", "parameters": [ { "name": "series_id", @@ -10614,18 +11002,11 @@ ], "responses": { "200": { - "description": "User's rating for the series (null if not rated)", + "description": "List of genres for the series", "content": { "application/json": { "schema": { - "oneOf": [ - { - "type": "null" - }, - { - "$ref": "#/components/schemas/UserSeriesRatingDto" - } - ] + "$ref": "#/components/schemas/GenreListResponse" } } } @@ -10648,10 +11029,10 @@ }, "put": { "tags": [ - "Ratings" + "Genres" ], - "summary": "Set (create or update) the current user's rating for a series", - "operationId": "set_series_rating", + "summary": "Set genres for a series (replaces existing)", + "operationId": "set_series_genres", "parameters": [ { "name": "series_id", @@ -10668,7 +11049,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SetUserRatingRequest" + "$ref": "#/components/schemas/SetSeriesGenresRequest" } } }, @@ -10676,18 +11057,15 @@ }, "responses": { "200": { - "description": "Rating saved", + "description": "Genres updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserSeriesRatingDto" + "$ref": "#/components/schemas/GenreListResponse" } } } }, - "400": { - "description": "Invalid rating value" - }, "403": { "description": "Forbidden" }, @@ -10704,12 +11082,12 @@ } ] }, - "delete": { + "post": { "tags": [ - "Ratings" + "Genres" ], - "summary": "Delete the current user's rating for a series", - "operationId": "delete_series_rating", + "summary": "Add a single genre 
to a series", + "operationId": "add_series_genre", "parameters": [ { "name": "series_id", @@ -10722,15 +11100,32 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddSeriesGenreRequest" + } + } + }, + "required": true + }, "responses": { - "204": { - "description": "Rating deleted" + "200": { + "description": "Genre added", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GenreDto" + } + } + } }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or rating not found" + "description": "Series not found" } }, "security": [ @@ -10743,14 +11138,13 @@ ] } }, - "/api/v1/series/{series_id}/ratings/average": { - "get": { + "/api/v1/series/{series_id}/genres/{genre_id}": { + "delete": { "tags": [ - "Series" + "Genres" ], - "summary": "Get the average community rating for a series", - "description": "Returns the average rating from all users and the total count of ratings.\nRatings are stored on a 0-100 scale internally.", - "operationId": "get_series_average_rating", + "summary": "Remove a genre from a series", + "operationId": "remove_series_genre", "parameters": [ { "name": "series_id", @@ -10761,28 +11155,27 @@ "type": "string", "format": "uuid" } + }, + { + "name": "genre_id", + "in": "path", + "description": "Genre ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { - "200": { - "description": "Average rating for the series", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SeriesAverageRatingResponse" - }, - "example": { - "average": 78.5, - "count": 15 - } - } - } + "204": { + "description": "Genre removed from series" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series not found" + "description": "Series or genre link not found" } }, "security": [ @@ -10795,13 +11188,14 @@ ] } }, - "/api/v1/series/{series_id}/read": { - "post": { + 
"/api/v1/series/{series_id}/metadata": { + "get": { "tags": [ "Series" ], - "summary": "Mark all books in a series as read", - "operationId": "mark_series_as_read", + "summary": "Get series metadata including all related data", + "description": "Returns comprehensive metadata with lock states, genres, tags, alternate titles,\nexternal ratings, and external links.", + "operationId": "get_series_metadata", "parameters": [ { "name": "series_id", @@ -10816,11 +11210,11 @@ ], "responses": { "200": { - "description": "Series marked as read", + "description": "Series metadata with all related data", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MarkReadResponse" + "$ref": "#/components/schemas/FullSeriesMetadataResponse" } } } @@ -10840,16 +11234,14 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/renumber": { - "post": { + }, + "put": { "tags": [ - "Scans" + "Series" ], - "summary": "Renumber all books in a series", - "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues a `RenumberSeries` task that recalculates book numbers based on the\nlibrary's number strategy and the current natural sort order of filenames.\nReturns a task ID for tracking progress via SSE.", - "operationId": "renumber_series", + "summary": "Replace all series metadata (PUT)", + "description": "Replaces all metadata fields with the values in the request.\nOmitting a field (or setting it to null) will clear that field.", + "operationId": "replace_series_metadata", "parameters": [ { "name": "series_id", @@ -10862,19 +11254,29 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ReplaceSeriesMetadataRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Renumber task enqueued", + "description": "Metadata replaced successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": 
"#/components/schemas/SeriesMetadataResponse" } } } }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { "description": "Series not found" @@ -10882,21 +11284,20 @@ }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/sharing-tags": { - "get": { + }, + "delete": { "tags": [ - "Sharing Tags" + "Series" ], - "summary": "Get sharing tags for a series (admin only)", - "operationId": "get_series_sharing_tags", + "summary": "Reset series metadata to filesystem-derived defaults", + "description": "Completely resets all series metadata back to original values derived from\nthe filesystem. This deletes and recreates the metadata row, clears all\nassociated data (genres, tags, alternate titles, external IDs, external\nratings, external links, covers, metadata sources, sharing tags), and\nunlocks all fields. The title is reset to the series directory name.\n\nUser ratings, read progress, book records, and book metadata are preserved.", + "operationId": "reset_series_metadata", "parameters": [ { "name": "series_id", @@ -10911,20 +11312,20 @@ ], "responses": { "200": { - "description": "List of sharing tags for the series", + "description": "Metadata reset successfully", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SharingTagSummaryDto" - } + "$ref": "#/components/schemas/FullSeriesMetadataResponse" } } } }, "403": { - "description": "Forbidden - Missing permission" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -10936,12 +11337,13 @@ } ] }, - "put": { + "patch": { "tags": [ - "Sharing Tags" + "Series" ], - "summary": "Set sharing tags for a series (replaces existing) (admin only)", - "operationId": "set_series_sharing_tags", + "summary": "Partially update series metadata (PATCH)", + "description": "Only provided fields will be updated. 
Absent fields are unchanged.\nExplicitly null fields will be cleared.", + "operationId": "patch_series_metadata", "parameters": [ { "name": "series_id", @@ -10958,7 +11360,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SetSeriesSharingTagsRequest" + "$ref": "#/components/schemas/PatchSeriesMetadataRequest" } } }, @@ -10966,20 +11368,20 @@ }, "responses": { "200": { - "description": "Sharing tags set", + "description": "Metadata updated successfully", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SharingTagSummaryDto" - } + "$ref": "#/components/schemas/SeriesMetadataResponse" } } } }, "403": { - "description": "Forbidden - Missing permission" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -10990,13 +11392,15 @@ "api_key": [] } ] - }, - "post": { + } + }, + "/api/v1/series/{series_id}/metadata/locks": { + "get": { "tags": [ - "Sharing Tags" + "Series" ], - "summary": "Add a sharing tag to a series (admin only)", - "operationId": "add_series_sharing_tag", + "summary": "Get metadata lock states", + "operationId": "get_metadata_locks", "parameters": [ { "name": "series_id", @@ -11009,25 +11413,22 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ModifySeriesSharingTagRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Sharing tag added" - }, - "400": { - "description": "Tag already assigned" + "description": "Current lock states", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataLocks" + } + } + } }, "403": { - "description": "Forbidden - Missing permission" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -11038,15 +11439,14 @@ "api_key": [] } ] - } - }, - "/api/v1/series/{series_id}/sharing-tags/{tag_id}": { - 
"delete": { - "tags": [ - "Sharing Tags" + }, + "put": { + "tags": [ + "Series" ], - "summary": "Remove a sharing tag from a series (admin only)", - "operationId": "remove_series_sharing_tag", + "summary": "Update metadata lock states", + "description": "Sets which metadata fields are locked. Locked fields will not be overwritten\nby automatic metadata refresh from book analysis or external sources.", + "operationId": "update_metadata_locks", "parameters": [ { "name": "series_id", @@ -11057,27 +11457,34 @@ "type": "string", "format": "uuid" } - }, - { - "name": "tag_id", - "in": "path", - "description": "Sharing tag ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateMetadataLocksRequest" + } + } + }, + "required": true + }, "responses": { - "204": { - "description": "Sharing tag removed" + "200": { + "description": "Lock states updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MetadataLocks" + } + } + } }, "403": { - "description": "Forbidden - Missing permission" + "description": "Forbidden" }, "404": { - "description": "Sharing tag not assigned to series" + "description": "Series not found" } }, "security": [ @@ -11090,13 +11497,13 @@ ] } }, - "/api/v1/series/{series_id}/tags": { - "get": { + "/api/v1/series/{series_id}/purge-deleted": { + "delete": { "tags": [ - "Tags" + "Series" ], - "summary": "Get tags for a series", - "operationId": "get_series_tags", + "summary": "Purge deleted books from a series", + "operationId": "purge_series_deleted_books", "parameters": [ { "name": "series_id", @@ -11111,11 +11518,13 @@ ], "responses": { "200": { - "description": "List of tags for the series", + "description": "Number of books purged", "content": { - "application/json": { + "text/plain": { "schema": { - "$ref": "#/components/schemas/TagListResponse" + "type": "integer", + 
"format": "int64", + "minimum": 0 } } } @@ -11135,13 +11544,16 @@ "api_key": [] } ] - }, - "put": { + } + }, + "/api/v1/series/{series_id}/rating": { + "get": { "tags": [ - "Tags" + "Ratings" ], - "summary": "Set tags for a series (replaces existing)", - "operationId": "set_series_tags", + "summary": "Get the current user's rating for a series", + "description": "Returns null if no rating exists (not a 404, since the series exists but has no rating)", + "operationId": "get_series_rating", "parameters": [ { "name": "series_id", @@ -11154,23 +11566,20 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SetSeriesTagsRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Tags updated", + "description": "User's rating for the series (null if not rated)", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TagListResponse" + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/UserSeriesRatingDto" + } + ] } } } @@ -11191,12 +11600,12 @@ } ] }, - "post": { + "put": { "tags": [ - "Tags" + "Ratings" ], - "summary": "Add a single tag to a series", - "operationId": "add_series_tag", + "summary": "Set (create or update) the current user's rating for a series", + "operationId": "set_series_rating", "parameters": [ { "name": "series_id", @@ -11213,7 +11622,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AddSeriesTagRequest" + "$ref": "#/components/schemas/SetUserRatingRequest" } } }, @@ -11221,15 +11630,18 @@ }, "responses": { "200": { - "description": "Tag added", + "description": "Rating saved", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TagDto" + "$ref": "#/components/schemas/UserSeriesRatingDto" } } } }, + "400": { + "description": "Invalid rating value" + }, "403": { "description": "Forbidden" }, @@ -11245,15 +11657,13 @@ "api_key": [] } ] - } - }, - 
"/api/v1/series/{series_id}/tags/{tag_id}": { + }, "delete": { "tags": [ - "Tags" + "Ratings" ], - "summary": "Remove a tag from a series", - "operationId": "remove_series_tag", + "summary": "Delete the current user's rating for a series", + "operationId": "delete_series_rating", "parameters": [ { "name": "series_id", @@ -11264,27 +11674,17 @@ "type": "string", "format": "uuid" } - }, - { - "name": "tag_id", - "in": "path", - "description": "Tag ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } } ], "responses": { "204": { - "description": "Tag removed from series" + "description": "Rating deleted" }, "403": { "description": "Forbidden" }, "404": { - "description": "Series or tag link not found" + "description": "Series or rating not found" } }, "security": [ @@ -11297,13 +11697,14 @@ ] } }, - "/api/v1/series/{series_id}/thumbnail": { + "/api/v1/series/{series_id}/ratings/average": { "get": { "tags": [ "Series" ], - "summary": "Get thumbnail/cover image for a series", - "operationId": "get_series_thumbnail", + "summary": "Get the average community rating for a series", + "description": "Returns the average rating from all users and the total count of ratings.\nRatings are stored on a 0-100 scale internally.", + "operationId": "get_series_average_rating", "parameters": [ { "name": "series_id", @@ -11318,14 +11719,19 @@ ], "responses": { "200": { - "description": "Thumbnail image", + "description": "Average rating for the series", "content": { - "image/jpeg": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/SeriesAverageRatingResponse" + }, + "example": { + "average": 78.5, + "count": 15 + } + } } }, - "304": { - "description": "Not modified (client cache is valid)" - }, "403": { "description": "Forbidden" }, @@ -11343,14 +11749,13 @@ ] } }, - "/api/v1/series/{series_id}/thumbnail/generate": { + "/api/v1/series/{series_id}/read": { "post": { "tags": [ - "Thumbnails" + "Series" ], - "summary": "Generate thumbnail 
for a series", - "description": "Queues a task to generate (or regenerate) the thumbnail for a specific series.\nThe series thumbnail is derived from the first book's cover.\n\n# Permission Required\n- `tasks:write`", - "operationId": "generate_series_thumbnail", + "summary": "Mark all books in a series as read", + "operationId": "mark_series_as_read", "parameters": [ { "name": "series_id", @@ -11363,29 +11768,19 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ForceRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Thumbnail generation task queued", + "description": "Series marked as read", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/MarkReadResponse" } } } }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { "description": "Series not found" @@ -11393,7 +11788,7 @@ }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11401,14 +11796,13 @@ ] } }, - "/api/v1/series/{series_id}/title/reprocess": { - "post": { + "/api/v1/series/{series_id}/releases": { + "get": { "tags": [ - "Tasks" + "Releases" ], - "summary": "Reprocess a series title using library preprocessing rules", - "description": "Applies the library's preprocessing rules to the series' original directory name\nto regenerate the display title. 
This is useful when preprocessing rules are added\nor changed after series have already been created.\n\nThe title will only be updated if:\n- The `title_lock` is false (respects user edits)\n- The preprocessing rules produce a different title\n\nIf the title is changed and `title_sort_lock` is false, the `title_sort` will be\ncleared (set to None) to let it fall back to the new title for sorting.\n\n- With `dryRun: true`: Returns a synchronous preview of what would change\n- With `dryRun: false` (default): Enqueues a background task to process\n\n# Permission Required\n- `series:write`", - "operationId": "reprocess_series_title", + "summary": "List release-ledger entries for a series.", + "operationId": "list_series_releases", "parameters": [ { "name": "series_id", @@ -11419,25 +11813,49 @@ "type": "string", "format": "uuid" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/EnqueueReprocessTitleRequest" - } + }, + { + "name": "state", + "in": "query", + "description": "Filter by state. 
Defaults to all states (no filter) so the per-series\nview shows the full history.", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] } }, - "required": true - }, + { + "name": "page", + "in": "query", + "description": "1-indexed page number.", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "description": "Items per page (max 500, default 50).", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + ], "responses": { "200": { - "description": "Task enqueued or dry run preview", + "description": "Paginated ledger entries for the series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EnqueueReprocessTitleResponse" + "$ref": "#/components/schemas/PaginatedResponse_ReleaseLedgerEntryDto" } } } @@ -11451,7 +11869,7 @@ }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11459,13 +11877,14 @@ ] } }, - "/api/v1/series/{series_id}/unread": { + "/api/v1/series/{series_id}/renumber": { "post": { "tags": [ - "Series" + "Scans" ], - "summary": "Mark all books in a series as unread", - "operationId": "mark_series_as_unread", + "summary": "Renumber all books in a series", + "description": "# Permission Required\n- `series:write`\n\n# Behavior\nEnqueues a `RenumberSeries` task that recalculates book numbers based on the\nlibrary's number strategy and the current natural sort order of filenames.\nReturns a task ID for tracking progress via SSE.", + "operationId": "renumber_series", "parameters": [ { "name": "series_id", @@ -11480,17 +11899,17 @@ ], "responses": { "200": { - "description": "Series marked as unread", + "description": "Renumber task enqueued", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MarkReadResponse" + "$ref": "#/components/schemas/CreateTaskResponse" } } } }, "403": { - "description": "Forbidden" + 
"description": "Permission denied" }, "404": { "description": "Series not found" @@ -11498,7 +11917,7 @@ }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -11506,64 +11925,41 @@ ] } }, - "/api/v1/settings/branding": { + "/api/v1/series/{series_id}/sharing-tags": { "get": { "tags": [ - "Settings" + "Sharing Tags" ], - "summary": "Get branding settings (unauthenticated)", - "description": "Returns branding-related settings that are needed on unauthenticated pages\nlike the login screen. This endpoint does not require authentication.", - "operationId": "get_branding_settings", - "responses": { - "200": { - "description": "Branding settings", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BrandingSettingsDto" - }, - "example": { - "applicationName": "Codex" - } - } + "summary": "Get sharing tags for a series (admin only)", + "operationId": "get_series_sharing_tags", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } } - } - } - }, - "/api/v1/settings/public": { - "get": { - "tags": [ - "Settings" ], - "summary": "Get public display settings (authenticated users)", - "description": "Returns non-sensitive settings that affect UI/display behavior.\nThis endpoint is available to all authenticated users, not just admins.", - "operationId": "get_public_settings", "responses": { "200": { - "description": "Public settings", + "description": "List of sharing tags for the series", "content": { "application/json": { "schema": { - "type": "object", - "additionalProperties": { - "$ref": "#/components/schemas/PublicSettingDto" - }, - "propertyNames": { - "type": "string" - } - }, - "example": { - "display.custom_metadata_template": { - "key": "display.custom_metadata_template", - "value": "{{#if custom_metadata}}## Additional Information\n{{#each custom_metadata}}- **{{@key}}**: 
{{this}}\n{{/each}}{{/if}}" + "type": "array", + "items": { + "$ref": "#/components/schemas/SharingTagSummaryDto" } } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Missing permission" } }, "security": [ @@ -11574,21 +11970,30 @@ "api_key": [] } ] - } - }, - "/api/v1/setup/initialize": { - "post": { + }, + "put": { "tags": [ - "Setup" + "Sharing Tags" + ], + "summary": "Set sharing tags for a series (replaces existing) (admin only)", + "operationId": "set_series_sharing_tags", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Initialize application setup by creating the first admin user", - "description": "Creates the first admin user with email verification bypassed and returns a JWT token", - "operationId": "initialize_setup", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InitializeSetupRequest" + "$ref": "#/components/schemas/SetSeriesSharingTagsRequest" } } }, @@ -11596,37 +12001,54 @@ }, "responses": { "200": { - "description": "Setup initialized", + "description": "Sharing tags set", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InitializeSetupResponse" + "type": "array", + "items": { + "$ref": "#/components/schemas/SharingTagSummaryDto" + } } } } }, - "400": { - "description": "Invalid request or setup already completed" + "403": { + "description": "Forbidden - Missing permission" + } + }, + "security": [ + { + "jwt_bearer": [] }, - "422": { - "description": "Validation error" + { + "api_key": [] } - } - } - }, - "/api/v1/setup/settings": { - "patch": { + ] + }, + "post": { "tags": [ - "Setup" + "Sharing Tags" + ], + "summary": "Add a sharing tag to a series (admin only)", + "operationId": "add_series_sharing_tag", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": 
"Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Configure initial settings (optional step in setup wizard)", - "description": "Allows the newly created admin to configure database settings", - "operationId": "configure_initial_settings", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ConfigureSettingsRequest" + "$ref": "#/components/schemas/ModifySeriesSharingTagRequest" } } }, @@ -11634,92 +12056,63 @@ }, "responses": { "200": { - "description": "Settings configured", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConfigureSettingsResponse" - } - } - } + "description": "Sharing tag added" + }, + "400": { + "description": "Tag already assigned" }, "403": { - "description": "Forbidden - Admin only" + "description": "Forbidden - Missing permission" } }, "security": [ { "jwt_bearer": [] + }, + { + "api_key": [] } ] } }, - "/api/v1/setup/status": { - "get": { - "tags": [ - "Setup" - ], - "summary": "Check if initial setup is required", - "description": "Returns whether the application needs initial setup (no users exist)", - "operationId": "setup_status", - "responses": { - "200": { - "description": "Setup status", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SetupStatusResponse" - } - } - } - } - } - } - }, - "/api/v1/tags": { - "get": { + "/api/v1/series/{series_id}/sharing-tags/{tag_id}": { + "delete": { "tags": [ - "Tags" + "Sharing Tags" ], - "summary": "List all tags", - "operationId": "list_tags", + "summary": "Remove a sharing tag from a series (admin only)", + "operationId": "remove_series_sharing_tag", "parameters": [ { - "name": "page", - "in": "query", - "description": "Page number (1-indexed, default 1)", - "required": false, + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, "schema": { - "type": "integer", - "format": "int64", - 
"minimum": 0 + "type": "string", + "format": "uuid" } }, { - "name": "pageSize", - "in": "query", - "description": "Number of items per page (default 50, max 500)", - "required": false, + "name": "tag_id", + "in": "path", + "description": "Sharing tag ID", + "required": true, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": "string", + "format": "uuid" } } ], "responses": { - "200": { - "description": "List of all tags", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PaginatedResponse_TagDto" - } - } - } + "204": { + "description": "Sharing tag removed" }, "403": { - "description": "Forbidden" + "description": "Forbidden - Missing permission" + }, + "404": { + "description": "Sharing tag not assigned to series" } }, "security": [ @@ -11732,29 +12125,44 @@ ] } }, - "/api/v1/tags/cleanup": { - "post": { + "/api/v1/series/{series_id}/tags": { + "get": { "tags": [ "Tags" ], - "summary": "Delete all unused tags (tags with no series linked)", - "operationId": "cleanup_tags", + "summary": "Get tags for a series", + "operationId": "get_series_tags", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], "responses": { "200": { - "description": "Cleanup completed", + "description": "List of tags for the series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TaxonomyCleanupResponse" + "$ref": "#/components/schemas/TagListResponse" } } } }, "403": { - "description": "Forbidden - admin only" - } - }, - "security": [ + "description": "Forbidden" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ { "jwt_bearer": [] }, @@ -11762,20 +12170,18 @@ "api_key": [] } ] - } - }, - "/api/v1/tags/{tag_id}": { - "delete": { + }, + "put": { "tags": [ "Tags" ], - "summary": "Delete a tag from the taxonomy (admin only)", - "operationId": 
"delete_tag", + "summary": "Set tags for a series (replaces existing)", + "operationId": "set_series_tags", "parameters": [ { - "name": "tag_id", + "name": "series_id", "in": "path", - "description": "Tag ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -11783,90 +12189,37 @@ } } ], - "responses": { - "204": { - "description": "Tag deleted" - }, - "403": { - "description": "Forbidden - admin only" - }, - "404": { - "description": "Tag not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - } - }, - "/api/v1/tasks": { - "get": { - "tags": [ - "Task Queue" - ], - "summary": "List tasks with optional filtering", - "description": "# Permission Required\n- `tasks:read`", - "operationId": "list_tasks", - "parameters": [ - { - "name": "status", - "in": "query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "taskType", - "in": "query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetSeriesTagsRequest" + } } }, - { - "name": "limit", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - ], + "required": true + }, "responses": { "200": { - "description": "Tasks retrieved successfully", + "description": "Tags updated", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/TaskResponse" - } + "$ref": "#/components/schemas/TagListResponse" } } } }, "403": { - "description": "Permission denied" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11875,16 +12228,27 @@ }, "post": { "tags": [ - "Task Queue" + "Tags" + ], + "summary": "Add a single tag to a series", + "operationId": "add_series_tag", + 
"parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Create a new task", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "create_task", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskRequest" + "$ref": "#/components/schemas/AddSeriesTagRequest" } } }, @@ -11892,25 +12256,25 @@ }, "responses": { "200": { - "description": "Task created successfully", + "description": "Tag added", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateTaskResponse" + "$ref": "#/components/schemas/TagDto" } } } }, - "400": { - "description": "Invalid request" - }, "403": { - "description": "Permission denied" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11918,32 +12282,49 @@ ] } }, - "/api/v1/tasks/nuke": { + "/api/v1/series/{series_id}/tags/{tag_id}": { "delete": { "tags": [ - "Task Queue" + "Tags" ], - "summary": "Nuclear option: Delete ALL tasks", - "description": "# Permission Required\n- `admin`", - "operationId": "nuke_all_tasks", - "responses": { - "200": { - "description": "All tasks deleted", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PurgeTasksResponse" - } - } + "summary": "Remove a tag from a series", + "operationId": "remove_series_tag", + "parameters": [ + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "tag_id", + "in": "path", + "description": "Tag ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } + } + ], + "responses": { + "204": { + "description": "Tag removed from series" }, "403": { - "description": "Permission 
denied (admin only)" + "description": "Forbidden" + }, + "404": { + "description": "Series or tag link not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11951,44 +12332,45 @@ ] } }, - "/api/v1/tasks/purge": { - "delete": { + "/api/v1/series/{series_id}/thumbnail": { + "get": { "tags": [ - "Task Queue" + "Series" ], - "summary": "Purge old completed/failed tasks", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "purge_old_tasks", + "summary": "Get thumbnail/cover image for a series", + "operationId": "get_series_thumbnail", "parameters": [ { - "name": "days", - "in": "query", - "description": "Delete tasks older than N days (default: 30)", - "required": false, + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, "schema": { - "type": "integer", - "format": "int64" + "type": "string", + "format": "uuid" } } ], "responses": { "200": { - "description": "Tasks purged successfully", + "description": "Thumbnail image", "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PurgeTasksResponse" - } - } + "image/jpeg": {} } }, + "304": { + "description": "Not modified (client cache is valid)" + }, "403": { - "description": "Permission denied" + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -11996,64 +12378,57 @@ ] } }, - "/api/v1/tasks/stats": { - "get": { + "/api/v1/series/{series_id}/thumbnail/generate": { + "post": { "tags": [ - "Task Queue" + "Thumbnails" ], - "summary": "Get queue statistics", - "description": "# Permission Required\n- `tasks:read`", - "operationId": "get_task_stats", - "responses": { - "200": { - "description": "Statistics retrieved successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/TaskStats" - } - } - } - }, - "403": { - "description": "Permission denied" 
- } - }, - "security": [ - { - "bearer_auth": [] - }, + "summary": "Generate thumbnail for a series", + "description": "Queues a task to generate (or regenerate) the thumbnail for a specific series.\nThe series thumbnail is derived from the first book's cover.\n\n# Permission Required\n- `tasks:write`", + "operationId": "generate_series_thumbnail", + "parameters": [ { - "api_key": [] + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } - ] - } - }, - "/api/v1/tasks/stream": { - "get": { - "tags": [ - "Events" ], - "summary": "Subscribe to real-time task progress events via SSE", - "description": "Clients can subscribe to this endpoint to receive real-time notifications\nabout background task progress (analyze_book, generate_thumbnails, etc.).\n\n## Authentication\nRequires valid authentication with `LibrariesRead` permission.\n\n## Event Format\nEvents are sent as JSON-encoded `TaskProgressEvent` objects with the following structure:\n```json\n{\n \"task_id\": \"uuid\",\n \"task_type\": \"analyze_book\",\n \"status\": \"running\",\n \"progress\": {\n \"current\": 5,\n \"total\": 10,\n \"message\": \"Processing book 5 of 10\"\n },\n \"started_at\": \"2024-01-06T12:00:00Z\",\n \"library_id\": \"uuid\"\n}\n```\n\n## Keep-Alive\nA keep-alive message is sent every 15 seconds to prevent connection timeout.", - "operationId": "task_progress_stream", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ForceRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "SSE stream of task progress events", + "description": "Thumbnail generation task queued", "content": { - "text/event-stream": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateTaskResponse" + } + } } }, - "401": { - "description": "Unauthorized" - }, "403": { - "description": "Forbidden" + "description": 
"Permission denied" + }, + "404": { + "description": "Series not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -12061,19 +12436,19 @@ ] } }, - "/api/v1/tasks/{task_id}": { - "get": { + "/api/v1/series/{series_id}/title/reprocess": { + "post": { "tags": [ - "Task Queue" + "Tasks" ], - "summary": "Get task by ID", - "description": "# Permission Required\n- `tasks:read`", - "operationId": "get_task", + "summary": "Reprocess a series title using library preprocessing rules", + "description": "Applies the library's preprocessing rules to the series' original directory name\nto regenerate the display title. This is useful when preprocessing rules are added\nor changed after series have already been created.\n\nThe title will only be updated if:\n- The `title_lock` is false (respects user edits)\n- The preprocessing rules produce a different title\n\nIf the title is changed and `title_sort_lock` is false, the `title_sort` will be\ncleared (set to None) to let it fall back to the new title for sorting.\n\n- With `dryRun: true`: Returns a synchronous preview of what would change\n- With `dryRun: false` (default): Enqueues a background task to process\n\n# Permission Required\n- `series:write`", + "operationId": "reprocess_series_title", "parameters": [ { - "name": "task_id", + "name": "series_id", "in": "path", - "description": "Task ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -12081,22 +12456,32 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EnqueueReprocessTitleRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Task retrieved successfully", + "description": "Task enqueued or dry run preview", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TaskResponse" + "$ref": "#/components/schemas/EnqueueReprocessTitleResponse" } } } }, "403": { - "description": 
"Permission denied" + "description": "Forbidden" }, "404": { - "description": "Task not found" + "description": "Series not found" } }, "security": [ @@ -12109,19 +12494,19 @@ ] } }, - "/api/v1/tasks/{task_id}/cancel": { - "post": { + "/api/v1/series/{series_id}/tracking": { + "get": { "tags": [ - "Task Queue" + "Tracking" ], - "summary": "Cancel a task", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "cancel_task", + "summary": "Get release-tracking config for a series.", + "description": "Returns a virtual untracked row when no `series_tracking` row exists, so the\nfrontend can render the panel uniformly without special-casing absent rows.", + "operationId": "get_series_tracking", "parameters": [ { - "name": "task_id", + "name": "series_id", "in": "path", - "description": "Task ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -12131,48 +12516,43 @@ ], "responses": { "200": { - "description": "Task cancelled successfully", + "description": "Tracking config", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MessageResponse" + "$ref": "#/components/schemas/SeriesTrackingDto" } } } }, - "400": { - "description": "Task cannot be cancelled" - }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { - "description": "Task not found" + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] - } - }, - "/api/v1/tasks/{task_id}/retry": { - "post": { + }, + "patch": { "tags": [ - "Task Queue" + "Tracking" ], - "summary": "Retry a failed task", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "retry_task", + "summary": "Update release-tracking config for a series.", + "description": "Upserts: creates the row on first write, applies the patch otherwise.\nAll fields are optional — omit to leave alone, send `null` on a nullable\nfield to clear it.", + 
"operationId": "update_series_tracking", "parameters": [ { - "name": "task_id", + "name": "series_id", "in": "path", - "description": "Task ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -12180,30 +12560,37 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateSeriesTrackingRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Task queued for retry", + "description": "Tracking config updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MessageResponse" + "$ref": "#/components/schemas/SeriesTrackingDto" } } } }, - "400": { - "description": "Task is not in failed state" - }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { - "description": "Task not found" + "description": "Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -12211,19 +12598,18 @@ ] } }, - "/api/v1/tasks/{task_id}/unlock": { + "/api/v1/series/{series_id}/unread": { "post": { "tags": [ - "Task Queue" + "Series" ], - "summary": "Unlock a stuck task", - "description": "# Permission Required\n- `tasks:write`", - "operationId": "unlock_task", + "summary": "Mark all books in a series as unread", + "operationId": "mark_series_as_unread", "parameters": [ { - "name": "task_id", + "name": "series_id", "in": "path", - "description": "Task ID", + "description": "Series ID", "required": true, "schema": { "type": "string", @@ -12233,25 +12619,25 @@ ], "responses": { "200": { - "description": "Task unlocked successfully", + "description": "Series marked as unread", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MessageResponse" + "$ref": "#/components/schemas/MarkReadResponse" } } } }, "403": { - "description": "Permission denied" + "description": "Forbidden" }, "404": { - "description": "Task not found" + "description": 
"Series not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -12259,55 +12645,64 @@ ] } }, - "/api/v1/user": { + "/api/v1/settings/branding": { "get": { "tags": [ - "Current User" + "Settings" ], - "summary": "Get the currently authenticated user's profile", - "operationId": "get_current_user", + "summary": "Get branding settings (unauthenticated)", + "description": "Returns branding-related settings that are needed on unauthenticated pages\nlike the login screen. This endpoint does not require authentication.", + "operationId": "get_branding_settings", "responses": { "200": { - "description": "Current user's profile with sharing tags", + "description": "Branding settings", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserDetailDto" + "$ref": "#/components/schemas/BrandingSettingsDto" + }, + "example": { + "applicationName": "Codex" } } } - }, - "401": { - "description": "Unauthorized" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] } - ] + } } }, - "/api/v1/user/exports/series": { + "/api/v1/settings/public": { "get": { "tags": [ - "Series Exports" + "Settings" ], - "summary": "GET /user/exports/series - List current user's exports", - "operationId": "list_exports", + "summary": "Get public display settings (authenticated users)", + "description": "Returns non-sensitive settings that affect UI/display behavior.\nThis endpoint is available to all authenticated users, not just admins.", + "operationId": "get_public_settings", "responses": { "200": { - "description": "List of exports", + "description": "Public settings", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExportListResponse" + "type": "object", + "additionalProperties": { + "$ref": "#/components/schemas/PublicSettingDto" + }, + "propertyNames": { + "type": "string" + } + }, + "example": { + "display.custom_metadata_template": { + "key": 
"display.custom_metadata_template", + "value": "{{#if custom_metadata}}## Additional Information\n{{#each custom_metadata}}- **{{@key}}**: {{this}}\n{{/each}}{{/if}}" + } } } } + }, + "401": { + "description": "Unauthorized" } }, "security": [ @@ -12318,112 +12713,152 @@ "api_key": [] } ] - }, + } + }, + "/api/v1/setup/initialize": { "post": { "tags": [ - "Series Exports" + "Setup" ], - "summary": "POST /user/exports/series - Create a new series export job", - "operationId": "create_export", + "summary": "Initialize application setup by creating the first admin user", + "description": "Creates the first admin user with email verification bypassed and returns a JWT token", + "operationId": "initialize_setup", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateSeriesExportRequest" + "$ref": "#/components/schemas/InitializeSetupRequest" } } }, "required": true }, "responses": { - "202": { - "description": "Export job created", + "200": { + "description": "Setup initialized", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExportDto" + "$ref": "#/components/schemas/InitializeSetupResponse" } } } }, "400": { - "description": "Invalid request" - }, - "409": { - "description": "Concurrent export limit reached" - } - }, - "security": [ - { - "jwt_bearer": [] + "description": "Invalid request or setup already completed" }, - { - "api_key": [] + "422": { + "description": "Validation error" } - ] + } } }, - "/api/v1/user/exports/series/fields": { - "get": { + "/api/v1/setup/settings": { + "patch": { "tags": [ - "Series Exports" + "Setup" ], - "summary": "GET /user/exports/series/fields - Get the field catalog", - "operationId": "get_field_catalog", + "summary": "Configure initial settings (optional step in setup wizard)", + "description": "Allows the newly created admin to configure database settings", + "operationId": "configure_initial_settings", + "requestBody": { + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ConfigureSettingsRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Field catalog", + "description": "Settings configured", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ExportFieldCatalogResponse" + "$ref": "#/components/schemas/ConfigureSettingsResponse" } } } + }, + "403": { + "description": "Forbidden - Admin only" } }, "security": [ { "jwt_bearer": [] - }, - { - "api_key": [] } ] } }, - "/api/v1/user/exports/series/{id}": { + "/api/v1/setup/status": { "get": { "tags": [ - "Series Exports" + "Setup" ], - "summary": "GET /user/exports/series/{id} - Get a single export's details", - "operationId": "get_export", + "summary": "Check if initial setup is required", + "description": "Returns whether the application needs initial setup (no users exist)", + "operationId": "setup_status", + "responses": { + "200": { + "description": "Setup status", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetupStatusResponse" + } + } + } + } + } + } + }, + "/api/v1/tags": { + "get": { + "tags": [ + "Tags" + ], + "summary": "List all tags", + "operationId": "list_tags", "parameters": [ { - "name": "id", - "in": "path", - "description": "Export ID", - "required": true, + "name": "page", + "in": "query", + "description": "Page number (1-indexed, default 1)", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int64", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "description": "Number of items per page (default 50, max 500)", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0 } } ], "responses": { "200": { - "description": "Export details", + "description": "List of all tags", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SeriesExportDto" + "$ref": 
"#/components/schemas/PaginatedResponse_TagDto" } } } }, - "404": { - "description": "Export not found" + "403": { + "description": "Forbidden" } }, "security": [ @@ -12434,31 +12869,28 @@ "api_key": [] } ] - }, - "delete": { + } + }, + "/api/v1/tags/cleanup": { + "post": { "tags": [ - "Series Exports" - ], - "summary": "DELETE /user/exports/series/{id} - Delete an export and its file", - "operationId": "delete_export", - "parameters": [ - { - "name": "id", - "in": "path", - "description": "Export ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Tags" ], + "summary": "Delete all unused tags (tags with no series linked)", + "operationId": "cleanup_tags", "responses": { - "204": { - "description": "Export deleted" + "200": { + "description": "Cleanup completed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TaxonomyCleanupResponse" + } + } + } }, - "404": { - "description": "Export not found" + "403": { + "description": "Forbidden - admin only" } }, "security": [ @@ -12471,18 +12903,18 @@ ] } }, - "/api/v1/user/exports/series/{id}/download": { - "get": { + "/api/v1/tags/{tag_id}": { + "delete": { "tags": [ - "Series Exports" + "Tags" ], - "summary": "GET /user/exports/series/{id}/download - Download the export file", - "operationId": "download_export", + "summary": "Delete a tag from the taxonomy (admin only)", + "operationId": "delete_tag", "parameters": [ { - "name": "id", + "name": "tag_id", "in": "path", - "description": "Export ID", + "description": "Tag ID", "required": true, "schema": { "type": "string", @@ -12491,17 +12923,14 @@ } ], "responses": { - "200": { - "description": "Export file", - "content": { - "application/octet-stream": {} - } + "204": { + "description": "Tag deleted" }, - "404": { - "description": "Export not found or file missing" + "403": { + "description": "Forbidden - admin only" }, - "409": { - "description": "Export not yet completed" + "404": { + 
"description": "Tag not found" } }, "security": [ @@ -12514,164 +12943,87 @@ ] } }, - "/api/v1/user/plugins": { - "get": { - "tags": [ - "User Plugins" - ], - "summary": "List user's plugins (enabled and available)", - "description": "Returns both plugins the user has enabled and plugins available for them to enable.", - "operationId": "list_user_plugins", - "responses": { - "200": { - "description": "User plugins list", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UserPluginsListResponse" - } - } - } - }, - "401": { - "description": "Not authenticated" - } - } - } - }, - "/api/v1/user/plugins/oauth/callback": { + "/api/v1/tasks": { "get": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Handle OAuth callback from external provider", - "description": "This endpoint receives the callback after the user authenticates with the\nexternal service. It exchanges the authorization code for tokens and stores\nthem encrypted in the database.", - "operationId": "oauth_callback", + "summary": "List tasks with optional filtering", + "description": "# Permission Required\n- `tasks:read`", + "operationId": "list_tasks", "parameters": [ { - "name": "code", + "name": "status", "in": "query", - "description": "Authorization code from OAuth provider", - "required": true, + "required": false, "schema": { - "type": "string" + "type": [ + "string", + "null" + ] } }, { - "name": "state", + "name": "taskType", "in": "query", - "description": "State parameter for CSRF protection", - "required": true, + "required": false, "schema": { - "type": "string" + "type": [ + "string", + "null" + ] } - } - ], - "responses": { - "200": { - "description": "HTML page that auto-closes the popup" }, - "400": { - "description": "Invalid callback parameters" - } - } - } - }, - "/api/v1/user/plugins/{plugin_id}": { - "get": { - "tags": [ - "User Plugins" - ], - "summary": "Get a single user plugin instance", - "description": "Returns detailed status for a 
plugin the user has enabled.", - "operationId": "get_user_plugin", - "parameters": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID", - "required": true, + "name": "limit", + "in": "query", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int64", + "minimum": 0 } } ], "responses": { "200": { - "description": "User plugin details", + "description": "Tasks retrieved successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPluginDto" + "type": "array", + "items": { + "$ref": "#/components/schemas/TaskResponse" + } } } } }, - "401": { - "description": "Not authenticated" - }, - "404": { - "description": "Plugin not enabled for this user" + "403": { + "description": "Permission denied" } - } - }, - "delete": { - "tags": [ - "User Plugins" - ], - "summary": "Disconnect a plugin (remove data and credentials)", - "operationId": "disconnect_plugin", - "parameters": [ + }, + "security": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to disconnect", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } - ], - "responses": { - "200": { - "description": "Plugin disconnected and data removed" - }, - "401": { - "description": "Not authenticated" + "bearer_auth": [] }, - "404": { - "description": "Plugin not enabled for this user" - } - } - } - }, - "/api/v1/user/plugins/{plugin_id}/config": { - "patch": { - "tags": [ - "User Plugins" - ], - "summary": "Update user plugin configuration", - "description": "Allows the user to set per-user configuration overrides for their plugin instance.", - "operationId": "update_user_plugin_config", - "parameters": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to update config for", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } + "api_key": [] } + ] + }, + "post": { + "tags": [ + "Task Queue" ], + "summary": "Create a new 
task", + "description": "# Permission Required\n- `tasks:write`", + "operationId": "create_task", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdateUserPluginConfigRequest" + "$ref": "#/components/schemas/CreateTaskRequest" } } }, @@ -12679,210 +13031,188 @@ }, "responses": { "200": { - "description": "Configuration updated", + "description": "Task created successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPluginDto" + "$ref": "#/components/schemas/CreateTaskResponse" } } } }, "400": { - "description": "Invalid configuration" + "description": "Invalid request" }, - "401": { - "description": "Not authenticated" + "403": { + "description": "Permission denied" + } + }, + "security": [ + { + "bearer_auth": [] }, - "404": { - "description": "Plugin not enabled for this user" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/plugins/{plugin_id}/credentials": { - "post": { + "/api/v1/tasks/nuke": { + "delete": { "tags": [ - "User Plugins" - ], - "summary": "Set user credentials (personal access token) for a plugin", - "description": "Allows users to authenticate by pasting a personal access token\ninstead of going through the OAuth flow.", - "operationId": "set_user_credentials", - "parameters": [ - { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to set credentials for", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Task Queue" ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SetUserCredentialsRequest" - } - } - }, - "required": true - }, + "summary": "Nuclear option: Delete ALL tasks", + "description": "# Permission Required\n- `admin`", + "operationId": "nuke_all_tasks", "responses": { "200": { - "description": "Credentials stored", + "description": "All tasks deleted", "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/UserPluginDto" + "$ref": "#/components/schemas/PurgeTasksResponse" } } } }, - "400": { - "description": "Invalid request" - }, - "401": { - "description": "Not authenticated" - }, - "404": { - "description": "Plugin not enabled for this user" + "403": { + "description": "Permission denied (admin only)" } - } - } - }, - "/api/v1/user/plugins/{plugin_id}/disable": { - "post": { - "tags": [ - "User Plugins" - ], - "summary": "Disable a plugin for the current user", - "operationId": "disable_user_plugin", - "parameters": [ + }, + "security": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to disable", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } - ], - "responses": { - "200": { - "description": "Plugin disabled" - }, - "401": { - "description": "Not authenticated" + "bearer_auth": [] }, - "404": { - "description": "Plugin not enabled for this user" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/plugins/{plugin_id}/enable": { - "post": { + "/api/v1/tasks/purge": { + "delete": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Enable a plugin for the current user", - "operationId": "enable_user_plugin", + "summary": "Purge old completed/failed tasks", + "description": "# Permission Required\n- `tasks:write`", + "operationId": "purge_old_tasks", "parameters": [ { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to enable", - "required": true, + "name": "days", + "in": "query", + "description": "Delete tasks older than N days (default: 30)", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int64" } } ], "responses": { "200": { - "description": "Plugin enabled", + "description": "Tasks purged successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPluginDto" + "$ref": "#/components/schemas/PurgeTasksResponse" } } } }, - "400": { - "description": "Plugin is not a 
user plugin or not available" - }, - "401": { - "description": "Not authenticated" + "403": { + "description": "Permission denied" + } + }, + "security": [ + { + "bearer_auth": [] }, - "409": { - "description": "Plugin already enabled for this user" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/plugins/{plugin_id}/oauth/start": { - "post": { + "/api/v1/tasks/stats": { + "get": { "tags": [ - "User Plugins" - ], - "summary": "Start OAuth flow for a user plugin", - "description": "Generates an authorization URL and returns it to the client.\nThe client should open this URL in a popup or redirect the user.", - "operationId": "oauth_start", - "parameters": [ - { - "name": "plugin_id", - "in": "path", - "description": "Plugin ID to start OAuth for", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Task Queue" ], + "summary": "Get queue statistics", + "description": "# Permission Required\n- `tasks:read`", + "operationId": "get_task_stats", "responses": { "200": { - "description": "OAuth authorization URL generated", + "description": "Statistics retrieved successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/OAuthStartResponse" + "$ref": "#/components/schemas/TaskStats" } } } }, - "400": { - "description": "Plugin does not support OAuth or not configured" + "403": { + "description": "Permission denied" + } + }, + "security": [ + { + "bearer_auth": [] + }, + { + "api_key": [] + } + ] + } + }, + "/api/v1/tasks/stream": { + "get": { + "tags": [ + "Events" + ], + "summary": "Subscribe to real-time task progress events via SSE", + "description": "Clients can subscribe to this endpoint to receive real-time notifications\nabout background task progress (analyze_book, generate_thumbnails, etc.).\n\n## Authentication\nRequires valid authentication with `LibrariesRead` permission.\n\n## Event Format\nEvents are sent as JSON-encoded `TaskProgressEvent` objects with the following structure:\n```json\n{\n 
\"task_id\": \"uuid\",\n \"task_type\": \"analyze_book\",\n \"status\": \"running\",\n \"progress\": {\n \"current\": 5,\n \"total\": 10,\n \"message\": \"Processing book 5 of 10\"\n },\n \"started_at\": \"2024-01-06T12:00:00Z\",\n \"library_id\": \"uuid\"\n}\n```\n\n## Keep-Alive\nA keep-alive message is sent every 15 seconds to prevent connection timeout.", + "operationId": "task_progress_stream", + "responses": { + "200": { + "description": "SSE stream of task progress events", + "content": { + "text/event-stream": {} + } }, "401": { - "description": "Not authenticated" + "description": "Unauthorized" }, - "404": { - "description": "Plugin not found or not enabled" + "403": { + "description": "Forbidden" } - } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] } }, - "/api/v1/user/plugins/{plugin_id}/sync": { - "post": { + "/api/v1/tasks/{task_id}": { + "get": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Trigger a sync operation for a user plugin", - "description": "Enqueues a background sync task that will push/pull reading progress\nbetween Codex and the external service.", - "operationId": "trigger_sync", + "summary": "Get task by ID", + "description": "# Permission Required\n- `tasks:read`", + "operationId": "get_task", "parameters": [ { - "name": "plugin_id", + "name": "task_id", "in": "path", - "description": "Plugin ID to sync", + "description": "Task ID", "required": true, "schema": { "type": "string", @@ -12892,132 +13222,127 @@ ], "responses": { "200": { - "description": "Sync task enqueued", + "description": "Task retrieved successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SyncTriggerResponse" + "$ref": "#/components/schemas/TaskResponse" } } } }, - "400": { - "description": "Plugin is not a sync provider or not connected" - }, - "401": { - "description": "Not authenticated" + "403": { + "description": "Permission denied" }, "404": { - "description": "Plugin not 
enabled for this user" + "description": "Task not found" + } + }, + "security": [ + { + "bearer_auth": [] }, - "409": { - "description": "Sync already in progress" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/plugins/{plugin_id}/sync/status": { - "get": { + "/api/v1/tasks/{task_id}/cancel": { + "post": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Get sync status for a user plugin", - "description": "Returns the current sync status including last sync time, health, and failure count.\nPass `?live=true` to also query the plugin process for live sync state (pending push/pull,\nconflicts, external entry count). This spawns the plugin process and is more expensive.", - "operationId": "get_sync_status", + "summary": "Cancel a task", + "description": "# Permission Required\n- `tasks:write`", + "operationId": "cancel_task", "parameters": [ { - "name": "plugin_id", + "name": "task_id", "in": "path", - "description": "Plugin ID to check sync status", + "description": "Task ID", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "name": "live", - "in": "query", - "description": "If true, spawn the plugin process and query live sync state\n(external count, pending push/pull, conflicts).\nDefault: false (returns database-stored metadata only).", - "required": false, - "schema": { - "type": "boolean" - } } ], "responses": { "200": { - "description": "Sync status", + "description": "Task cancelled successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SyncStatusDto" + "$ref": "#/components/schemas/MessageResponse" } } } }, - "401": { - "description": "Not authenticated" + "400": { + "description": "Task cannot be cancelled" + }, + "403": { + "description": "Permission denied" }, "404": { - "description": "Plugin not enabled for this user" + "description": "Task not found" } - } + }, + "security": [ + { + "bearer_auth": [] + }, + { + "api_key": [] + } + ] } }, - 
"/api/v1/user/plugins/{plugin_id}/tasks": { - "get": { + "/api/v1/tasks/{task_id}/retry": { + "post": { "tags": [ - "User Plugins" + "Task Queue" ], - "summary": "Get the latest task for a user plugin", - "description": "Returns the most recent background task for this user+plugin combination.\nUse the `?type=user_plugin_sync` query parameter to filter by task type.\n\nThis endpoint is user-scoped and does NOT require `TasksRead` permission.\nOnly the authenticated user's own tasks are returned.", - "operationId": "get_plugin_tasks", + "summary": "Retry a failed task", + "description": "# Permission Required\n- `tasks:write`", + "operationId": "retry_task", "parameters": [ { - "name": "plugin_id", + "name": "task_id", "in": "path", - "description": "Plugin ID", + "description": "Task ID", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "name": "type", - "in": "query", - "description": "Filter by task type (e.g., \"user_plugin_sync\").\nIf omitted, returns the latest task of any type for this plugin.", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } } ], "responses": { "200": { - "description": "Latest task found", + "description": "Task queued for retry", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPluginTaskDto" + "$ref": "#/components/schemas/MessageResponse" } } } }, - "401": { - "description": "Not authenticated" + "400": { + "description": "Task is not in failed state" + }, + "403": { + "description": "Permission denied" }, "404": { - "description": "No tasks found for this plugin" + "description": "Task not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -13025,26 +13350,42 @@ ] } }, - "/api/v1/user/preferences": { - "get": { + "/api/v1/tasks/{task_id}/unlock": { + "post": { "tags": [ - "User Preferences" + "Task Queue" + ], + "summary": "Unlock a stuck task", + "description": "# Permission Required\n- `tasks:write`", + 
"operationId": "unlock_task", + "parameters": [ + { + "name": "task_id", + "in": "path", + "description": "Task ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "Get all preferences for the authenticated user", - "operationId": "get_all_preferences", "responses": { "200": { - "description": "User preferences retrieved", + "description": "Task unlocked successfully", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPreferencesResponse" + "$ref": "#/components/schemas/MessageResponse" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Permission denied" + }, + "404": { + "description": "Task not found" } }, "security": [ @@ -13055,44 +13396,33 @@ "api_key": [] } ] - }, - "put": { + } + }, + "/api/v1/user": { + "get": { "tags": [ - "User Preferences" + "Current User" ], - "summary": "Set multiple preferences at once", - "operationId": "set_bulk_preferences", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BulkSetPreferencesRequest" - } - } - }, - "required": true - }, + "summary": "Get the currently authenticated user's profile", + "operationId": "get_current_user", "responses": { "200": { - "description": "Preferences updated successfully", + "description": "Current user's profile with sharing tags", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SetPreferencesResponse" + "$ref": "#/components/schemas/UserDetailDto" } } } }, - "400": { - "description": "Invalid preference key or value" - }, "401": { "description": "Unauthorized" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -13100,140 +13430,100 @@ ] } }, - "/api/v1/user/preferences/{key}": { + "/api/v1/user/exports/series": { "get": { "tags": [ - "User Preferences" - ], - "summary": "Get a single preference by key", - "operationId": "get_preference", - "parameters": [ - { - 
"name": "key", - "in": "path", - "description": "Preference key (e.g., 'ui.theme')", - "required": true, - "schema": { - "type": "string" - } - } + "Series Exports" ], + "summary": "GET /user/exports/series - List current user's exports", + "operationId": "list_exports", "responses": { "200": { - "description": "Preference retrieved", + "description": "List of exports", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPreferenceDto" + "$ref": "#/components/schemas/SeriesExportListResponse" } } } - }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Preference not found" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] }, - "put": { + "post": { "tags": [ - "User Preferences" - ], - "summary": "Set a single preference value", - "operationId": "set_preference", - "parameters": [ - { - "name": "key", - "in": "path", - "description": "Preference key (e.g., 'ui.theme')", - "required": true, - "schema": { - "type": "string" - } - } + "Series Exports" ], + "summary": "POST /user/exports/series - Create a new series export job", + "operationId": "create_export", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SetPreferenceRequest" + "$ref": "#/components/schemas/CreateSeriesExportRequest" } } }, "required": true }, "responses": { - "200": { - "description": "Preference set successfully", + "202": { + "description": "Export job created", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPreferenceDto" + "$ref": "#/components/schemas/SeriesExportDto" } } } }, "400": { - "description": "Invalid preference value" + "description": "Invalid request" }, - "401": { - "description": "Unauthorized" + "409": { + "description": "Concurrent export limit reached" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] } ] - }, - "delete": { + } + }, + "/api/v1/user/exports/series/fields": { 
+ "get": { "tags": [ - "User Preferences" - ], - "summary": "Delete (reset) a preference to its default", - "operationId": "delete_preference", - "parameters": [ - { - "name": "key", - "in": "path", - "description": "Preference key to delete", - "required": true, - "schema": { - "type": "string" - } - } + "Series Exports" ], + "summary": "GET /user/exports/series/fields - Get the field catalog", + "operationId": "get_field_catalog", "responses": { "200": { - "description": "Preference deleted", + "description": "Field catalog", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DeletePreferenceResponse" + "$ref": "#/components/schemas/ExportFieldCatalogResponse" } } } - }, - "401": { - "description": "Unauthorized" } }, "security": [ { - "bearer_auth": [] + "jwt_bearer": [] }, { "api_key": [] @@ -13241,26 +13531,38 @@ ] } }, - "/api/v1/user/ratings": { + "/api/v1/user/exports/series/{id}": { "get": { "tags": [ - "Ratings" + "Series Exports" + ], + "summary": "GET /user/exports/series/{id} - Get a single export's details", + "operationId": "get_export", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Export ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } ], - "summary": "List all of the current user's ratings", - "operationId": "list_user_ratings", "responses": { "200": { - "description": "List of user's ratings", + "description": "Export details", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserRatingsListResponse" + "$ref": "#/components/schemas/SeriesExportDto" } } } }, - "403": { - "description": "Forbidden" + "404": { + "description": "Export not found" } }, "security": [ @@ -13271,296 +13573,162 @@ "api_key": [] } ] - } - }, - "/api/v1/user/recommendations": { - "get": { + }, + "delete": { "tags": [ - "Recommendations" + "Series Exports" ], - "summary": "Get personalized recommendations", - "description": "Returns cached recommendations 
from the database. If no cached data exists\nor the data is stale, an empty list is returned and a background refresh\ntask is auto-triggered. The frontend should use SSE task progress events\nto know when fresh data is ready.", - "operationId": "get_recommendations", - "responses": { - "200": { - "description": "Personalized recommendations", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RecommendationsResponse" - } - } + "summary": "DELETE /user/exports/series/{id} - Delete an export and its file", + "operationId": "delete_export", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Export ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } - }, - "401": { - "description": "Not authenticated" - }, - "404": { - "description": "No recommendation plugin enabled" } - } - } - }, - "/api/v1/user/recommendations/refresh": { - "post": { - "tags": [ - "Recommendations" ], - "summary": "Refresh recommendations", - "description": "Enqueues a background task to regenerate recommendations by clearing\nthe cache and updating the taste profile.", - "operationId": "refresh_recommendations", "responses": { - "200": { - "description": "Refresh task enqueued", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RecommendationsRefreshResponse" - } - } - } - }, - "401": { - "description": "Not authenticated" + "204": { + "description": "Export deleted" }, "404": { - "description": "No recommendation plugin enabled" + "description": "Export not found" + } + }, + "security": [ + { + "jwt_bearer": [] }, - "409": { - "description": "Recommendation refresh already in progress" + { + "api_key": [] } - } + ] } }, - "/api/v1/user/recommendations/{external_id}/dismiss": { - "post": { + "/api/v1/user/exports/series/{id}/download": { + "get": { "tags": [ - "Recommendations" + "Series Exports" ], - "summary": "Dismiss a recommendation", - "description": "Removes the 
recommendation from the cached list immediately and enqueues\na background task to notify the plugin asynchronously. Returns instantly.", - "operationId": "dismiss_recommendation", + "summary": "GET /user/exports/series/{id}/download - Download the export file", + "operationId": "download_export", "parameters": [ { - "name": "external_id", + "name": "id", "in": "path", - "description": "External ID of the recommendation to dismiss", + "description": "Export ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DismissRecommendationRequest" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Recommendation dismissed", + "description": "Export file", "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DismissRecommendationResponse" - } - } + "application/octet-stream": {} } }, - "401": { - "description": "Not authenticated" - }, "404": { - "description": "No recommendation plugin enabled" + "description": "Export not found or file missing" + }, + "409": { + "description": "Export not yet completed" } - } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] } }, - "/api/v1/user/sharing-tags": { + "/api/v1/user/plugins": { "get": { "tags": [ - "Sharing Tags" + "User Plugins" ], - "summary": "Get current user's sharing tag grants", - "operationId": "get_my_sharing_tags", + "summary": "List user's plugins (enabled and available)", + "description": "Returns both plugins the user has enabled and plugins available for them to enable.", + "operationId": "list_user_plugins", "responses": { "200": { - "description": "List of sharing tag grants for the current user", + "description": "User plugins list", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserSharingTagGrantsResponse" + "$ref": 
"#/components/schemas/UserPluginsListResponse" } } } - } - }, - "security": [ - { - "jwt_bearer": [] }, - { - "api_key": [] + "401": { + "description": "Not authenticated" } - ] + } } }, - "/api/v1/users": { + "/api/v1/user/plugins/oauth/callback": { "get": { "tags": [ - "Users" + "User Plugins" ], - "summary": "List all users (admin only) with pagination and filtering", - "operationId": "list_users", + "summary": "Handle OAuth callback from external provider", + "description": "This endpoint receives the callback after the user authenticates with the\nexternal service. It exchanges the authorization code for tokens and stores\nthem encrypted in the database.", + "operationId": "oauth_callback", "parameters": [ { - "name": "role", - "in": "query", - "description": "Filter by role", - "required": false, - "schema": { - "oneOf": [ - { - "type": "null" - }, - { - "$ref": "#/components/schemas/UserRole" - } - ] - } - }, - { - "name": "sharingTag", - "in": "query", - "description": "Filter by sharing tag name (users who have a grant for this tag)", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "sharingTagMode", - "in": "query", - "description": "Filter by sharing tag access mode (allow/deny) - only used with sharing_tag", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "page", + "name": "code", "in": "query", - "description": "Page number (1-indexed, default 1)", - "required": false, + "description": "Authorization code from OAuth provider", + "required": true, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": "string" } }, { - "name": "pageSize", + "name": "state", "in": "query", - "description": "Number of items per page (max 100, default 50)", - "required": false, + "description": "State parameter for CSRF protection", + "required": true, "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 + "type": "string" } } ], "responses": 
{ "200": { - "description": "Paginated list of users", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PaginatedResponse_UserDto" - } - } - } - }, - "403": { - "description": "Forbidden - Admin only" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - }, - "post": { - "tags": [ - "Users" - ], - "summary": "Create a new user (admin only)", - "operationId": "create_user", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateUserRequest" - } - } - }, - "required": true - }, - "responses": { - "201": { - "description": "User created", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UserDto" - } - } - } + "description": "HTML page that auto-closes the popup" }, "400": { - "description": "Invalid request" - }, - "403": { - "description": "Forbidden - Admin only" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] + "description": "Invalid callback parameters" } - ] + } } }, - "/api/v1/users/{user_id}": { + "/api/v1/user/plugins/{plugin_id}": { "get": { "tags": [ - "Users" + "User Plugins" ], - "summary": "Get user by ID (admin only)", - "operationId": "get_user", + "summary": "Get a single user plugin instance", + "description": "Returns detailed status for a plugin the user has enabled.", + "operationId": "get_user_plugin", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", + "description": "Plugin ID", "required": true, "schema": { "type": "string", @@ -13570,42 +13738,34 @@ ], "responses": { "200": { - "description": "User details with sharing tags", + "description": "User plugin details", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserDetailDto" + "$ref": "#/components/schemas/UserPluginDto" } } } }, - "403": { - "description": "Forbidden - Admin only" + "401": { + "description": "Not authenticated" 
}, "404": { - "description": "User not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] + "description": "Plugin not enabled for this user" } - ] + } }, "delete": { "tags": [ - "Users" + "User Plugins" ], - "summary": "Delete a user (admin only)", - "operationId": "delete_user", + "summary": "Disconnect a plugin (remove data and credentials)", + "operationId": "disconnect_plugin", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", + "description": "Plugin ID to disconnect", "required": true, "schema": { "type": "string", @@ -13614,36 +13774,31 @@ } ], "responses": { - "204": { - "description": "User deleted" + "200": { + "description": "Plugin disconnected and data removed" }, - "403": { - "description": "Forbidden - Admin only" + "401": { + "description": "Not authenticated" }, "404": { - "description": "User not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] + "description": "Plugin not enabled for this user" } - ] - }, + } + } + }, + "/api/v1/user/plugins/{plugin_id}/config": { "patch": { "tags": [ - "Users" + "User Plugins" ], - "summary": "Update a user (admin only, partial update)", - "operationId": "update_user", + "summary": "Update user plugin configuration", + "description": "Allows the user to set per-user configuration overrides for their plugin instance.", + "operationId": "update_user_plugin_config", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", + "description": "Plugin ID to update config for", "required": true, "schema": { "type": "string", @@ -13655,7 +13810,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdateUserRequest" + "$ref": "#/components/schemas/UpdateUserPluginConfigRequest" } } }, @@ -13663,86 +13818,40 @@ }, "responses": { "200": { - "description": "User updated", + "description": "Configuration updated", "content": { 
"application/json": { "schema": { - "$ref": "#/components/schemas/UserDto" + "$ref": "#/components/schemas/UserPluginDto" } } } }, - "403": { - "description": "Forbidden - Admin only" + "400": { + "description": "Invalid configuration" }, - "404": { - "description": "User not found" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "404": { + "description": "Plugin not enabled for this user" } - ] + } } }, - "/api/v1/users/{user_id}/sharing-tags": { - "get": { + "/api/v1/user/plugins/{plugin_id}/credentials": { + "post": { "tags": [ - "Sharing Tags" + "User Plugins" ], - "summary": "Get sharing tag grants for a user (admin only)", - "operationId": "get_user_sharing_tags", + "summary": "Set user credentials (personal access token) for a plugin", + "description": "Allows users to authenticate by pasting a personal access token\ninstead of going through the OAuth flow.", + "operationId": "set_user_credentials", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } - ], - "responses": { - "200": { - "description": "List of sharing tag grants for the user", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UserSharingTagGrantsResponse" - } - } - } - }, - "403": { - "description": "Forbidden - Missing permission" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - }, - "put": { - "tags": [ - "Sharing Tags" - ], - "summary": "Set a user's sharing tag grant (admin only)", - "operationId": "set_user_sharing_tag", - "parameters": [ - { - "name": "user_id", - "in": "path", - "description": "User ID", + "description": "Plugin ID to set credentials for", "required": true, "schema": { "type": "string", @@ -13754,7 +13863,7 @@ "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/SetUserSharingTagGrantRequest" + "$ref": "#/components/schemas/SetUserCredentialsRequest" } } }, @@ -13762,54 +13871,71 @@ }, "responses": { "200": { - "description": "Sharing tag grant set", + "description": "Credentials stored", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserSharingTagGrantDto" + "$ref": "#/components/schemas/UserPluginDto" } } } }, - "403": { - "description": "Forbidden - Missing permission" + "400": { + "description": "Invalid request" }, - "404": { - "description": "Sharing tag not found" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "404": { + "description": "Plugin not enabled for this user" } - ] + } } }, - "/api/v1/users/{user_id}/sharing-tags/{tag_id}": { - "delete": { + "/api/v1/user/plugins/{plugin_id}/disable": { + "post": { "tags": [ - "Sharing Tags" + "User Plugins" ], - "summary": "Remove a user's sharing tag grant (admin only)", - "operationId": "remove_user_sharing_tag", + "summary": "Disable a plugin for the current user", + "operationId": "disable_user_plugin", "parameters": [ { - "name": "user_id", + "name": "plugin_id", "in": "path", - "description": "User ID", + "description": "Plugin ID to disable", "required": true, "schema": { "type": "string", "format": "uuid" } + } + ], + "responses": { + "200": { + "description": "Plugin disabled" }, + "401": { + "description": "Not authenticated" + }, + "404": { + "description": "Plugin not enabled for this user" + } + } + } + }, + "/api/v1/user/plugins/{plugin_id}/enable": { + "post": { + "tags": [ + "User Plugins" + ], + "summary": "Enable a plugin for the current user", + "operationId": "enable_user_plugin", + "parameters": [ { - "name": "tag_id", + "name": "plugin_id", "in": "path", - "description": "Sharing tag ID", + "description": "Plugin ID to enable", "required": true, "schema": { "type": "string", @@ -13818,86 +13944,84 @@ } ], "responses": { - 
"204": { - "description": "Sharing tag grant removed" + "200": { + "description": "Plugin enabled", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPluginDto" + } + } + } }, - "403": { - "description": "Forbidden - Missing permission" + "400": { + "description": "Plugin is not a user plugin or not available" }, - "404": { - "description": "Grant not found" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "409": { + "description": "Plugin already enabled for this user" } - ] + } } }, - "/health": { - "get": { + "/api/v1/user/plugins/{plugin_id}/oauth/start": { + "post": { "tags": [ - "Health" + "User Plugins" ], - "summary": "Health check endpoint - checks database connectivity", - "description": "Returns \"OK\" with 200 status if database is healthy,\nor \"Service Unavailable\" with 503 status if database check fails.", - "operationId": "health_check", - "responses": { - "200": { - "description": "Service is healthy" - }, - "503": { - "description": "Service is unavailable" + "summary": "Start OAuth flow for a user plugin", + "description": "Generates an authorization URL and returns it to the client.\nThe client should open this URL in a popup or redirect the user.", + "operationId": "oauth_start", + "parameters": [ + { + "name": "plugin_id", + "in": "path", + "description": "Plugin ID to start OAuth for", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } - } - } - }, - "/opds": { - "get": { - "tags": [ - "OPDS" ], - "summary": "Root OPDS catalog", - "description": "Returns the main navigation feed with links to:\n- All libraries\n- Search\n- Recent additions", - "operationId": "root_catalog", "responses": { "200": { - "description": "OPDS root catalog", + "description": "OAuth authorization URL generated", "content": { - "application/atom+xml": {} + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/OAuthStartResponse" + } + } } }, - "403": { - "description": "Forbidden" - } - }, - "security": [ - { - "jwt_bearer": [] + "400": { + "description": "Plugin does not support OAuth or not configured" }, - { - "api_key": [] + "401": { + "description": "Not authenticated" + }, + "404": { + "description": "Plugin not found or not enabled" } - ] + } } }, - "/opds/books/{book_id}/pages": { - "get": { + "/api/v1/user/plugins/{plugin_id}/sync": { + "post": { "tags": [ - "OPDS" + "User Plugins" ], - "summary": "OPDS-PSE: List all pages in a book", - "description": "Returns a PSE page feed with individual page links for streaming.\nThis allows OPDS clients to read books page-by-page without downloading the entire file.", - "operationId": "opds_book_pages", + "summary": "Trigger a sync operation for a user plugin", + "description": "Enqueues a background sync task that will push/pull reading progress\nbetween Codex and the external service.", + "operationId": "trigger_sync", "parameters": [ { - "name": "book_id", + "name": "plugin_id", "in": "path", - "description": "Book ID", + "description": "Plugin ID to sync", "required": true, "schema": { "type": "string", @@ -13907,41 +14031,43 @@ ], "responses": { "200": { - "description": "OPDS-PSE page feed", + "description": "Sync task enqueued", "content": { - "application/atom+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/SyncTriggerResponse" + } + } } }, - "403": { - "description": "Forbidden" + "400": { + "description": "Plugin is not a sync provider or not connected" + }, + "401": { + "description": "Not authenticated" }, "404": { - "description": "Book not found" - } - }, - "security": [ - { - "jwt_bearer": [] + "description": "Plugin not enabled for this user" }, - { - "api_key": [] + "409": { + "description": "Sync already in progress" } - ] + } } }, - "/opds/books/{book_id}/pages/{page_number}": { + "/api/v1/user/plugins/{plugin_id}/sync/status": { "get": { "tags": [ - 
"OPDS" + "User Plugins" ], - "summary": "OPDS-PSE: Get a page image with reading progress tracking", - "description": "Serves the page image (delegating to the v1 handler) and records reading\nprogress via the batching service. This is the endpoint used by OPDS PSE\nclients that read page-by-page and need implicit progress tracking, since\nthey don't have a JavaScript frontend to send explicit progress updates.", - "operationId": "opds_book_page_image", + "summary": "Get sync status for a user plugin", + "description": "Returns the current sync status including last sync time, health, and failure count.\nPass `?live=true` to also query the plugin process for live sync state (pending push/pull,\nconflicts, external entry count). This spawns the plugin process and is more expensive.", + "operationId": "get_sync_status", "parameters": [ { - "name": "book_id", + "name": "plugin_id", "in": "path", - "description": "Book ID", + "description": "Plugin ID to check sync status", "required": true, "schema": { "type": "string", @@ -13949,82 +14075,48 @@ } }, { - "name": "page_number", - "in": "path", - "description": "Page number (1-indexed)", - "required": true, + "name": "live", + "in": "query", + "description": "If true, spawn the plugin process and query live sync state\n(external count, pending push/pull, conflicts).\nDefault: false (returns database-stored metadata only).", + "required": false, "schema": { - "type": "integer", - "format": "int32" + "type": "boolean" } } ], "responses": { "200": { - "description": "Page image (also records reading progress)", + "description": "Sync status", "content": { - "image/jpeg": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/SyncStatusDto" + } + } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Not authenticated" }, "404": { - "description": "Book or page not found" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - } - }, - "/opds/libraries": { - 
"get": { - "tags": [ - "OPDS" - ], - "summary": "List all libraries", - "description": "Returns a navigation feed with all available libraries", - "operationId": "opds_list_libraries", - "responses": { - "200": { - "description": "OPDS libraries feed", - "content": { - "application/atom+xml": {} - } - }, - "403": { - "description": "Forbidden" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] + "description": "Plugin not enabled for this user" } - ] + } } }, - "/opds/libraries/{library_id}": { + "/api/v1/user/plugins/{plugin_id}/tasks": { "get": { "tags": [ - "OPDS" + "User Plugins" ], - "summary": "List series in a library", - "description": "Returns an acquisition feed with all series in the specified library", - "operationId": "opds_library_series", + "summary": "Get the latest task for a user plugin", + "description": "Returns the most recent background task for this user+plugin combination.\nUse the `?type=user_plugin_sync` query parameter to filter by task type.\n\nThis endpoint is user-scoped and does NOT require `TasksRead` permission.\nOnly the authenticated user's own tasks are returned.", + "operationId": "get_plugin_tasks", "parameters": [ { - "name": "library_id", + "name": "plugin_id", "in": "path", - "description": "Library ID", + "description": "Plugin ID", "required": true, "schema": { "type": "string", @@ -14032,38 +14124,34 @@ } }, { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - }, - { - "name": "pageSize", + "name": "type", "in": "query", + "description": "Filter by task type (e.g., \"user_plugin_sync\").\nIf omitted, returns the latest task of any type for this plugin.", "required": false, "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 + "type": [ + "string", + "null" + ] } } ], "responses": { "200": { - "description": "OPDS library series feed", + "description": "Latest task found", "content": { - 
"application/atom+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPluginTaskDto" + } + } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Not authenticated" }, "404": { - "description": "Library not found" + "description": "No tasks found for this plugin" } }, "security": [ @@ -14076,65 +14164,74 @@ ] } }, - "/opds/search": { + "/api/v1/user/preferences": { "get": { "tags": [ - "OPDS" - ], - "summary": "OPDS search endpoint", - "description": "Searches books and series by title and returns an OPDS acquisition feed", - "operationId": "opds_search", - "parameters": [ - { - "name": "q", - "in": "query", - "description": "Search query string", - "required": true, - "schema": { - "type": "string" - } - } + "User Preferences" ], + "summary": "Get all preferences for the authenticated user", + "operationId": "get_all_preferences", "responses": { "200": { - "description": "OPDS search results", + "description": "User preferences retrieved", "content": { - "application/atom+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPreferencesResponse" + } + } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Unauthorized" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - } - }, - "/opds/search.xml": { - "get": { + }, + "put": { "tags": [ - "OPDS" + "User Preferences" ], - "summary": "OpenSearch descriptor endpoint", - "description": "Returns the OpenSearch XML descriptor for OPDS clients", - "operationId": "opds_opensearch_descriptor", + "summary": "Set multiple preferences at once", + "operationId": "set_bulk_preferences", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BulkSetPreferencesRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "OpenSearch descriptor", + "description": "Preferences updated successfully", "content": { - 
"application/opensearchdescription+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetPreferencesResponse" + } + } } + }, + "400": { + "description": "Invalid preference key or value" + }, + "401": { + "description": "Unauthorized" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -14142,109 +14239,140 @@ ] } }, - "/opds/series/{series_id}": { + "/api/v1/user/preferences/{key}": { "get": { "tags": [ - "OPDS" + "User Preferences" ], - "summary": "List books in a series", - "description": "Returns an acquisition feed with all books in the specified series", - "operationId": "opds_series_books", + "summary": "Get a single preference by key", + "operationId": "get_preference", "parameters": [ { - "name": "series_id", + "name": "key", "in": "path", - "description": "Series ID", + "description": "Preference key (e.g., 'ui.theme')", "required": true, "schema": { - "type": "string", - "format": "uuid" + "type": "string" } } ], "responses": { "200": { - "description": "OPDS series books feed", + "description": "Preference retrieved", "content": { - "application/atom+xml": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPreferenceDto" + } + } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "Preference not found" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - } - }, - "/opds/v2": { - "get": { + }, + "put": { "tags": [ - "OPDS 2.0" + "User Preferences" ], - "summary": "Root OPDS 2.0 catalog", - "description": "Returns the main navigation feed with links to:\n- All libraries\n- Search\n- Recent additions", - "operationId": "opds2_root", + "summary": "Set a single preference value", + "operationId": "set_preference", + "parameters": [ + { + "name": "key", + "in": "path", + "description": "Preference key (e.g., 'ui.theme')", + "required": true, + 
"schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetPreferenceRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "OPDS 2.0 root catalog", + "description": "Preference set successfully", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/UserPreferenceDto" } } } }, - "403": { - "description": "Forbidden" + "400": { + "description": "Invalid preference value" + }, + "401": { + "description": "Unauthorized" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] } ] - } - }, - "/opds/v2/libraries": { - "get": { + }, + "delete": { "tags": [ - "OPDS 2.0" + "User Preferences" + ], + "summary": "Delete (reset) a preference to its default", + "operationId": "delete_preference", + "parameters": [ + { + "name": "key", + "in": "path", + "description": "Preference key to delete", + "required": true, + "schema": { + "type": "string" + } + } ], - "summary": "List all libraries (OPDS 2.0)", - "description": "Returns a navigation feed with all available libraries", - "operationId": "opds2_libraries", "responses": { "200": { - "description": "OPDS 2.0 libraries feed", + "description": "Preference deleted", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/DeletePreferenceResponse" } } } }, - "403": { - "description": "Forbidden" + "401": { + "description": "Unauthorized" } }, "security": [ { - "jwt_bearer": [] + "bearer_auth": [] }, { "api_key": [] @@ -14252,62 +14380,26 @@ ] } }, - "/opds/v2/libraries/{library_id}": { + "/api/v1/user/ratings": { "get": { "tags": [ - "OPDS 2.0" - ], - "summary": "List series in a library (OPDS 2.0)", - "description": "Returns a navigation feed with all series in the specified library", - 
"operationId": "opds2_library_series", - "parameters": [ - { - "name": "library_id", - "in": "path", - "description": "Library ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - }, - { - "name": "pageSize", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - } + "Ratings" ], + "summary": "List all of the current user's ratings", + "operationId": "list_user_ratings", "responses": { "200": { - "description": "OPDS 2.0 library series feed", + "description": "List of user's ratings", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/UserRatingsListResponse" } } } }, "403": { "description": "Forbidden" - }, - "404": { - "description": "Library not found" } }, "security": [ @@ -14320,144 +14412,131 @@ ] } }, - "/opds/v2/recent": { + "/api/v1/user/recommendations": { "get": { "tags": [ - "OPDS 2.0" + "Recommendations" ], - "summary": "List recent additions (OPDS 2.0)", - "description": "Returns a publications feed with recently added books", - "operationId": "opds2_recent", - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 + "summary": "Get personalized recommendations", + "description": "Returns cached recommendations from the database. If no cached data exists\nor the data is stale, an empty list is returned and a background refresh\ntask is auto-triggered. 
The frontend should use SSE task progress events\nto know when fresh data is ready.", + "operationId": "get_recommendations", + "responses": { + "200": { + "description": "Personalized recommendations", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecommendationsResponse" + } + } } }, - { - "name": "pageSize", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "minimum": 0 - } + "401": { + "description": "Not authenticated" + }, + "404": { + "description": "No recommendation plugin enabled" } + } + } + }, + "/api/v1/user/recommendations/refresh": { + "post": { + "tags": [ + "Recommendations" ], + "summary": "Refresh recommendations", + "description": "Enqueues a background task to regenerate recommendations by clearing\nthe cache and updating the taste profile.", + "operationId": "refresh_recommendations", "responses": { "200": { - "description": "OPDS 2.0 recent additions feed", + "description": "Refresh task enqueued", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/RecommendationsRefreshResponse" } } } }, - "403": { - "description": "Forbidden" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "404": { + "description": "No recommendation plugin enabled" + }, + "409": { + "description": "Recommendation refresh already in progress" } - ] + } } }, - "/opds/v2/search": { - "get": { + "/api/v1/user/recommendations/{external_id}/dismiss": { + "post": { "tags": [ - "OPDS 2.0" + "Recommendations" ], - "summary": "OPDS 2.0 search endpoint", - "description": "Searches books and series by title and returns an OPDS 2.0 publications feed", - "operationId": "opds2_search", + "summary": "Dismiss a recommendation", + "description": "Removes the recommendation from the cached list immediately and enqueues\na background 
task to notify the plugin asynchronously. Returns instantly.", + "operationId": "dismiss_recommendation", "parameters": [ { - "name": "query", - "in": "query", - "description": "Search query string", + "name": "external_id", + "in": "path", + "description": "External ID of the recommendation to dismiss", "required": true, "schema": { "type": "string" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DismissRecommendationRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "OPDS 2.0 search results", + "description": "Recommendation dismissed", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": "#/components/schemas/DismissRecommendationResponse" } } } }, - "400": { - "description": "Bad request - empty query" - }, - "403": { - "description": "Forbidden" - } - }, - "security": [ - { - "jwt_bearer": [] + "401": { + "description": "Not authenticated" }, - { - "api_key": [] + "404": { + "description": "No recommendation plugin enabled" } - ] + } } }, - "/opds/v2/series/{series_id}": { + "/api/v1/user/sharing-tags": { "get": { "tags": [ - "OPDS 2.0" - ], - "summary": "List books in a series (OPDS 2.0)", - "description": "Returns a publications feed with all books in the specified series", - "operationId": "opds2_series_books", - "parameters": [ - { - "name": "series_id", - "in": "path", - "description": "Series ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Sharing Tags" ], + "summary": "Get current user's sharing tag grants", + "operationId": "get_my_sharing_tags", "responses": { "200": { - "description": "OPDS 2.0 series books feed", + "description": "List of sharing tag grants for the current user", "content": { - "application/opds+json": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Opds2Feed" + "$ref": 
"#/components/schemas/UserSharingTagGrantsResponse" } } } - }, - "403": { - "description": "Forbidden" - }, - "404": { - "description": "Series not found" } }, "security": [ @@ -14470,141 +14549,90 @@ ] } }, - "/{prefix}/api/v1/age-ratings": { + "/api/v1/users": { "get": { "tags": [ - "Komga" + "Users" ], - "summary": "List age ratings (stub - always returns empty array)", - "description": "Returns all age ratings in the library.\nCurrently returns empty as Codex doesn't aggregate age ratings separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/age-ratings`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_age_ratings", + "summary": "List all users (admin only) with pagination and filtering", + "operationId": "list_users", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, + "name": "role", + "in": "query", + "description": "Filter by role", + "required": false, "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Empty list of age ratings", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "integer", - "format": "int32" - } + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/UserRole" } - } - } - }, - "401": { - "description": "Unauthorized" - } - }, - "security": [ - { - "jwt_bearer": [] - }, - { - "api_key": [] - } - ] - } - }, - "/{prefix}/api/v1/books/list": { - "post": { - "tags": [ - "Komga" - ], - "summary": "Search/filter books", - "description": "Returns books matching the filter criteria.\nThis uses POST to support complex filter bodies.\n\n## Endpoint\n`POST /{prefix}/api/v1/books/list`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `sort` - Sort parameter (e.g., \"createdDate,desc\")\n\n## Request Body\nJSON object with filter criteria (library_id, series_id, 
search_term, etc.)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_search_books", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" + ] } }, { - "name": "page", + "name": "sharingTag", "in": "query", - "description": "Page number (0-indexed, Komga-style)", + "description": "Filter by sharing tag name (users who have a grant for this tag)", "required": false, "schema": { - "type": "integer", - "format": "int32" + "type": [ + "string", + "null" + ] } }, { - "name": "size", + "name": "sharingTagMode", "in": "query", - "description": "Page size (default: 20)", + "description": "Filter by sharing tag access mode (allow/deny) - only used with sharing_tag", "required": false, "schema": { - "type": "integer", - "format": "int32" + "type": [ + "string", + "null" + ] } }, { - "name": "sort", + "name": "page", "in": "query", - "description": "Sort parameter (e.g., \"createdDate,desc\", \"metadata.numberSort,asc\")", + "description": "Page number (1-indexed, default 1)", "required": false, "schema": { - "type": [ - "string", - "null" - ] + "type": "integer", + "format": "int64", + "minimum": 0 } }, { - "name": "library_id", + "name": "pageSize", "in": "query", - "description": "Filter by library ID", + "description": "Number of items per page (max 100, default 50)", "required": false, "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" + "type": "integer", + "format": "int64", + "minimum": 0 } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaBooksSearchRequestDto" - } - } - }, - "required": true - }, "responses": { "200": { - "description": "Paginated list of books matching filter", + "description": "Paginated list of users", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" + "$ref": 
"#/components/schemas/PaginatedResponse_UserDto" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Admin only" } }, "security": [ @@ -14615,85 +14643,39 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/books/ondeck": { - "get": { + }, + "post": { "tags": [ - "Komga" + "Users" ], - "summary": "Get \"on deck\" books", - "description": "Returns books that are currently in-progress (started but not completed).\nThis is the \"continue reading\" shelf in Komic.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/ondeck`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_books_ondeck", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "page", - "in": "query", - "description": "Page number (0-indexed, Komga-style)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "size", - "in": "query", - "description": "Page size (default: 20)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort parameter (e.g., \"createdDate,desc\", \"metadata.numberSort,asc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] + "summary": "Create a new user (admin only)", + "operationId": "create_user", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUserRequest" + } } }, - { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - } - ], + "required": true + }, "responses": { - "200": { - "description": "Paginated list of 
in-progress books", + "201": { + "description": "User created", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" + "$ref": "#/components/schemas/UserDto" } } } }, - "401": { - "description": "Unauthorized" + "400": { + "description": "Invalid request" + }, + "403": { + "description": "Forbidden - Admin only" } }, "security": [ @@ -14706,28 +14688,18 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}": { + "/api/v1/users/{user_id}": { "get": { "tags": [ - "Komga" + "Users" ], - "summary": "Get a book by ID", - "description": "Returns a single book in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_book", + "summary": "Get user by ID (admin only)", + "operationId": "get_user", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ -14737,20 +14709,20 @@ ], "responses": { "200": { - "description": "Book details", + "description": "User details with sharing tags", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaBookDto" + "$ref": "#/components/schemas/UserDetailDto" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Admin only" }, "404": { - "description": "Book not found" + "description": "User not found" } }, "security": [ @@ -14761,30 +14733,18 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/books/{book_id}/file": { - "get": { - "tags": [ - "Komga" + }, + "delete": { + "tags": [ + "Users" ], - "summary": "Download book file", - "description": "Streams the original book file (CBZ, CBR, EPUB, PDF) for download.\nIncludes proper 
Content-Disposition header with UTF-8 encoding.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/file`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_download_book_file", + "summary": "Delete a user (admin only)", + "operationId": "delete_user", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ -14793,17 +14753,14 @@ } ], "responses": { - "200": { - "description": "Book file download", - "content": { - "application/octet-stream": {} - } + "204": { + "description": "User deleted" }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Admin only" }, "404": { - "description": "Book not found or file missing" + "description": "User not found" } }, "security": [ @@ -14814,30 +14771,18 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/books/{book_id}/next": { - "get": { + }, + "patch": { "tags": [ - "Komga" + "Users" ], - "summary": "Get next book in series", - "description": "Returns the next book in the same series by sort order.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/next`\n\n## Response\n- 200: Next book DTO\n- 404: No next book (this is the last book in series)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_next_book", + "summary": "Update a user (admin only, partial update)", + "operationId": "update_user", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ 
-14845,22 +14790,32 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateUserRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Next book in series", + "description": "User updated", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaBookDto" + "$ref": "#/components/schemas/UserDto" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Admin only" }, "404": { - "description": "No next book" + "description": "User not found" } }, "security": [ @@ -14873,28 +14828,18 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}/pages": { + "/api/v1/users/{user_id}/sharing-tags": { "get": { "tags": [ - "Komga" + "Sharing Tags" ], - "summary": "List all pages for a book", - "description": "Returns an array of page metadata for all pages in a book.\nPages are ordered by page number (1-indexed).\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key\n\n## Response\nReturns an array of `KomgaPageDto` objects with page metadata including\nfilename, MIME type, dimensions, and size.", - "operationId": "komga_list_pages", + "summary": "Get sharing tag grants for a user (admin only)", + "operationId": "get_user_sharing_tags", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ -14904,23 +14849,17 @@ ], "responses": { "200": { - "description": "List of pages in the book", + "description": "List of sharing tag grants for the user", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/KomgaPageDto" - } + "$ref": 
"#/components/schemas/UserSharingTagGrantsResponse" } } } }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Book not found" + "403": { + "description": "Forbidden - Missing permission" } }, "security": [ @@ -14931,59 +14870,51 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/books/{book_id}/pages/{page_number}": { - "get": { + }, + "put": { "tags": [ - "Komga" + "Sharing Tags" ], - "summary": "Get a specific page image", - "description": "Streams the raw page image for the requested page number.\nPage numbers are 1-indexed.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages/{pageNumber}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)\n\n## Response\nReturns the raw image data with appropriate Content-Type header.\nResponse is cached for 1 year (immutable content).", - "operationId": "komga_get_page", + "summary": "Set a user's sharing tag grant (admin only)", + "operationId": "set_user_sharing_tag", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "name": "page_number", - "in": "path", - "description": "Page number (1-indexed)", - "required": true, - "schema": { - "type": "integer", - "format": "int32" - } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetUserSharingTagGrantRequest" + } + } + }, + "required": true + }, "responses": { "200": { - "description": "Page image", + "description": "Sharing tag grant set", "content": { - "image/*": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserSharingTagGrantDto" + } + } } }, - "401": { - "description": "Unauthorized" + 
"403": { + "description": "Forbidden - Missing permission" }, "404": { - "description": "Book or page not found" + "description": "Sharing tag not found" } }, "security": [ @@ -14996,28 +14927,18 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}/pages/{page_number}/thumbnail": { - "get": { + "/api/v1/users/{user_id}/sharing-tags/{tag_id}": { + "delete": { "tags": [ - "Komga" + "Sharing Tags" ], - "summary": "Get a page thumbnail", - "description": "Returns a thumbnail version of the requested page.\nThumbnails are resized to max 300px width/height while maintaining aspect ratio.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages/{pageNumber}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)\n\n## Response\nReturns a JPEG thumbnail with appropriate caching headers.", - "operationId": "komga_get_page_thumbnail", + "summary": "Remove a user's sharing tag grant (admin only)", + "operationId": "remove_user_sharing_tag", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", + "name": "user_id", "in": "path", - "description": "Book ID", + "description": "User ID", "required": true, "schema": { "type": "string", @@ -15025,28 +14946,25 @@ } }, { - "name": "page_number", + "name": "tag_id", "in": "path", - "description": "Page number (1-indexed)", + "description": "Sharing tag ID", "required": true, "schema": { - "type": "integer", - "format": "int32" + "type": "string", + "format": "uuid" } } ], "responses": { - "200": { - "description": "Page thumbnail image", - "content": { - "image/jpeg": {} - } + "204": { + "description": "Sharing tag grant removed" }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden - Missing permission" }, "404": { - "description": "Book or page not found" + "description": "Grant not found" } }, 
"security": [ @@ -15059,51 +14977,41 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}/previous": { + "/health": { "get": { "tags": [ - "Komga" - ], - "summary": "Get previous book in series", - "description": "Returns the previous book in the same series by sort order.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/previous`\n\n## Response\n- 200: Previous book DTO\n- 404: No previous book (this is the first book in series)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_previous_book", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "book_id", - "in": "path", - "description": "Book ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - } + "Health" ], + "summary": "Health check endpoint - checks database connectivity", + "description": "Returns \"OK\" with 200 status if database is healthy,\nor \"Service Unavailable\" with 503 status if database check fails.", + "operationId": "health_check", "responses": { "200": { - "description": "Previous book in series", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaBookDto" - } - } - } + "description": "Service is healthy" }, - "401": { - "description": "Unauthorized" + "503": { + "description": "Service is unavailable" + } + } + } + }, + "/opds": { + "get": { + "tags": [ + "OPDS" + ], + "summary": "Root OPDS catalog", + "description": "Returns the main navigation feed with links to:\n- All libraries\n- Search\n- Recent additions", + "operationId": "root_catalog", + "responses": { + "200": { + "description": "OPDS root catalog", + "content": { + "application/atom+xml": {} + } }, - "404": { - "description": "No previous book" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15116,24 +15024,15 @@ ] } }, - 
"/{prefix}/api/v1/books/{book_id}/read-progress": { - "delete": { + "/opds/books/{book_id}/pages": { + "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "Delete reading progress for a book (mark as unread)", - "description": "Removes all reading progress for a book, effectively marking it as unread.\n\n## Endpoint\n`DELETE /{prefix}/api/v1/books/{bookId}/read-progress`\n\n## Response\n- 204 No Content on success\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_delete_progress", + "summary": "OPDS-PSE: List all pages in a book", + "description": "Returns a PSE page feed with individual page links for streaming.\nThis allows OPDS clients to read books page-by-page without downloading the entire file.", + "operationId": "opds_book_pages", "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, { "name": "book_id", "in": "path", @@ -15146,11 +15045,14 @@ } ], "responses": { - "204": { - "description": "Progress deleted successfully" + "200": { + "description": "OPDS-PSE page feed", + "content": { + "application/atom+xml": {} + } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" }, "404": { "description": "Book not found" @@ -15164,54 +15066,79 @@ "api_key": [] } ] - }, - "patch": { + } + }, + "/opds/books/{book_id}/pages/{page_number}": { + "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "Update reading progress for a book", - "description": "Updates the user's reading progress for a specific book.\nKomic sends: `{ \"completed\": false, \"page\": 151 }`\n\n## Endpoint\n`PATCH /{prefix}/api/v1/books/{bookId}/read-progress`\n\n## Request Body\n- `page` - Current page number (1-indexed, optional)\n- `completed` - Whether book is completed (optional)\n- `device_id` - Device ID (optional, not used by Komic)\n- `device_name` - Device name (optional, not used by 
Komic)\n\n## Response\n- 204 No Content on success (Komga behavior)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_update_progress", + "summary": "OPDS-PSE: Get a page image with reading progress tracking", + "description": "Serves the page image (delegating to the v1 handler) and records reading\nprogress via the batching service. This is the endpoint used by OPDS PSE\nclients that read page-by-page and need implicit progress tracking, since\nthey don't have a JavaScript frontend to send explicit progress updates.", + "operationId": "opds_book_page_image", "parameters": [ { - "name": "prefix", + "name": "book_id", "in": "path", - "description": "Komga API prefix (default: komga)", + "description": "Book ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } }, { - "name": "book_id", + "name": "page_number", "in": "path", - "description": "Book ID", + "description": "Page number (1-indexed)", "required": true, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int32" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaReadProgressUpdateDto" - } + "responses": { + "200": { + "description": "Page image (also records reading progress)", + "content": { + "image/jpeg": {} } }, - "required": true + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Book or page not found" + } }, - "responses": { - "204": { - "description": "Progress updated successfully" + "security": [ + { + "jwt_bearer": [] }, - "401": { - "description": "Unauthorized" + { + "api_key": [] + } + ] + } + }, + "/opds/libraries": { + "get": { + "tags": [ + "OPDS" + ], + "summary": "List all libraries", + "description": "Returns a navigation feed with all available libraries", + "operationId": "opds_list_libraries", + "responses": { + "200": { + "description": "OPDS libraries feed", + "content": { + 
"application/atom+xml": {} + } }, - "404": { - "description": "Book not found" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15224,47 +15151,58 @@ ] } }, - "/{prefix}/api/v1/books/{book_id}/thumbnail": { + "/opds/libraries/{library_id}": { "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "Get book thumbnail", - "description": "Returns a thumbnail image for the book's first page.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_book_thumbnail", + "summary": "List series in a library", + "description": "Returns an acquisition feed with all series in the specified library", + "operationId": "opds_library_series", "parameters": [ { - "name": "prefix", + "name": "library_id", "in": "path", - "description": "Komga API prefix (default: komga)", + "description": "Library ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } }, { - "name": "book_id", - "in": "path", - "description": "Book ID", - "required": true, + "name": "page", + "in": "query", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": "int32", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 } } ], "responses": { "200": { - "description": "Book thumbnail image", + "description": "OPDS library series feed", "content": { - "image/jpeg": {} + "application/atom+xml": {} } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" }, "404": { - "description": "Book not found or has no pages" + "description": "Library not found" } }, "security": [ @@ -15277,19 +15215,19 @@ ] } }, - "/{prefix}/api/v1/collections": { + "/opds/search": { "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "List collections (stub - always returns empty)", - "description": 
"Komga collections are user-created groupings of series.\nCodex doesn't support this feature, so we return empty results.\n\n## Endpoint\n`GET /{prefix}/api/v1/collections`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_collections", + "summary": "OPDS search endpoint", + "description": "Searches books and series by title and returns an OPDS acquisition feed", + "operationId": "opds_search", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", + "name": "q", + "in": "query", + "description": "Search query string", "required": true, "schema": { "type": "string" @@ -15298,17 +15236,13 @@ ], "responses": { "200": { - "description": "Empty list of collections", + "description": "OPDS search results", "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaCollectionDto" - } - } + "application/atom+xml": {} } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15321,41 +15255,20 @@ ] } }, - "/{prefix}/api/v1/genres": { + "/opds/search.xml": { "get": { "tags": [ - "Komga" - ], - "summary": "List genres", - "description": "Returns all genres in the library.\n\n## Endpoint\n`GET /{prefix}/api/v1/genres`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_genres", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - } + "OPDS" ], + "summary": "OpenSearch descriptor endpoint", + "description": "Returns the OpenSearch XML descriptor for OPDS clients", + "operationId": "opds_opensearch_descriptor", "responses": { "200": { - "description": "List of all genres", + "description": "OpenSearch descriptor", "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - } 
+ "application/opensearchdescription+xml": {} } - }, - "401": { - "description": "Unauthorized" } }, "security": [ @@ -15368,41 +15281,38 @@ ] } }, - "/{prefix}/api/v1/languages": { + "/opds/series/{series_id}": { "get": { "tags": [ - "Komga" + "OPDS" ], - "summary": "List languages (stub - always returns empty array)", - "description": "Returns all languages in the library.\nCurrently returns empty as Codex doesn't aggregate languages separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/languages`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_languages", + "summary": "List books in a series", + "description": "Returns an acquisition feed with all books in the specified series", + "operationId": "opds_series_books", "parameters": [ { - "name": "prefix", + "name": "series_id", "in": "path", - "description": "Komga API prefix (default: komga)", + "description": "Series ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } } ], "responses": { "200": { - "description": "Empty list of languages", + "description": "OPDS series books feed", "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - } + "application/atom+xml": {} } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -15415,41 +15325,60 @@ ] } }, - "/{prefix}/api/v1/libraries": { + "/opds/v2": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - "summary": "List all libraries", - "description": "Returns all libraries in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_libraries", - "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" 
+ "summary": "Root OPDS 2.0 catalog", + "description": "Returns the main navigation feed with links to:\n- All libraries\n- Search\n- Recent additions", + "operationId": "opds2_root", + "responses": { + "200": { + "description": "OPDS 2.0 root catalog", + "content": { + "application/opds+json": { + "schema": { + "$ref": "#/components/schemas/Opds2Feed" + } + } } + }, + "403": { + "description": "Forbidden" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] } + ] + } + }, + "/opds/v2/libraries": { + "get": { + "tags": [ + "OPDS 2.0" ], + "summary": "List all libraries (OPDS 2.0)", + "description": "Returns a navigation feed with all available libraries", + "operationId": "opds2_libraries", "responses": { "200": { - "description": "List of libraries", + "description": "OPDS 2.0 libraries feed", "content": { - "application/json": { + "application/opds+json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/KomgaLibraryDto" - } + "$ref": "#/components/schemas/Opds2Feed" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15462,24 +15391,15 @@ ] } }, - "/{prefix}/api/v1/libraries/{library_id}": { + "/opds/v2/libraries/{library_id}": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - "summary": "Get library by ID", - "description": "Returns a single library in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries/{libraryId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_library", + "summary": "List series in a library (OPDS 2.0)", + "description": "Returns a navigation feed with all series in the specified library", + "operationId": "opds2_library_series", "parameters": [ - { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, - "schema": { - "type": "string" - } - }, { "name": "library_id", "in": "path", @@ -15489,21 +15409,41 @@ 
"type": "string", "format": "uuid" } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + }, + { + "name": "pageSize", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } } ], "responses": { "200": { - "description": "Library details", + "description": "OPDS 2.0 library series feed", "content": { - "application/json": { + "application/opds+json": { "schema": { - "$ref": "#/components/schemas/KomgaLibraryDto" + "$ref": "#/components/schemas/Opds2Feed" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" }, "404": { "description": "Library not found" @@ -15519,47 +15459,49 @@ ] } }, - "/{prefix}/api/v1/libraries/{library_id}/thumbnail": { + "/opds/v2/recent": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - "summary": "Get library thumbnail", - "description": "Returns a thumbnail image for the library. Uses the first series' cover\nas the library thumbnail, or returns a 404 if no series exist.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries/{libraryId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)", - "operationId": "komga_get_library_thumbnail", + "summary": "List recent additions (OPDS 2.0)", + "description": "Returns a publications feed with recently added books", + "operationId": "opds2_recent", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", - "required": true, + "name": "page", + "in": "query", + "required": false, "schema": { - "type": "string" + "type": "integer", + "format": "int32", + "minimum": 0 } }, { - "name": "library_id", - "in": "path", - "description": "Library ID", - "required": true, + "name": "pageSize", + "in": "query", + "required": false, "schema": { - "type": "string", - "format": "uuid" + "type": "integer", + "format": 
"int32", + "minimum": 0 } } ], "responses": { "200": { - "description": "Library thumbnail image", + "description": "OPDS 2.0 recent additions feed", "content": { - "image/jpeg": {} + "application/opds+json": { + "schema": { + "$ref": "#/components/schemas/Opds2Feed" + } + } } }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Library not found or no series in library" + "403": { + "description": "Forbidden" } }, "security": [ @@ -15572,19 +15514,19 @@ ] } }, - "/{prefix}/api/v1/publishers": { + "/opds/v2/search": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - "summary": "List publishers (stub - always returns empty array)", - "description": "Returns all publishers in the library.\nCurrently returns empty as Codex doesn't aggregate publishers separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/publishers`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_publishers", + "summary": "OPDS 2.0 search endpoint", + "description": "Searches books and series by title and returns an OPDS 2.0 publications feed", + "operationId": "opds2_search", "parameters": [ { - "name": "prefix", - "in": "path", - "description": "Komga API prefix (default: komga)", + "name": "query", + "in": "query", + "description": "Search query string", "required": true, "schema": { "type": "string" @@ -15593,20 +15535,20 @@ ], "responses": { "200": { - "description": "Empty list of publishers", + "description": "OPDS 2.0 search results", "content": { - "application/json": { + "application/opds+json": { "schema": { - "type": "array", - "items": { - "type": "string" - } + "$ref": "#/components/schemas/Opds2Feed" } } } }, - "401": { - "description": "Unauthorized" + "400": { + "description": "Bad request - empty query" + }, + "403": { + "description": "Forbidden" } }, "security": [ @@ -15619,38 +15561,42 @@ ] } }, - "/{prefix}/api/v1/readlists": { + "/opds/v2/series/{series_id}": { "get": { "tags": [ - "Komga" + "OPDS 2.0" ], - 
"summary": "List read lists (stub - always returns empty)", - "description": "Komga read lists are user-created lists of books to read.\nCodex doesn't support this feature, so we return empty results.\n\n## Endpoint\n`GET /{prefix}/api/v1/readlists`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_readlists", + "summary": "List books in a series (OPDS 2.0)", + "description": "Returns a publications feed with all books in the specified series", + "operationId": "opds2_series_books", "parameters": [ { - "name": "prefix", + "name": "series_id", "in": "path", - "description": "Komga API prefix (default: komga)", + "description": "Series ID", "required": true, "schema": { - "type": "string" + "type": "string", + "format": "uuid" } } ], "responses": { "200": { - "description": "Empty list of read lists", + "description": "OPDS 2.0 series books feed", "content": { - "application/json": { + "application/opds+json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaReadListDto" + "$ref": "#/components/schemas/Opds2Feed" } } } }, - "401": { - "description": "Unauthorized" + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Series not found" } }, "security": [ @@ -15663,14 +15609,14 @@ ] } }, - "/{prefix}/api/v1/series": { + "/{prefix}/api/v1/age-ratings": { "get": { "tags": [ "Komga" ], - "summary": "List all series (paginated)", - "description": "Returns all series in Komga-compatible format with pagination.\n\n## Endpoint\n`GET /{prefix}/api/v1/series`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n- `search` - Optional search query\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_series", + "summary": "List age ratings (stub - always returns empty array)", + "description": "Returns all age ratings in the library.\nCurrently returns 
empty as Codex doesn't aggregate age ratings separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/age-ratings`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_age_ratings", "parameters": [ { "name": "prefix", @@ -15680,72 +15626,19 @@ "schema": { "type": "string" } - }, - { - "name": "page", - "in": "query", - "description": "Page number (0-indexed, Komga-style)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "size", - "in": "query", - "description": "Page size (default: 20)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - }, - { - "name": "search", - "in": "query", - "description": "Search query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } } ], "responses": { "200": { - "description": "Paginated list of series", + "description": "Empty list of age ratings", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + "type": "array", + "items": { + "type": "integer", + "format": "int32" + } } } } @@ -15764,14 +15657,14 @@ ] } }, - "/{prefix}/api/v1/series/list": { + "/{prefix}/api/v1/books/list": { "post": { "tags": [ "Komga" ], - "summary": "Search/filter series", - "description": "Returns series matching the filter criteria.\nThis uses POST to support complex filter bodies.\n\n## Endpoint\n`POST /{prefix}/api/v1/series/list`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `sort` - Sort parameter 
(e.g., \"createdDate,desc\")\n\n## Request Body\nJSON object with filter criteria (library_id, fullTextSearch, condition, etc.)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_search_series", + "summary": "Search/filter books", + "description": "Returns books matching the filter criteria.\nThis uses POST to support complex filter bodies.\n\n## Endpoint\n`POST /{prefix}/api/v1/books/list`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `sort` - Sort parameter (e.g., \"createdDate,desc\")\n\n## Request Body\nJSON object with filter criteria (library_id, series_id, search_term, etc.)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_search_books", "parameters": [ { "name": "prefix", @@ -15803,22 +15696,9 @@ } }, { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - }, - { - "name": "search", + "name": "sort", "in": "query", - "description": "Search query", + "description": "Sort parameter (e.g., \"createdDate,desc\", \"metadata.numberSort,asc\")", "required": false, "schema": { "type": [ @@ -15828,15 +15708,16 @@ } }, { - "name": "sort", + "name": "library_id", "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "description": "Filter by library ID", "required": false, "schema": { "type": [ "string", "null" - ] + ], + "format": "uuid" } } ], @@ -15844,7 +15725,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaSeriesSearchRequestDto" + "$ref": "#/components/schemas/KomgaBooksSearchRequestDto" } } }, @@ -15852,11 +15733,11 @@ }, "responses": { "200": { - "description": "Paginated list of series matching filter", + "description": "Paginated list of books matching filter", "content": { 
"application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" } } } @@ -15875,14 +15756,14 @@ ] } }, - "/{prefix}/api/v1/series/new": { + "/{prefix}/api/v1/books/ondeck": { "get": { "tags": [ "Komga" ], - "summary": "Get recently added series", - "description": "Returns series sorted by created date descending (newest first).\n\n## Endpoint\n`GET /{prefix}/api/v1/series/new`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series_new", + "summary": "Get \"on deck\" books", + "description": "Returns books that are currently in-progress (started but not completed).\nThis is the \"continue reading\" shelf in Komic.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/ondeck`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_books_ondeck", "parameters": [ { "name": "prefix", @@ -15914,22 +15795,9 @@ } }, { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - }, - { - "name": "search", + "name": "sort", "in": "query", - "description": "Search query", + "description": "Sort parameter (e.g., \"createdDate,desc\", \"metadata.numberSort,asc\")", "required": false, "schema": { "type": [ @@ -15939,25 +15807,26 @@ } }, { - "name": "sort", + "name": "library_id", "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "description": "Filter by library ID", "required": false, "schema": { "type": [ "string", "null" - ] + ], + "format": "uuid" } } ], "responses": { "200": { 
- "description": "Paginated list of recently added series", + "description": "Paginated list of in-progress books", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" } } } @@ -15976,14 +15845,14 @@ ] } }, - "/{prefix}/api/v1/series/release-dates": { + "/{prefix}/api/v1/books/{book_id}": { "get": { "tags": [ "Komga" ], - "summary": "List series release dates (stub - always returns empty array)", - "description": "Returns all release dates used by series in the library.\nCurrently returns empty as Codex doesn't aggregate release dates separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/release-dates`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_series_release_dates", + "summary": "Get a book by ID", + "description": "Returns a single book in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_book", "parameters": [ { "name": "prefix", @@ -15993,24 +15862,34 @@ "schema": { "type": "string" } + }, + { + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { "200": { - "description": "Empty list of release dates", + "description": "Book details", "content": { "application/json": { "schema": { - "type": "array", - "items": { - "type": "string" - } + "$ref": "#/components/schemas/KomgaBookDto" } } } }, "401": { "description": "Unauthorized" + }, + "404": { + "description": "Book not found" } }, "security": [ @@ -16023,14 +15902,14 @@ ] } }, - "/{prefix}/api/v1/series/updated": { + "/{prefix}/api/v1/books/{book_id}/file": { "get": { "tags": [ "Komga" ], - "summary": "Get recently updated series", - "description": "Returns series sorted by last modified date descending (most 
recently updated first).\n\n## Endpoint\n`GET /{prefix}/api/v1/series/updated`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series_updated", + "summary": "Download book file", + "description": "Streams the original book file (CBZ, CBR, EPUB, PDF) for download.\nIncludes proper Content-Disposition header with UTF-8 encoding.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/file`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_download_book_file", "parameters": [ { "name": "prefix", @@ -16042,76 +15921,28 @@ } }, { - "name": "page", - "in": "query", - "description": "Page number (0-indexed, Komga-style)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "size", - "in": "query", - "description": "Page size (default: 20)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, "schema": { - "type": [ - "string", - "null" - ], + "type": "string", "format": "uuid" } - }, - { - "name": "search", - "in": "query", - "description": "Search query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } } ], "responses": { "200": { - "description": "Paginated list of recently updated series", + "description": "Book file download", "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/KomgaPage_KomgaSeriesDto" - } - } + "application/octet-stream": {} } }, "401": { "description": "Unauthorized" + }, + "404": { + "description": "Book not found or file missing" } }, "security": [ @@ -16124,15 +15955,15 @@ ] } }, - "/{prefix}/api/v1/series/{series_id}": { + "/{prefix}/api/v1/books/{book_id}/next": { "get": { "tags": [ "Komga" ], - "summary": "Get series by ID", - "description": "Returns a single series in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series", - "parameters": [ + "summary": "Get next book in series", + "description": "Returns the next book in the same series by sort order.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/next`\n\n## Response\n- 200: Next book DTO\n- 404: No next book (this is the last book in series)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_next_book", + "parameters": [ { "name": "prefix", "in": "path", @@ -16143,9 +15974,9 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", @@ -16155,11 +15986,11 @@ ], "responses": { "200": { - "description": "Series details", + "description": "Next book in series", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaSeriesDto" + "$ref": "#/components/schemas/KomgaBookDto" } } } @@ -16168,7 +15999,7 @@ "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "No next book" } }, "security": [ @@ -16181,14 +16012,14 @@ ] } }, - "/{prefix}/api/v1/series/{series_id}/books": { + "/{prefix}/api/v1/books/{book_id}/pages": { "get": { "tags": [ "Komga" ], - "summary": "Get books in a series", - "description": "Returns all books in a series with pagination.\n\n## Endpoint\n`GET 
/{prefix}/api/v1/series/{seriesId}/books`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series_books", + "summary": "List all pages for a book", + "description": "Returns an array of page metadata for all pages in a book.\nPages are ordered by page number (1-indexed).\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key\n\n## Response\nReturns an array of `KomgaPageDto` objects with page metadata including\nfilename, MIME type, dimensions, and size.", + "operationId": "komga_list_pages", "parameters": [ { "name": "prefix", @@ -16200,80 +16031,26 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "name": "page", - "in": "query", - "description": "Page number (0-indexed, Komga-style)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "size", - "in": "query", - "description": "Page size (default: 20)", - "required": false, - "schema": { - "type": "integer", - "format": "int32" - } - }, - { - "name": "library_id", - "in": "query", - "description": "Filter by library ID", - "required": false, - "schema": { - "type": [ - "string", - "null" - ], - "format": "uuid" - } - }, - { - "name": "search", - "in": "query", - "description": "Search query", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } - }, - { - "name": "sort", - "in": "query", - "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", - "required": false, - "schema": { - "type": [ - "string", - "null" - ] - } } ], "responses": { "200": { - "description": "Paginated list of books in series", + "description": "List of pages in 
the book", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" + "type": "array", + "items": { + "$ref": "#/components/schemas/KomgaPageDto" + } } } } @@ -16282,7 +16059,7 @@ "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "Book not found" } }, "security": [ @@ -16295,14 +16072,14 @@ ] } }, - "/{prefix}/api/v1/series/{series_id}/read-progress": { - "post": { + "/{prefix}/api/v1/books/{book_id}/pages/{page_number}": { + "get": { "tags": [ "Komga" ], - "summary": "Mark all books in a series as read", - "description": "Marks all books in a series as completed (read) for the current user.\nThis is equivalent to marking each book individually as completed.\n\n## Endpoint\n`POST /{prefix}/api/v1/series/{seriesId}/read-progress`\n\n## Response\n- 204 No Content on success (Komga behavior)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_mark_series_as_read", + "summary": "Get a specific page image", + "description": "Streams the raw page image for the requested page number.\nPage numbers are 1-indexed.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages/{pageNumber}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)\n\n## Response\nReturns the raw image data with appropriate Content-Type header.\nResponse is cached for 1 year (immutable content).", + "operationId": "komga_get_page", "parameters": [ { "name": "prefix", @@ -16314,25 +16091,38 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", "format": "uuid" } + }, + { + "name": "page_number", + "in": "path", + "description": "Page number (1-indexed)", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } } ], "responses": { - "204": { - "description": "Series marked 
as read" + "200": { + "description": "Page image", + "content": { + "image/*": {} + } }, "401": { "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "Book or page not found" } }, "security": [ @@ -16343,14 +16133,16 @@ "api_key": [] } ] - }, - "delete": { + } + }, + "/{prefix}/api/v1/books/{book_id}/pages/{page_number}/thumbnail": { + "get": { "tags": [ "Komga" ], - "summary": "Mark all books in a series as unread", - "description": "Removes all reading progress for all books in a series, effectively marking\nthe entire series as unread for the current user.\n\n## Endpoint\n`DELETE /{prefix}/api/v1/series/{seriesId}/read-progress`\n\n## Response\n- 204 No Content on success\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_mark_series_as_unread", + "summary": "Get a page thumbnail", + "description": "Returns a thumbnail version of the requested page.\nThumbnails are resized to max 300px width/height while maintaining aspect ratio.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/pages/{pageNumber}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)\n\n## Response\nReturns a JPEG thumbnail with appropriate caching headers.", + "operationId": "komga_get_page_thumbnail", "parameters": [ { "name": "prefix", @@ -16362,25 +16154,38 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", "format": "uuid" } + }, + { + "name": "page_number", + "in": "path", + "description": "Page number (1-indexed)", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } } ], "responses": { - "204": { - "description": "Series marked as unread" + "200": { + "description": "Page thumbnail image", + "content": { + "image/jpeg": {} + } }, "401": { "description": "Unauthorized" }, "404": { - 
"description": "Series not found" + "description": "Book or page not found" } }, "security": [ @@ -16393,14 +16198,14 @@ ] } }, - "/{prefix}/api/v1/series/{series_id}/thumbnail": { + "/{prefix}/api/v1/books/{book_id}/previous": { "get": { "tags": [ "Komga" ], - "summary": "Get series thumbnail", - "description": "Returns a thumbnail image for the series.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_series_thumbnail", + "summary": "Get previous book in series", + "description": "Returns the previous book in the same series by sort order.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/previous`\n\n## Response\n- 200: Previous book DTO\n- 404: No previous book (this is the first book in series)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_previous_book", "parameters": [ { "name": "prefix", @@ -16412,9 +16217,9 @@ } }, { - "name": "series_id", + "name": "book_id", "in": "path", - "description": "Series ID", + "description": "Book ID", "required": true, "schema": { "type": "string", @@ -16424,16 +16229,20 @@ ], "responses": { "200": { - "description": "Series thumbnail image", + "description": "Previous book in series", "content": { - "image/jpeg": {} + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaBookDto" + } + } } }, "401": { "description": "Unauthorized" }, "404": { - "description": "Series not found" + "description": "No previous book" } }, "security": [ @@ -16446,14 +16255,14 @@ ] } }, - "/{prefix}/api/v1/tags": { - "get": { + "/{prefix}/api/v1/books/{book_id}/read-progress": { + "delete": { "tags": [ "Komga" ], - "summary": "List tags", - "description": "Returns all tags in the library.\n\n## Endpoint\n`GET /{prefix}/api/v1/tags`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_list_tags", + "summary": "Delete 
reading progress for a book (mark as unread)", + "description": "Removes all reading progress for a book, effectively marking it as unread.\n\n## Endpoint\n`DELETE /{prefix}/api/v1/books/{bookId}/read-progress`\n\n## Response\n- 204 No Content on success\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_delete_progress", "parameters": [ { "name": "prefix", @@ -16463,24 +16272,27 @@ "schema": { "type": "string" } + }, + { + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], "responses": { - "200": { - "description": "List of all tags", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - } - } + "204": { + "description": "Progress deleted successfully" }, "401": { "description": "Unauthorized" + }, + "404": { + "description": "Book not found" } }, "security": [ @@ -16491,16 +16303,14 @@ "api_key": [] } ] - } - }, - "/{prefix}/api/v1/users/me": { - "get": { + }, + "patch": { "tags": [ "Komga" ], - "summary": "Get current user information", - "description": "Returns information about the currently authenticated user in Komga format.\nThis endpoint is used by Komic and other apps to verify authentication\nand determine user capabilities.\n\n## Endpoint\n`GET /{prefix}/api/v1/users/me`\n\n## Response\nReturns a `KomgaUserDto` containing:\n- User ID (UUID as string)\n- Email address\n- Roles (ADMIN, USER, FILE_DOWNLOAD)\n- Library access settings\n- Content restrictions\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", - "operationId": "komga_get_current_user", + "summary": "Update reading progress for a book", + "description": "Updates the user's reading progress for a specific book.\nKomic sends: `{ \"completed\": false, \"page\": 151 }`\n\n## Endpoint\n`PATCH /{prefix}/api/v1/books/{bookId}/read-progress`\n\n## Request Body\n- `page` - Current page 
number (1-indexed, optional)\n- `completed` - Whether book is completed (optional)\n- `device_id` - Device ID (optional, not used by Komic)\n- `device_name` - Device name (optional, not used by Komic)\n\n## Response\n- 204 No Content on success (Komga behavior)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_update_progress", "parameters": [ { "name": "prefix", @@ -16510,21 +16320,1350 @@ "schema": { "type": "string" } + }, + { + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } } ], - "responses": { - "200": { - "description": "Current user information", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/KomgaUserDto" - } + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaReadProgressUpdateDto" } } }, - "401": { - "description": "Unauthorized" + "required": true + }, + "responses": { + "204": { + "description": "Progress updated successfully" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Book not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/books/{book_id}/thumbnail": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get book thumbnail", + "description": "Returns a thumbnail image for the book's first page.\n\n## Endpoint\n`GET /{prefix}/api/v1/books/{bookId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_book_thumbnail", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "book_id", + "in": "path", + "description": "Book ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": 
{ + "description": "Book thumbnail image", + "content": { + "image/jpeg": {} + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Book not found or has no pages" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/collections": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List collections (stub - always returns empty)", + "description": "Komga collections are user-created groupings of series.\nCodex doesn't support this feature, so we return empty results.\n\n## Endpoint\n`GET /{prefix}/api/v1/collections`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_collections", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of collections", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaCollectionDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/genres": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List genres", + "description": "Returns all genres in the library.\n\n## Endpoint\n`GET /{prefix}/api/v1/genres`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_genres", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List of all genres", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + 
"jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/languages": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List languages (stub - always returns empty array)", + "description": "Returns all languages in the library.\nCurrently returns empty as Codex doesn't aggregate languages separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/languages`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_languages", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of languages", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/libraries": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List all libraries", + "description": "Returns all libraries in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_libraries", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List of libraries", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/KomgaLibraryDto" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/libraries/{library_id}": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get library by ID", + "description": 
"Returns a single library in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries/{libraryId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_library", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "library_id", + "in": "path", + "description": "Library ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Library details", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaLibraryDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Library not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/libraries/{library_id}/thumbnail": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get library thumbnail", + "description": "Returns a thumbnail image for the library. 
Uses the first series' cover\nas the library thumbnail, or returns a 404 if no series exist.\n\n## Endpoint\n`GET /{prefix}/api/v1/libraries/{libraryId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key (via cookie fallback for browser image tags)", + "operationId": "komga_get_library_thumbnail", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "library_id", + "in": "path", + "description": "Library ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Library thumbnail image", + "content": { + "image/jpeg": {} + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Library not found or no series in library" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/publishers": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List publishers (stub - always returns empty array)", + "description": "Returns all publishers in the library.\nCurrently returns empty as Codex doesn't aggregate publishers separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/publishers`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_publishers", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of publishers", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/readlists": { + "get": { + "tags": [ + "Komga" + 
], + "summary": "List read lists (stub - always returns empty)", + "description": "Komga read lists are user-created lists of books to read.\nCodex doesn't support this feature, so we return empty results.\n\n## Endpoint\n`GET /{prefix}/api/v1/readlists`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_readlists", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of read lists", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaReadListDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List all series (paginated)", + "description": "Returns all series in Komga-compatible format with pagination.\n\n## Endpoint\n`GET /{prefix}/api/v1/series`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n- `search` - Optional search query\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_series", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": "query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "library_id", + "in": "query", + 
"description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "Paginated list of series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/list": { + "post": { + "tags": [ + "Komga" + ], + "summary": "Search/filter series", + "description": "Returns series matching the filter criteria.\nThis uses POST to support complex filter bodies.\n\n## Endpoint\n`POST /{prefix}/api/v1/series/list`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `sort` - Sort parameter (e.g., \"createdDate,desc\")\n\n## Request Body\nJSON object with filter criteria (library_id, fullTextSearch, condition, etc.)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_search_series", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": "query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": 
"int32" + } + }, + { + "name": "library_id", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaSeriesSearchRequestDto" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Paginated list of series matching filter", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/new": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get recently added series", + "description": "Returns series sorted by created date descending (newest first).\n\n## Endpoint\n`GET /{prefix}/api/v1/series/new`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series_new", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": 
"query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "library_id", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "Paginated list of recently added series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/release-dates": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List series release dates (stub - always returns empty array)", + "description": "Returns all release dates used by series in the library.\nCurrently returns empty as Codex doesn't aggregate release dates separately.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/release-dates`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_series_release_dates", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Empty list of release dates", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + 
"jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/updated": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get recently updated series", + "description": "Returns series sorted by last modified date descending (most recently updated first).\n\n## Endpoint\n`GET /{prefix}/api/v1/series/updated`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n- `library_id` - Optional filter by library UUID\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series_updated", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": "query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "library_id", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "Paginated list of recently updated series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] 
+ }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/{series_id}": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get series by ID", + "description": "Returns a single series in Komga-compatible format.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Series details", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaSeriesDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/{series_id}/books": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get books in a series", + "description": "Returns all books in a series with pagination.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}/books`\n\n## Query Parameters\n- `page` - Page number (0-indexed, default: 0)\n- `size` - Page size (default: 20)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series_books", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "name": "page", + "in": "query", + "description": "Page number (0-indexed, Komga-style)", + 
"required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "size", + "in": "query", + "description": "Page size (default: 20)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "library_id", + "in": "query", + "description": "Filter by library ID", + "required": false, + "schema": { + "type": [ + "string", + "null" + ], + "format": "uuid" + } + }, + { + "name": "search", + "in": "query", + "description": "Search query", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + }, + { + "name": "sort", + "in": "query", + "description": "Sort parameter (e.g., \"metadata.titleSort,asc\", \"createdDate,desc\")", + "required": false, + "schema": { + "type": [ + "string", + "null" + ] + } + } + ], + "responses": { + "200": { + "description": "Paginated list of books in series", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaPage_KomgaBookDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/{series_id}/read-progress": { + "post": { + "tags": [ + "Komga" + ], + "summary": "Mark all books in a series as read", + "description": "Marks all books in a series as completed (read) for the current user.\nThis is equivalent to marking each book individually as completed.\n\n## Endpoint\n`POST /{prefix}/api/v1/series/{seriesId}/read-progress`\n\n## Response\n- 204 No Content on success (Komga behavior)\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_mark_series_as_read", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": "path", + "description": "Series 
ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "204": { + "description": "Series marked as read" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + }, + "delete": { + "tags": [ + "Komga" + ], + "summary": "Mark all books in a series as unread", + "description": "Removes all reading progress for all books in a series, effectively marking\nthe entire series as unread for the current user.\n\n## Endpoint\n`DELETE /{prefix}/api/v1/series/{seriesId}/read-progress`\n\n## Response\n- 204 No Content on success\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_mark_series_as_unread", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": "path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "204": { + "description": "Series marked as unread" + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/series/{series_id}/thumbnail": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get series thumbnail", + "description": "Returns a thumbnail image for the series.\n\n## Endpoint\n`GET /{prefix}/api/v1/series/{seriesId}/thumbnail`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_series_thumbnail", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "series_id", + "in": 
"path", + "description": "Series ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Series thumbnail image", + "content": { + "image/jpeg": {} + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Series not found" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/tags": { + "get": { + "tags": [ + "Komga" + ], + "summary": "List tags", + "description": "Returns all tags in the library.\n\n## Endpoint\n`GET /{prefix}/api/v1/tags`\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_list_tags", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "List of all tags", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "401": { + "description": "Unauthorized" + } + }, + "security": [ + { + "jwt_bearer": [] + }, + { + "api_key": [] + } + ] + } + }, + "/{prefix}/api/v1/users/me": { + "get": { + "tags": [ + "Komga" + ], + "summary": "Get current user information", + "description": "Returns information about the currently authenticated user in Komga format.\nThis endpoint is used by Komic and other apps to verify authentication\nand determine user capabilities.\n\n## Endpoint\n`GET /{prefix}/api/v1/users/me`\n\n## Response\nReturns a `KomgaUserDto` containing:\n- User ID (UUID as string)\n- Email address\n- Roles (ADMIN, USER, FILE_DOWNLOAD)\n- Library access settings\n- Content restrictions\n\n## Authentication\n- Bearer token (JWT)\n- Basic Auth\n- API Key", + "operationId": "komga_get_current_user", + "parameters": [ + { + "name": "prefix", + "in": "path", + "description": "Komga API prefix (default: komga)", + 
"required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Current user information", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/KomgaUserDto" + } + } + } + }, + "401": { + "description": "Unauthorized" } }, "security": [ @@ -16907,6 +18046,27 @@ } } }, + "ApplicabilityResponse": { + "type": "object", + "description": "Response shape for `GET /api/v1/release-sources/applicability`.", + "required": [ + "applicable", + "pluginDisplayNames" + ], + "properties": { + "applicable": { + "type": "boolean", + "description": "`true` when at least one enabled `release_source` plugin applies to\nthe requested library (or, if no `libraryId` was supplied, to *any*\nlibrary). The frontend uses this to decide whether to render the\nper-series Tracking panel and Releases tab, or to show the\nbulk-track menu entry." + }, + "pluginDisplayNames": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Plugin display names (or fallback to `name` when no manifest cached\nyet) of the enabled release-source plugins covering this library.\nEmpty when `applicable` is `false`. Useful for surfacing \"Powered by\nMangaUpdates, Nyaa\" hints in the UI." 
+ } + } + }, "AuthorContextDto": { "type": "object", "description": "Author context for template evaluation.", @@ -20487,6 +21647,57 @@ } } }, + "BulkReleaseAction": { + "type": "string", + "description": "Action requested by `POST /api/v1/releases/bulk`.", + "enum": [ + "dismiss", + "mark-acquired", + "ignore", + "reset", + "delete" + ] + }, + "BulkReleaseActionRequest": { + "type": "object", + "description": "Request body for `POST /api/v1/releases/bulk`.", + "required": [ + "ids", + "action" + ], + "properties": { + "action": { + "$ref": "#/components/schemas/BulkReleaseAction" + }, + "ids": { + "type": "array", + "items": { + "type": "string", + "format": "uuid" + } + } + } + }, + "BulkReleaseActionResponse": { + "type": "object", + "description": "Response from `POST /api/v1/releases/bulk`.", + "required": [ + "affected", + "action" + ], + "properties": { + "action": { + "$ref": "#/components/schemas/BulkReleaseAction", + "description": "Action that ran (echoed back for client-side confirmation toasts)." + }, + "affected": { + "type": "integer", + "format": "int64", + "description": "Number of ledger rows actually affected. 
Less than `ids.len()` when\nsome IDs were already deleted concurrently.", + "minimum": 0 + } + } + }, "BulkRenumberSeriesRequest": { "type": "object", "description": "Request for bulk renumber operations on multiple series", @@ -20591,24 +21802,83 @@ } } }, - "BulkTaskResponse": { + "BulkTaskResponse": { + "type": "object", + "description": "Response for bulk task operations", + "required": [ + "taskId", + "message" + ], + "properties": { + "message": { + "type": "string", + "description": "Message describing the operation", + "example": "Thumbnail generation task queued for 5 series" + }, + "taskId": { + "type": "string", + "format": "uuid", + "description": "ID of the fan-out task that was created", + "example": "550e8400-e29b-41d4-a716-446655440000" + } + } + }, + "BulkTrackForReleasesItem": { + "type": "object", + "description": "Per-series outcome of a bulk track / untrack operation.\n\nReturned in `BulkTrackForReleasesResponse.results` so the UI can show a\nper-row status (e.g. \"tracked\", \"skipped: not found\", \"errored: …\") without\nre-querying the tracking config endpoint per series.", + "required": [ + "seriesId", + "outcome" + ], + "properties": { + "detail": { + "type": [ + "string", + "null" + ], + "description": "Free-form detail (error message for `errored`, reason for `skipped`).\n`None` for the success cases." + }, + "outcome": { + "type": "string", + "description": "`tracked` | `untracked` | `skipped` | `errored`." + }, + "seriesId": { + "type": "string", + "format": "uuid" + } + } + }, + "BulkTrackForReleasesResponse": { "type": "object", - "description": "Response for bulk task operations", + "description": "Aggregate result of `POST /series/bulk/track-for-releases` and its untrack\ncounterpart. 
Counts and per-series outcomes for client-side display.", "required": [ - "taskId", - "message" + "changed", + "alreadyInState", + "errored", + "results" ], "properties": { - "message": { - "type": "string", - "description": "Message describing the operation", - "example": "Thumbnail generation task queued for 5 series" + "alreadyInState": { + "type": "integer", + "description": "Series whose `tracked` flag was already in the target state. No-ops.", + "minimum": 0 }, - "taskId": { - "type": "string", - "format": "uuid", - "description": "ID of the fan-out task that was created", - "example": "550e8400-e29b-41d4-a716-446655440000" + "changed": { + "type": "integer", + "description": "Series successfully flipped to `tracked = true` (or `false` for the\nuntrack endpoint).", + "minimum": 0 + }, + "errored": { + "type": "integer", + "description": "Series that could not be processed (missing, error, etc.).", + "minimum": 0 + }, + "results": { + "type": "array", + "items": { + "$ref": "#/components/schemas/BulkTrackForReleasesItem" + }, + "description": "Per-series outcomes in input order." } } }, @@ -20800,7 +22070,7 @@ }, "type": { "type": "string", - "description": "Field type: \"number\", \"string\", or \"boolean\"" + "description": "Field type — free-form documentation hint. Common values: \"number\",\n\"string\", \"boolean\", \"string-array\", \"object\". The host never validates\nstored config against this; it forwards the raw JSON to the plugin." } } }, @@ -21386,6 +22656,26 @@ } } }, + "CreateSeriesAliasRequest": { + "type": "object", + "required": [ + "alias" + ], + "properties": { + "alias": { + "type": "string", + "description": "Alias text. Will be trimmed; must normalize to non-empty.", + "example": "Boku no Hero Academia" + }, + "source": { + "type": [ + "string", + "null" + ], + "description": "Optional explicit source. Defaults to `manual` when called from the API.\nPlugin-internal flows write `metadata`; we don't expose that to HTTP." 
+ } + } + }, "CreateSeriesExportRequest": { "type": "object", "description": "Request body for creating a new series export", @@ -21633,6 +22923,19 @@ } } }, + "DeleteReleaseResponse": { + "type": "object", + "description": "Response from `DELETE /api/v1/releases/{id}`.\n\nSingle-row delete returns a small confirmation rather than 204 so the\nfrontend can surface a toast that mentions the etag clear (\"the next\npoll will re-fetch this release\"). Mirrors the bulk-delete shape with\n`affected = 1`.", + "required": [ + "deleted" + ], + "properties": { + "deleted": { + "type": "boolean", + "description": "`true` if the row was deleted, `false` if it didn't exist." + } + } + }, "DetectedSeriesDto": { "type": "object", "description": "Detected series information for preview", @@ -22505,6 +23808,92 @@ ] } } + }, + { + "type": "object", + "description": "A new release was recorded in the ledger.\n\nEmitted once per accepted, non-deduped ledger insert by the polling\ntask and the `releases/record` reverse-RPC handler. The frontend uses\nthis to bump the Releases nav badge, surface a toast on the inbox\npage, and refresh the per-series Releases tab.", + "required": [ + "ledgerId", + "seriesId", + "sourceId", + "pluginId", + "language", + "type" + ], + "properties": { + "chapter": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Chapter announced (if the source emits chapters)." + }, + "language": { + "type": "string", + "description": "Language code (e.g. `\"en\"`); used by client-side notification\npreference filters." + }, + "ledgerId": { + "type": "string", + "format": "uuid" + }, + "pluginId": { + "type": "string", + "description": "Plugin name that owns the source (`release_sources.plugin_id`).\nHelps the frontend filter without an extra lookup." 
+ }, + "seriesId": { + "type": "string", + "format": "uuid" + }, + "sourceId": { + "type": "string", + "format": "uuid" + }, + "type": { + "type": "string", + "enum": [ + "release_announced" + ] + }, + "volume": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Volume announced (if the source emits volumes)." + } + } + }, + { + "type": "object", + "description": "A release source's poll task completed.\n\nEmitted at the end of every `poll_release_source` task run, after\n`release_sources.last_summary` / `last_polled_at` / `etag` have been\npersisted. The frontend uses this to refresh the Release tracking\nsettings page in real time so users don't have to reload to see a\n\"Poll now\" finish. Carries no diff details — receivers should\ninvalidate the source query and re-read the row.", + "required": [ + "sourceId", + "pluginId", + "hadError", + "type" + ], + "properties": { + "hadError": { + "type": "boolean", + "description": "`true` if the poll wrote a `last_error`. Cheap \"did it fail\"\nhint without forcing the client to refetch." + }, + "pluginId": { + "type": "string", + "description": "Plugin that owns the source (`release_sources.plugin_id`).\nCheap filter for clients only watching certain plugins." + }, + "sourceId": { + "type": "string", + "format": "uuid" + }, + "type": { + "type": "string", + "enum": [ + "release_source_polled" + ] + } + } } ], "description": "Specific event types for entity changes" @@ -23777,6 +25166,32 @@ "description": "When the series was last updated", "example": "2024-01-15T10:30:00Z" }, + "upstreamChapterGap": { + "type": [ + "number", + "null" + ], + "format": "float", + "description": "Upstream-vs-local chapter delta. See `SeriesDto::upstream_chapter_gap`.", + "example": 3.0 + }, + "upstreamGapProvider": { + "type": [ + "string", + "null" + ], + "description": "Provider that supplied the upstream counts. 
See\n`SeriesDto::upstream_gap_provider`.", + "example": "MangaBaka" + }, + "upstreamVolumeGap": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Upstream-vs-local volume delta. See `SeriesDto::upstream_volume_gap`.", + "example": 1 + }, "volumesOwned": { "type": [ "integer", @@ -28332,6 +29747,174 @@ } } }, + "PaginatedResponse_ReleaseLedgerEntryDto": { + "type": "object", + "description": "Generic paginated response wrapper with HATEOAS links", + "required": [ + "data", + "page", + "pageSize", + "total", + "totalPages", + "links" + ], + "properties": { + "data": { + "type": "array", + "items": { + "type": "object", + "description": "A single release announcement. Sources write these; the inbox reads them.", + "required": [ + "id", + "seriesId", + "seriesTitle", + "sourceId", + "externalReleaseId", + "payloadUrl", + "confidence", + "state", + "observedAt", + "createdAt" + ], + "properties": { + "chapter": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Decimal supports `12.5` etc." + }, + "confidence": { + "type": "number", + "format": "double" + }, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "externalReleaseId": { + "type": "string", + "description": "Plugin-stable identity for the release (used for dedup).", + "example": "nyaa:1234567" + }, + "formatHints": { + "description": "Sparse `{ \"jxl\": true, \"container\": \"cbz\", ... }`." + }, + "groupOrUploader": { + "type": [ + "string", + "null" + ], + "description": "Group/scanlator/uploader attribution." + }, + "id": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440a00" + }, + "infoHash": { + "type": [ + "string", + "null" + ], + "description": "Torrent info_hash, if applicable." + }, + "language": { + "type": [ + "string", + "null" + ] + }, + "mediaUrl": { + "type": [ + "string", + "null" + ], + "description": "Optional second URL for direct fetch (`.torrent`, `magnet:`, DDL\nlink). 
Travels paired with [`Self::media_url_kind`]." + }, + "mediaUrlKind": { + "type": [ + "string", + "null" + ], + "description": "Classifies what `media_url` points at: `torrent` | `magnet` |\n`direct` | `other`. The frontend uses this to pick a kind-specific\nicon next to the standard external-link icon." + }, + "metadata": { + "description": "Source-specific extras (free-form)." + }, + "observedAt": { + "type": "string", + "format": "date-time" + }, + "payloadUrl": { + "type": "string", + "description": "Where to acquire the release. Conventionally a human-readable\nlanding page (Nyaa view page, MangaUpdates release page)." + }, + "seriesId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440002" + }, + "seriesTitle": { + "type": "string", + "description": "Series title at the time of the response. Joined from the `series`\ntable so the inbox UI can render a human-readable label without a\nfollow-up fetch. Falls back to the empty string only if the series\nrow was hard-deleted between the join and the read.", + "example": "Chainsaw Man" + }, + "sourceId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440b00" + }, + "state": { + "type": "string", + "description": "`announced` | `dismissed` | `marked_acquired` | `hidden`." 
+ }, + "volume": { + "type": [ + "integer", + "null" + ], + "format": "int32" + } + } + }, + "description": "The data items for this page" + }, + "links": { + "$ref": "#/components/schemas/PaginationLinks", + "description": "HATEOAS navigation links" + }, + "page": { + "type": "integer", + "format": "int64", + "description": "Current page number (1-indexed)", + "example": 1, + "minimum": 0 + }, + "pageSize": { + "type": "integer", + "format": "int64", + "description": "Number of items per page", + "example": 50, + "minimum": 0 + }, + "total": { + "type": "integer", + "format": "int64", + "description": "Total number of items across all pages", + "example": 150, + "minimum": 0 + }, + "totalPages": { + "type": "integer", + "format": "int64", + "description": "Total number of pages", + "example": 3, + "minimum": 0 + } + } + }, "PaginatedResponse_SeriesDto": { "type": "object", "description": "Generic paginated response wrapper with HATEOAS links", @@ -28474,6 +30057,32 @@ "description": "When the series was last updated", "example": "2024-01-15T10:30:00Z" }, + "upstreamChapterGap": { + "type": [ + "number", + "null" + ], + "format": "float", + "description": "Difference between the upstream original-language chapter count\n(`series_metadata.total_chapter_count`, supplied by metadata\nproviders like MangaBaka or AniList) and the highest locally-owned\nchapter (`local_max_chapter`).\n\nAlways `None` unless the series is tracked AND `track_chapters` is\nenabled AND the provider count is populated AND the rounded-to-1-\ndecimal gap is positive. **This is an informational signal, not a\nrelease announcement** — Phase 6's MangaUpdates plugin owns the\ntranslation-release feed.", + "example": 3.0 + }, + "upstreamGapProvider": { + "type": [ + "string", + "null" + ], + "description": "Display name of the metadata provider that supplied the upstream\ncounts (e.g., \"MangaBaka\", \"AniList\"). 
Set whenever at least one of\n`upstream_chapter_gap` / `upstream_volume_gap` is populated. Used by\nthe Phase 7 badge tooltip.", + "example": "MangaBaka" + }, + "upstreamVolumeGap": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Difference between the upstream original-language volume count\n(`series_metadata.total_volume_count`) and the highest locally-owned\nvolume (`local_max_volume`). Same suppression rules as\n`upstream_chapter_gap`, gated on `track_volumes`.", + "example": 1 + }, "volumesOwned": { "type": [ "integer", @@ -29624,6 +31233,10 @@ }, "description": "Content types this plugin can provide metadata for (e.g., [\"series\", \"book\"])" }, + "releaseSource": { + "type": "boolean", + "description": "Whether the plugin declares the `release_source` capability (announces\nnew chapter / volume releases for tracked series)." + }, "userReadSync": { "type": "boolean", "description": "Can sync user reading progress" @@ -30705,6 +32318,24 @@ } } }, + "PollNowResponse": { + "type": "object", + "description": "Response shape from the `poll-now` endpoint.\n\n`status` is `enqueued` after a successful enqueue. The `message` carries\nthe task ID for follow-up (`tasks.id`); the task runs asynchronously, so\nthis response does not reflect poll outcome.", + "required": [ + "status", + "message" + ], + "properties": { + "message": { + "type": "string", + "description": "Human-readable message; includes the enqueued task ID." + }, + "status": { + "type": "string", + "description": "`enqueued` on success." 
+ } + } + }, "PreviewScanRequest": { "type": "object", "description": "Preview scan request", @@ -31116,388 +32747,735 @@ }, "startedAt": { "type": "string", - "format": "date-time", - "description": "When reading started", - "example": "2024-01-10T14:30:00Z" + "format": "date-time", + "description": "When reading started", + "example": "2024-01-10T14:30:00Z" + }, + "updatedAt": { + "type": "string", + "format": "date-time", + "description": "When progress was last updated", + "example": "2024-01-15T18:45:00Z" + }, + "userId": { + "type": "string", + "format": "uuid", + "description": "User ID", + "example": "550e8400-e29b-41d4-a716-446655440001" + } + } + }, + "ReadingProgress": { + "type": "object", + "description": "Reading progress information for a publication\n\nCustom extension for tracking reading progress in OPDS 2.0.\nCompatible with reading apps that support progress sync.", + "required": [ + "currentPage", + "totalPages", + "progressPercent", + "isCompleted" + ], + "properties": { + "currentPage": { + "type": "integer", + "format": "int32", + "description": "Current page (1-indexed)" + }, + "isCompleted": { + "type": "boolean", + "description": "Whether the book has been completed" + }, + "lastReadAt": { + "type": [ + "string", + "null" + ], + "format": "date-time", + "description": "Last time progress was updated" + }, + "progressPercent": { + "type": "number", + "format": "double", + "description": "Progress as a percentage (0.0 - 100.0)" + }, + "totalPages": { + "type": "integer", + "format": "int32", + "description": "Total number of pages in the book" + } + } + }, + "RecommendationDto": { + "type": "object", + "description": "A single recommendation for the user", + "required": [ + "externalId", + "title", + "score", + "reason" + ], + "properties": { + "basedOn": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Titles that influenced this recommendation" + }, + "codexSeriesId": { + "type": [ + "string", + "null" + ], + 
"description": "Codex series ID if matched to an existing series" + }, + "countryOfOrigin": { + "type": [ + "string", + "null" + ], + "description": "Country of origin ISO code (e.g., \"JP\", \"KR\", \"CN\")" + }, + "coverUrl": { + "type": [ + "string", + "null" + ], + "description": "Cover image URL" + }, + "externalId": { + "type": "string", + "description": "External ID on the source service" + }, + "externalUrl": { + "type": [ + "string", + "null" + ], + "description": "URL to the entry on the external service" + }, + "format": { + "type": [ + "string", + "null" + ], + "description": "Media format (e.g., \"MANGA\", \"NOVEL\", \"ONE_SHOT\")" + }, + "genres": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Genres" + }, + "inCodex": { + "type": "boolean", + "description": "Whether this series exists in the Codex library (matched via external IDs)" + }, + "inLibrary": { + "type": "boolean", + "description": "Whether this series is already in the user's library (as reported by the plugin)" + }, + "popularity": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Popularity ranking/count on the source service" + }, + "rating": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Average user rating on the source service (0-100 scale)" + }, + "reason": { + "type": "string", + "description": "Human-readable reason for this recommendation" + }, + "score": { + "type": "number", + "format": "double", + "description": "Confidence/relevance score (0.0 to 1.0)" + }, + "startYear": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Year the series started" + }, + "status": { + "type": [ + "string", + "null" + ], + "description": "Publication status (ongoing, ended, hiatus, abandoned, unknown)" + }, + "summary": { + "type": [ + "string", + "null" + ], + "description": "Summary/description" + }, + "tags": { + "type": [ + "array", + "null" + ], + "items": { + "$ref": 
"#/components/schemas/RecommendationTagDto" + }, + "description": "Tags with relevance rank" + }, + "title": { + "type": "string", + "description": "Title of the recommended series/book" + }, + "totalChapterCount": { + "type": [ + "number", + "null" + ], + "format": "float", + "description": "Total expected number of chapters in the series. May be fractional." + }, + "totalVolumeCount": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Total expected number of volumes in the series." + } + } + }, + "RecommendationTagDto": { + "type": "object", + "description": "A tag with relevance rank from the source service", + "required": [ + "name", + "rank", + "category" + ], + "properties": { + "category": { + "type": "string", + "description": "Tag category (e.g., \"Genre\", \"Theme\")" + }, + "name": { + "type": "string", + "description": "Tag name (e.g., \"Isekai\", \"Gore\")" }, - "updatedAt": { + "rank": { + "type": "integer", + "format": "int32", + "description": "Relevance rank (0-100)" + } + } + }, + "RecommendationsRefreshResponse": { + "type": "object", + "description": "Response from POST /api/v1/user/recommendations/refresh", + "required": [ + "taskId", + "message" + ], + "properties": { + "message": { "type": "string", - "format": "date-time", - "description": "When progress was last updated", - "example": "2024-01-15T18:45:00Z" + "description": "Human-readable status message" }, - "userId": { + "taskId": { "type": "string", "format": "uuid", - "description": "User ID", - "example": "550e8400-e29b-41d4-a716-446655440001" + "description": "Task ID for tracking the refresh operation" } } }, - "ReadingProgress": { + "RecommendationsResponse": { "type": "object", - "description": "Reading progress information for a publication\n\nCustom extension for tracking reading progress in OPDS 2.0.\nCompatible with reading apps that support progress sync.", + "description": "Response from GET /api/v1/user/recommendations", "required": [ - 
"currentPage", - "totalPages", - "progressPercent", - "isCompleted" + "recommendations", + "pluginId", + "pluginName" ], "properties": { - "currentPage": { - "type": "integer", - "format": "int32", - "description": "Current page (1-indexed)" - }, - "isCompleted": { + "cached": { "type": "boolean", - "description": "Whether the book has been completed" + "description": "Whether these are cached results" }, - "lastReadAt": { + "generatedAt": { "type": [ "string", "null" ], - "format": "date-time", - "description": "Last time progress was updated" + "description": "When these recommendations were generated" }, - "progressPercent": { - "type": "number", - "format": "double", - "description": "Progress as a percentage (0.0 - 100.0)" + "pluginId": { + "type": "string", + "format": "uuid", + "description": "Plugin that provided these recommendations" }, - "totalPages": { - "type": "integer", - "format": "int32", - "description": "Total number of pages in the book" - } - } - }, - "RecommendationDto": { - "type": "object", - "description": "A single recommendation for the user", - "required": [ - "externalId", - "title", - "score", - "reason" - ], - "properties": { - "basedOn": { + "pluginName": { + "type": "string", + "description": "Plugin display name" + }, + "recommendations": { "type": "array", "items": { - "type": "string" + "$ref": "#/components/schemas/RecommendationDto" }, - "description": "Titles that influenced this recommendation" + "description": "Personalized recommendations" }, - "codexSeriesId": { + "taskId": { "type": [ "string", "null" ], - "description": "Codex series ID if matched to an existing series" + "format": "uuid", + "description": "ID of the running/pending background task, if any" }, - "countryOfOrigin": { + "taskStatus": { "type": [ "string", "null" ], - "description": "Country of origin ISO code (e.g., \"JP\", \"KR\", \"CN\")" + "description": "Status of a running/pending background task (\"pending\" or \"running\"), if any" + } + } + }, + 
"RefreshScope": { + "type": "string", + "description": "Scope of a metadata refresh job.\n\nPhase 9 only honours [`RefreshScope::SeriesOnly`] at runtime. The\nother variants are schema-accepted but rejected by the validator.", + "enum": [ + "series_only", + "books_only", + "series_and_books" + ] + }, + "RegisterRequest": { + "type": "object", + "description": "Register request", + "required": [ + "username", + "email", + "password" + ], + "properties": { + "email": { + "type": "string", + "description": "Email address", + "example": "john@example.com" }, - "coverUrl": { + "password": { + "type": "string", + "description": "Password", + "example": "securePassword123!" + }, + "username": { + "type": "string", + "description": "Username", + "example": "johndoe" + } + } + }, + "RegisterResponse": { + "type": "object", + "description": "Register response", + "required": [ + "user" + ], + "properties": { + "accessToken": { "type": [ "string", "null" ], - "description": "Cover image URL" + "description": "JWT access token (if email confirmation not required)", + "example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..." 
}, - "externalId": { - "type": "string", - "description": "External ID on the source service" + "expiresIn": { + "type": [ + "integer", + "null" + ], + "format": "int64", + "description": "Token expiry in seconds", + "example": 86400, + "minimum": 0 }, - "externalUrl": { + "message": { "type": [ "string", "null" ], - "description": "URL to the entry on the external service" + "description": "Message about email verification if required", + "example": "Please check your email to verify your account" }, - "format": { + "tokenType": { "type": [ "string", "null" ], - "description": "Media format (e.g., \"MANGA\", \"NOVEL\", \"ONE_SHOT\")" + "description": "Token type (always \"Bearer\")", + "example": "Bearer" }, - "genres": { + "user": { + "$ref": "#/components/schemas/UserInfo", + "description": "User information" + } + } + }, + "ReleaseFacetsResponse": { + "type": "object", + "description": "Response shape for `GET /api/v1/releases/facets`.\n\nEach list reflects the distinct values present in the ledger under the\n**other** active filters (Solr-style facet exclusion), so dropdowns\nnever offer combinations that would yield zero results. 
The frontend\nuses these to populate cascading filter Select inputs without forcing\nthe user to type UUIDs.", + "required": [ + "languages", + "libraries", + "series" + ], + "properties": { + "languages": { "type": "array", "items": { - "type": "string" - }, - "description": "Genres" + "$ref": "#/components/schemas/ReleaseLanguageFacetDto" + } }, - "inCodex": { - "type": "boolean", - "description": "Whether this series exists in the Codex library (matched via external IDs)" + "libraries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReleaseLibraryFacetDto" + } }, - "inLibrary": { - "type": "boolean", - "description": "Whether this series is already in the user's library (as reported by the plugin)" + "series": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReleaseSeriesFacetDto" + } + } + } + }, + "ReleaseLanguageFacetDto": { + "type": "object", + "description": "One language option in the inbox facets response.", + "required": [ + "language", + "count" + ], + "properties": { + "count": { + "type": "integer", + "format": "int64", + "minimum": 0 }, - "popularity": { + "language": { + "type": "string" + } + } + }, + "ReleaseLedgerEntryDto": { + "type": "object", + "description": "A single release announcement. Sources write these; the inbox reads them.", + "required": [ + "id", + "seriesId", + "seriesTitle", + "sourceId", + "externalReleaseId", + "payloadUrl", + "confidence", + "state", + "observedAt", + "createdAt" + ], + "properties": { + "chapter": { "type": [ - "integer", + "number", "null" ], - "format": "int32", - "description": "Popularity ranking/count on the source service" + "format": "double", + "description": "Decimal supports `12.5` etc." 
}, - "rating": { + "confidence": { + "type": "number", + "format": "double" + }, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "externalReleaseId": { + "type": "string", + "description": "Plugin-stable identity for the release (used for dedup).", + "example": "nyaa:1234567" + }, + "formatHints": { + "description": "Sparse `{ \"jxl\": true, \"container\": \"cbz\", ... }`." + }, + "groupOrUploader": { "type": [ - "integer", + "string", "null" ], - "format": "int32", - "description": "Average user rating on the source service (0-100 scale)" + "description": "Group/scanlator/uploader attribution." }, - "reason": { - "type": "string", - "description": "Human-readable reason for this recommendation" - }, - "score": { - "type": "number", - "format": "double", - "description": "Confidence/relevance score (0.0 to 1.0)" + "id": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440a00" }, - "startYear": { + "infoHash": { "type": [ - "integer", + "string", "null" ], - "format": "int32", - "description": "Year the series started" + "description": "Torrent info_hash, if applicable." }, - "status": { + "language": { "type": [ "string", "null" - ], - "description": "Publication status (ongoing, ended, hiatus, abandoned, unknown)" + ] }, - "summary": { + "mediaUrl": { "type": [ "string", "null" ], - "description": "Summary/description" + "description": "Optional second URL for direct fetch (`.torrent`, `magnet:`, DDL\nlink). Travels paired with [`Self::media_url_kind`]." }, - "tags": { + "mediaUrlKind": { "type": [ - "array", + "string", "null" ], - "items": { - "$ref": "#/components/schemas/RecommendationTagDto" - }, - "description": "Tags with relevance rank" + "description": "Classifies what `media_url` points at: `torrent` | `magnet` |\n`direct` | `other`. The frontend uses this to pick a kind-specific\nicon next to the standard external-link icon." 
}, - "title": { + "metadata": { + "description": "Source-specific extras (free-form)." + }, + "observedAt": { "type": "string", - "description": "Title of the recommended series/book" + "format": "date-time" }, - "totalChapterCount": { - "type": [ - "number", - "null" - ], - "format": "float", - "description": "Total expected number of chapters in the series. May be fractional." + "payloadUrl": { + "type": "string", + "description": "Where to acquire the release. Conventionally a human-readable\nlanding page (Nyaa view page, MangaUpdates release page)." }, - "totalVolumeCount": { + "seriesId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440002" + }, + "seriesTitle": { + "type": "string", + "description": "Series title at the time of the response. Joined from the `series`\ntable so the inbox UI can render a human-readable label without a\nfollow-up fetch. Falls back to the empty string only if the series\nrow was hard-deleted between the join and the read.", + "example": "Chainsaw Man" + }, + "sourceId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440b00" + }, + "state": { + "type": "string", + "description": "`announced` | `dismissed` | `marked_acquired` | `hidden`." + }, + "volume": { "type": [ "integer", "null" ], - "format": "int32", - "description": "Total expected number of volumes in the series." 
+ "format": "int32" } } }, - "RecommendationTagDto": { + "ReleaseLedgerListResponse": { "type": "object", - "description": "A tag with relevance rank from the source service", "required": [ - "name", - "rank", - "category" + "entries" ], "properties": { - "category": { - "type": "string", - "description": "Tag category (e.g., \"Genre\", \"Theme\")" + "entries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReleaseLedgerEntryDto" + } + } + } + }, + "ReleaseLibraryFacetDto": { + "type": "object", + "description": "One library option in the inbox facets response.", + "required": [ + "libraryId", + "libraryName", + "count" + ], + "properties": { + "count": { + "type": "integer", + "format": "int64", + "minimum": 0 }, - "name": { + "libraryId": { "type": "string", - "description": "Tag name (e.g., \"Isekai\", \"Gore\")" + "format": "uuid" }, - "rank": { - "type": "integer", - "format": "int32", - "description": "Relevance rank (0-100)" + "libraryName": { + "type": "string" } } }, - "RecommendationsRefreshResponse": { + "ReleaseSeriesFacetDto": { "type": "object", - "description": "Response from POST /api/v1/user/recommendations/refresh", + "description": "One series option in the inbox facets response. 
Carries the joined\n`library_id` and `library_name` so the frontend can group the dropdown\nby library without a follow-up call.", "required": [ - "taskId", - "message" + "seriesId", + "seriesTitle", + "libraryId", + "libraryName", + "count" ], "properties": { - "message": { + "count": { + "type": "integer", + "format": "int64", + "description": "Number of ledger rows matching the active filter for this series.", + "minimum": 0 + }, + "libraryId": { "type": "string", - "description": "Human-readable status message" + "format": "uuid" }, - "taskId": { + "libraryName": { + "type": "string" + }, + "seriesId": { "type": "string", - "format": "uuid", - "description": "Task ID for tracking the refresh operation" + "format": "uuid" + }, + "seriesTitle": { + "type": "string" } } }, - "RecommendationsResponse": { + "ReleaseSourceDto": { "type": "object", - "description": "Response from GET /api/v1/user/recommendations", + "description": "A configured release source (one row per logical feed).", "required": [ - "recommendations", + "id", "pluginId", - "pluginName" + "sourceKey", + "displayName", + "kind", + "enabled", + "effectiveCronSchedule", + "createdAt", + "updatedAt" ], "properties": { - "cached": { - "type": "boolean", - "description": "Whether these are cached results" + "config": { + "description": "Source-specific configuration (free-form)." }, - "generatedAt": { + "createdAt": { + "type": "string", + "format": "date-time" + }, + "cronSchedule": { "type": [ "string", "null" ], - "description": "When these recommendations were generated" + "description": "Per-source cron override (5-field POSIX cron). 
`null` when the row\ninherits the server-wide `release_tracking.default_cron_schedule`.\nAlways present in the response (not omitted on null) so clients can\ndistinguish \"inheriting\" from \"field missing.\"" }, - "pluginId": { - "type": "string", - "format": "uuid", - "description": "Plugin that provided these recommendations" + "displayName": { + "type": "string" }, - "pluginName": { + "effectiveCronSchedule": { "type": "string", - "description": "Plugin display name" + "description": "The cron expression actually used by the scheduler for this source:\nthe row's `cron_schedule` if set, otherwise the resolved server-wide\ndefault. Lets the UI display \"Daily (Default)\" without needing to\nfetch the global setting separately." }, - "recommendations": { - "type": "array", - "items": { - "$ref": "#/components/schemas/RecommendationDto" - }, - "description": "Personalized recommendations" + "enabled": { + "type": "boolean" }, - "taskId": { + "etag": { "type": [ "string", "null" ], - "format": "uuid", - "description": "ID of the running/pending background task, if any" + "description": "Opaque etag/cursor used for conditional fetches." }, - "taskStatus": { - "type": [ - "string", - "null" - ], - "description": "Status of a running/pending background task (\"pending\" or \"running\"), if any" - } - } - }, - "RefreshScope": { - "type": "string", - "description": "Scope of a metadata refresh job.\n\nPhase 9 only honours [`RefreshScope::SeriesOnly`] at runtime. 
The\nother variants are schema-accepted but rejected by the validator.", - "enum": [ - "series_only", - "books_only", - "series_and_books" - ] - }, - "RegisterRequest": { - "type": "object", - "description": "Register request", - "required": [ - "username", - "email", - "password" - ], - "properties": { - "email": { + "id": { "type": "string", - "description": "Email address", - "example": "john@example.com" + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440b00" }, - "password": { + "kind": { "type": "string", - "description": "Password", - "example": "securePassword123!" + "description": "`rss-uploader` | `rss-series` | `api-feed` | `metadata-feed` | `metadata-piggyback`." }, - "username": { - "type": "string", - "description": "Username", - "example": "johndoe" - } - } - }, - "RegisterResponse": { - "type": "object", - "description": "Register response", - "required": [ - "user" - ], - "properties": { - "accessToken": { + "lastError": { "type": [ "string", "null" - ], - "description": "JWT access token (if email confirmation not required)", - "example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..." + ] }, - "expiresIn": { + "lastErrorAt": { "type": [ - "integer", + "string", "null" ], - "format": "int64", - "description": "Token expiry in seconds", - "example": 86400, - "minimum": 0 + "format": "date-time" }, - "message": { + "lastPolledAt": { "type": [ "string", "null" ], - "description": "Message about email verification if required", - "example": "Please check your email to verify your account" + "format": "date-time" }, - "tokenType": { + "lastSummary": { "type": [ "string", "null" ], - "description": "Token type (always \"Bearer\")", - "example": "Bearer" + "description": "One-line summary of the most recent successful poll. Surfaced under\nthe row's status badge so users can see *why* a poll returned no\nannouncements without grepping logs. NULL until the first successful\npoll on the source." 
}, - "user": { - "$ref": "#/components/schemas/UserInfo", - "description": "User information" + "pluginId": { + "type": "string", + "description": "Owning plugin id, or `core` for in-core synthetic sources.", + "example": "release-nyaa" + }, + "sourceKey": { + "type": "string", + "description": "Plugin-defined unique key.", + "example": "nyaa:user:tsuna69" + }, + "updatedAt": { + "type": "string", + "format": "date-time" + } + } + }, + "ReleaseSourceListResponse": { + "type": "object", + "required": [ + "sources" + ], + "properties": { + "sources": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ReleaseSourceDto" + } } } }, @@ -32114,6 +34092,21 @@ } } }, + "ResetReleaseSourceResponse": { + "type": "object", + "description": "Response shape from the `reset` endpoint.\n\nReturns the number of ledger rows removed so callers can show a\nconfirmation toast. The source's transient poll state (etag,\nlast_polled_at, last_error, last_summary) is also cleared, but those\nare not counted here.", + "required": [ + "deletedLedgerEntries" + ], + "properties": { + "deletedLedgerEntries": { + "type": "integer", + "format": "int64", + "description": "Number of `release_ledger` rows deleted for this source.", + "minimum": 0 + } + } + }, "RetryAllErrorsRequest": { "type": "object", "description": "Request body for bulk retrying all book errors", @@ -32435,6 +34428,64 @@ } } }, + "SeriesAliasDto": { + "type": "object", + "description": "Title alias used by release-source plugins to match incoming releases by\ntitle (Nyaa, MangaUpdates without an external ID, etc.).", + "required": [ + "id", + "seriesId", + "alias", + "normalized", + "source", + "createdAt" + ], + "properties": { + "alias": { + "type": "string", + "description": "Alias as entered (preserves casing/punctuation).", + "example": "My Hero Academia" + }, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "id": { + "type": "string", + "format": "uuid", + "description": "Alias row ID.", + 
"example": "550e8400-e29b-41d4-a716-446655440100" + }, + "normalized": { + "type": "string", + "description": "Lowercased + punctuation-stripped form used for matching.", + "example": "my hero academia" + }, + "seriesId": { + "type": "string", + "format": "uuid", + "example": "550e8400-e29b-41d4-a716-446655440002" + }, + "source": { + "type": "string", + "description": "`metadata` (auto-derived) | `manual` (user-entered).", + "example": "manual" + } + } + }, + "SeriesAliasListResponse": { + "type": "object", + "required": [ + "aliases" + ], + "properties": { + "aliases": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SeriesAliasDto" + } + } + } + }, "SeriesAverageRatingResponse": { "type": "object", "description": "Response containing the average community rating for a series", @@ -32924,6 +34975,32 @@ "description": "When the series was last updated", "example": "2024-01-15T10:30:00Z" }, + "upstreamChapterGap": { + "type": [ + "number", + "null" + ], + "format": "float", + "description": "Difference between the upstream original-language chapter count\n(`series_metadata.total_chapter_count`, supplied by metadata\nproviders like MangaBaka or AniList) and the highest locally-owned\nchapter (`local_max_chapter`).\n\nAlways `None` unless the series is tracked AND `track_chapters` is\nenabled AND the provider count is populated AND the rounded-to-1-\ndecimal gap is positive. **This is an informational signal, not a\nrelease announcement** — Phase 6's MangaUpdates plugin owns the\ntranslation-release feed.", + "example": 3.0 + }, + "upstreamGapProvider": { + "type": [ + "string", + "null" + ], + "description": "Display name of the metadata provider that supplied the upstream\ncounts (e.g., \"MangaBaka\", \"AniList\"). Set whenever at least one of\n`upstream_chapter_gap` / `upstream_volume_gap` is populated. 
Used by\nthe Phase 7 badge tooltip.", + "example": "MangaBaka" + }, + "upstreamVolumeGap": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Difference between the upstream original-language volume count\n(`series_metadata.total_volume_count`) and the highest locally-owned\nvolume (`local_max_volume`). Same suppression rules as\n`upstream_chapter_gap`, gated on `track_volumes`.", + "example": 1 + }, "volumesOwned": { "type": [ "integer", @@ -33504,6 +35581,93 @@ "custom" ] }, + "SeriesTrackingDto": { + "type": "object", + "description": "Per-series release-tracking configuration.\n\nReturned even for untracked series — the row defaults to `tracked: false`\nwith conservative defaults so the frontend can render the panel without\nspecial-casing missing rows.", + "required": [ + "seriesId", + "tracked", + "trackChapters", + "trackVolumes", + "createdAt", + "updatedAt" + ], + "properties": { + "confidenceThresholdOverride": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Per-series override of the server's confidence threshold (0.0 - 1.0)." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "When the row was created (epoch when virtual)." + }, + "languages": { + "type": [ + "array", + "null" + ], + "items": { + "type": "string" + }, + "description": "Per-series language preference (ISO 639-1 codes, e.g. `[\"en\", \"es\"]`).\n`null` means \"fall back to the server-wide default (`release_tracking.default_languages`).\"\nUsed by aggregation feeds (e.g. MangaUpdates) that emit candidates in many languages." + }, + "latestKnownChapter": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Latest known external chapter (supports decimals like 12.5)." + }, + "latestKnownVolume": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Latest known external volume." 
+ }, + "pollIntervalOverrideS": { + "type": [ + "integer", + "null" + ], + "format": "int32", + "description": "Per-series override of the source poll interval (seconds)." + }, + "seriesId": { + "type": "string", + "format": "uuid", + "description": "Series ID this config belongs to.", + "example": "550e8400-e29b-41d4-a716-446655440002" + }, + "trackChapters": { + "type": "boolean", + "description": "Whether to announce new chapters." + }, + "trackVolumes": { + "type": "boolean", + "description": "Whether to announce new volumes." + }, + "tracked": { + "type": "boolean", + "description": "Whether release tracking is enabled." + }, + "updatedAt": { + "type": "string", + "format": "date-time", + "description": "When the row was last updated (epoch when virtual)." + }, + "volumeChapterMap": { + "description": "Sparse map of `{ \"<volume>\": { \"first\": ch, \"last\": ch } }`." + } + } + }, "SeriesUpdateResponse": { "type": "object", "description": "Response for series update", @@ -35383,6 +37547,60 @@ "format": "uuid" } } + }, + { + "type": "object", + "description": "Backfill release-tracking aliases from existing series metadata.\n\nWalks series in scope, harvests the canonical title plus alternate titles\nfrom `series_metadata` and `series_alternate_titles`, and seeds them as\n`metadata`-source aliases in `series_aliases`. Idempotent — re-runs do\nnot create duplicates. Does NOT enable tracking; that stays explicit.", + "required": [ + "type" + ], + "properties": { + "libraryId": { + "type": [ + "string", + "null" + ], + "format": "uuid", + "description": "If set, scope to this library; otherwise all series." + }, + "seriesIds": { + "type": [ + "array", + "null" + ], + "items": { + "type": "string", + "format": "uuid" + }, + "description": "If set, scope to these specific series (takes precedence over library_id)." 
+ }, + "type": { + "type": "string", + "enum": [ + "backfill_tracking_from_metadata" + ] + } + } + }, + { + "type": "object", + "description": "Poll a single `release_sources` row for new releases.\n\nResolves the source's owning plugin, calls `releases/poll` over the\nexisting plugin host, runs returned candidates through the matcher +\nthreshold, and writes accepted candidates to the ledger. On success\nupdates `last_polled_at` (and optionally `etag`); on failure records\n`last_error`. Idempotent: ledger writes dedup on\n`(source_id, external_release_id)` and `info_hash`.", + "required": [ + "sourceId", + "type" + ], + "properties": { + "sourceId": { + "type": "string", + "format": "uuid" + }, + "type": { + "type": "string", + "enum": [ + "poll_release_source" + ] + } + } } ], "description": "Task types supported by the distributed task queue" @@ -36475,6 +38693,108 @@ } } }, + "UpdateReleaseLedgerEntryRequest": { + "type": "object", + "description": "PATCH payload for ledger row state transitions.\n\nOnly `state` is patchable from the API today; the rest of the row is\nsource-controlled. `state` is validated against the canonical set:\n`announced` | `dismissed` | `marked_acquired` | `hidden`.", + "properties": { + "state": { + "type": [ + "string", + "null" + ], + "description": "New state. See [`ReleaseLedgerEntryDto::state`] for allowed values." + } + } + }, + "UpdateReleaseSourceRequest": { + "type": "object", + "description": "PATCH payload for a release source. All fields optional; omit to leave alone.\n\n`cron_schedule` uses double-Option semantics:\n- field absent (`None`): leave the row's cron_schedule unchanged\n- explicit `null` (`Some(None)`) / `\"\"` / `\" \"`: clear the override\n (revert to inheriting the server-wide\n `release_tracking.default_cron_schedule`)\n- `Some(Some(\"0 */6 * * *\"))`: set a per-source override", + "properties": { + "cronSchedule": { + "type": [ + "string", + "null" + ], + "description": "5-field POSIX cron expression. 
Use `null` (or empty string) to\nclear the override and inherit the server-wide default." + }, + "displayName": { + "type": [ + "string", + "null" + ] + }, + "enabled": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "UpdateSeriesTrackingRequest": { + "type": "object", + "description": "PATCH payload for tracking config. All fields are optional:\nomit a field to leave it untouched. Use a JSON `null` on a nullable field\nto clear it explicitly.", + "properties": { + "confidenceThresholdOverride": { + "type": [ + "number", + "null" + ], + "format": "double" + }, + "languages": { + "type": [ + "array", + "null" + ], + "items": { + "type": "string" + }, + "description": "ISO 639-1 codes; `null` clears (falls back to server-wide default)." + }, + "latestKnownChapter": { + "type": [ + "number", + "null" + ], + "format": "double", + "description": "Use `Some(null)` to clear, `Some(<value>)` to set, omit to leave alone." + }, + "latestKnownVolume": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "pollIntervalOverrideS": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "trackChapters": { + "type": [ + "boolean", + "null" + ] + }, + "trackVolumes": { + "type": [ + "boolean", + "null" + ] + }, + "tracked": { + "type": [ + "boolean", + "null" + ] + }, + "volumeChapterMap": {} + } + }, "UpdateSettingRequest": { "type": "object", "description": "Update setting request", @@ -37359,6 +39679,14 @@ "name": "Series", "description": "Series browsing and search endpoints" }, + { + "name": "Tracking", + "description": "Release-tracking config and matcher aliases" + }, + { + "name": "Releases", + "description": "Release ledger (announcements) and source admin" + }, { "name": "Books", "description": "Book details and metadata endpoints" @@ -37484,6 +39812,8 @@ "tags": [ "Libraries", "Series", + "Tracking", + "Releases", "Books", "Pages" ] diff --git a/web/src/App.tsx b/web/src/App.tsx index a50c9cbb..399d55a1 100644 --- a/web/src/App.tsx +++ 
b/web/src/App.tsx @@ -22,6 +22,7 @@ import { OidcComplete } from "@/pages/OidcComplete"; import { Reader } from "@/pages/Reader"; import { Recommendations } from "@/pages/Recommendations"; import { Register } from "@/pages/Register"; +import { ReleasesInbox } from "@/pages/ReleasesInbox"; import { SearchResults } from "@/pages/SearchResults"; import { SeriesDetail } from "@/pages/SeriesDetail"; import { Setup } from "@/pages/Setup"; @@ -35,6 +36,7 @@ import { PluginStorageSettings, PluginsSettings, ProfileSettings, + ReleaseTrackingSettings, SeriesExportsSettings, ServerSettings, SharingTagsSettings, @@ -186,6 +188,17 @@ function App() { } /> + <Route + path="/releases" + element={ + <ProtectedRoute> + <AppLayout> + <ReleasesInbox /> + </AppLayout> + </ProtectedRoute> + } + /> + <Route path="/libraries" element={ @@ -420,6 +433,17 @@ function App() { } /> + <Route + path="/settings/release-tracking" + element={ + <ProtectedRoute> + <AppLayout> + <ReleaseTrackingSettings /> + </AppLayout> + </ProtectedRoute> + } + /> + <Route path="*" element={<Navigate to="/" replace />} /> </Routes> </BrowserRouter> diff --git a/web/src/api/releases.ts b/web/src/api/releases.ts new file mode 100644 index 00000000..be8b2ff6 --- /dev/null +++ b/web/src/api/releases.ts @@ -0,0 +1,218 @@ +import type { components } from "@/types/api.generated"; +import { api } from "./client"; + +export type ReleaseLedgerEntry = components["schemas"]["ReleaseLedgerEntryDto"]; +export type ReleaseSource = components["schemas"]["ReleaseSourceDto"]; +export type UpdateReleaseLedgerEntryRequest = + components["schemas"]["UpdateReleaseLedgerEntryRequest"]; +export type UpdateReleaseSourceRequest = + components["schemas"]["UpdateReleaseSourceRequest"]; +export type PaginatedReleases = + components["schemas"]["PaginatedResponse_ReleaseLedgerEntryDto"]; +export type ReleaseTrackingApplicability = + components["schemas"]["ApplicabilityResponse"]; +export type ResetReleaseSourceResponse = + 
components["schemas"]["ResetReleaseSourceResponse"]; +export type ReleaseFacets = components["schemas"]["ReleaseFacetsResponse"]; +export type ReleaseSeriesFacet = components["schemas"]["ReleaseSeriesFacetDto"]; +export type ReleaseLibraryFacet = + components["schemas"]["ReleaseLibraryFacetDto"]; +export type ReleaseLanguageFacet = + components["schemas"]["ReleaseLanguageFacetDto"]; +export type BulkReleaseAction = components["schemas"]["BulkReleaseAction"]; +export type BulkReleaseActionRequest = + components["schemas"]["BulkReleaseActionRequest"]; +export type BulkReleaseActionResponse = + components["schemas"]["BulkReleaseActionResponse"]; +export type DeleteReleaseResponse = + components["schemas"]["DeleteReleaseResponse"]; + +export interface ReleaseInboxParams { + /** State filter. Use `"all"` for no state restriction; defaults to `"announced"` server-side. */ + state?: string; + seriesId?: string; + sourceId?: string; + language?: string; + libraryId?: string; + page?: number; + pageSize?: number; +} + +export interface ReleaseFacetsParams { + state?: string; + seriesId?: string; + sourceId?: string; + language?: string; + libraryId?: string; +} + +export interface SeriesReleaseListParams { + state?: string; + page?: number; + pageSize?: number; +} + +function buildQuery(params: object) { + const search = new URLSearchParams(); + for (const [key, value] of Object.entries(params)) { + if (value !== undefined && value !== null && value !== "") { + search.append(key, String(value)); + } + } + const qs = search.toString(); + return qs ? 
`?${qs}` : ""; +} + +export const releasesApi = { + listInbox: async ( + params: ReleaseInboxParams = {}, + ): Promise<PaginatedReleases> => { + const response = await api.get<PaginatedReleases>( + `/releases${buildQuery(params)}`, + ); + return response.data; + }, + + listForSeries: async ( + seriesId: string, + params: SeriesReleaseListParams = {}, + ): Promise<PaginatedReleases> => { + const response = await api.get<PaginatedReleases>( + `/series/${seriesId}/releases${buildQuery(params)}`, + ); + return response.data; + }, + + patchEntry: async ( + releaseId: string, + update: UpdateReleaseLedgerEntryRequest, + ): Promise<ReleaseLedgerEntry> => { + const response = await api.patch<ReleaseLedgerEntry>( + `/releases/${releaseId}`, + update, + ); + return response.data; + }, + + dismiss: async (releaseId: string): Promise<ReleaseLedgerEntry> => { + const response = await api.post<ReleaseLedgerEntry>( + `/releases/${releaseId}/dismiss`, + ); + return response.data; + }, + + markAcquired: async (releaseId: string): Promise<ReleaseLedgerEntry> => { + const response = await api.post<ReleaseLedgerEntry>( + `/releases/${releaseId}/mark-acquired`, + ); + return response.data; + }, + + /** + * Hard-delete a single ledger row. The affected source's `etag` is + * cleared so the next poll re-fetches without `If-None-Match` and + * re-announces the row. + */ + delete: async (releaseId: string): Promise<DeleteReleaseResponse> => { + const response = await api.delete<DeleteReleaseResponse>( + `/releases/${releaseId}`, + ); + return response.data; + }, + + /** + * Apply an action (`dismiss`, `mark-acquired`, `delete`) to a batch + * of ledger rows in a single request. Server caps at 500 ids; clients + * should batch larger selections. 
+ */ + bulk: async ( + request: BulkReleaseActionRequest, + ): Promise<BulkReleaseActionResponse> => { + const response = await api.post<BulkReleaseActionResponse>( + `/releases/bulk`, + request, + ); + return response.data; + }, + + /** + * Distinct values present in the inbox under a given filter set. + * Each facet excludes its own dimension so dropdowns never collapse + * to the active selection. Used by the inbox UI to populate cascading + * filter Selects without forcing UUID input. + */ + facets: async (params: ReleaseFacetsParams = {}): Promise<ReleaseFacets> => { + const response = await api.get<ReleaseFacets>( + `/releases/facets${buildQuery(params)}`, + ); + return response.data; + }, +}; + +export const releaseSourcesApi = { + list: async (): Promise<ReleaseSource[]> => { + const response = await api.get<{ sources: ReleaseSource[] }>( + `/release-sources`, + ); + return response.data.sources; + }, + + update: async ( + sourceId: string, + update: UpdateReleaseSourceRequest, + ): Promise<ReleaseSource> => { + const response = await api.patch<ReleaseSource>( + `/release-sources/${sourceId}`, + update, + ); + return response.data; + }, + + pollNow: async ( + sourceId: string, + ): Promise<{ status: string; message: string }> => { + const response = await api.post<{ status: string; message: string }>( + `/release-sources/${sourceId}/poll-now`, + ); + return response.data; + }, + + /** + * Drop every ledger row for this source and clear its transient poll + * state (etag, last_polled_at, last_error, last_summary). User-managed + * fields (enabled, cronSchedule, displayName, config) are preserved. + * + * Used as a "force re-emit" lever for testing: after a reset, the next + * poll fetches the upstream feed without `If-None-Match` (no 304 + * short-circuit) and re-records every release as `announced`. 
+ */ + reset: async (sourceId: string): Promise<ResetReleaseSourceResponse> => { + const response = await api.post<ResetReleaseSourceResponse>( + `/release-sources/${sourceId}/reset`, + ); + return response.data; + }, + + /** + * Whether release tracking is available for a given library scope. + * + * Returns `applicable: true` when at least one enabled release-source + * plugin applies to `libraryId` (or, with `libraryId` omitted, to any + * library). The frontend uses this to hide the per-series Tracking panel + * and Releases tab on libraries that aren't covered, and to gate the + * bulk-track menu entry. + */ + applicability: async ( + libraryId?: string, + ): Promise<ReleaseTrackingApplicability> => { + const params = new URLSearchParams(); + if (libraryId) { + params.set("libraryId", libraryId); + } + const qs = params.toString(); + const response = await api.get<ReleaseTrackingApplicability>( + `/release-sources/applicability${qs ? `?${qs}` : ""}`, + ); + return response.data; + }, +}; diff --git a/web/src/api/series.ts b/web/src/api/series.ts index f19db9ea..52b52d8e 100644 --- a/web/src/api/series.ts +++ b/web/src/api/series.ts @@ -7,8 +7,12 @@ import type { SeriesCondition, SeriesListRequest, } from "@/types"; +import type { components } from "@/types/api.generated"; import { api } from "./client"; +export type BulkTrackForReleasesResponse = + components["schemas"]["BulkTrackForReleasesResponse"]; + export interface SeriesFilters { page?: number; pageSize?: number; @@ -376,6 +380,36 @@ export const seriesApi = { return response.data; }, + /** + * Bulk-enable release tracking. Flips `tracked: true` on each series and + * runs the seed pass (auto-derives aliases, latest_known_*, track_*). + * Series already tracked are reported as `outcome: skipped`. 
+ */ + bulkTrackForReleases: async ( + seriesIds: string[], + ): Promise<BulkTrackForReleasesResponse> => { + const response = await api.post<BulkTrackForReleasesResponse>( + "/series/bulk/track-for-releases", + { seriesIds }, + ); + return response.data; + }, + + /** + * Bulk-disable release tracking. Flips `tracked: false` without deleting + * aliases or other tracking config — re-tracking later still re-seeds + * the auto-derived fields. + */ + bulkUntrackForReleases: async ( + seriesIds: string[], + ): Promise<BulkTrackForReleasesResponse> => { + const response = await api.post<BulkTrackForReleasesResponse>( + "/series/bulk/untrack-for-releases", + { seriesIds }, + ); + return response.data; + }, + /** * Queue analysis for all books in multiple series in bulk * @param seriesIds - Array of series IDs to analyze diff --git a/web/src/api/tracking.ts b/web/src/api/tracking.ts new file mode 100644 index 00000000..e860d797 --- /dev/null +++ b/web/src/api/tracking.ts @@ -0,0 +1,51 @@ +import type { components } from "@/types/api.generated"; +import { api } from "./client"; + +export type SeriesTracking = components["schemas"]["SeriesTrackingDto"]; +export type UpdateSeriesTrackingRequest = + components["schemas"]["UpdateSeriesTrackingRequest"]; +export type SeriesAlias = components["schemas"]["SeriesAliasDto"]; +export type CreateSeriesAliasRequest = + components["schemas"]["CreateSeriesAliasRequest"]; + +export const trackingApi = { + getTracking: async (seriesId: string): Promise<SeriesTracking> => { + const response = await api.get<SeriesTracking>( + `/series/${seriesId}/tracking`, + ); + return response.data; + }, + + updateTracking: async ( + seriesId: string, + update: UpdateSeriesTrackingRequest, + ): Promise<SeriesTracking> => { + const response = await api.patch<SeriesTracking>( + `/series/${seriesId}/tracking`, + update, + ); + return response.data; + }, + + listAliases: async (seriesId: string): Promise<SeriesAlias[]> => { + const response = await api.get<{ 
aliases: SeriesAlias[] }>( + `/series/${seriesId}/aliases`, + ); + return response.data.aliases; + }, + + createAlias: async ( + seriesId: string, + request: CreateSeriesAliasRequest, + ): Promise<SeriesAlias> => { + const response = await api.post<SeriesAlias>( + `/series/${seriesId}/aliases`, + request, + ); + return response.data; + }, + + deleteAlias: async (seriesId: string, aliasId: string): Promise<void> => { + await api.delete(`/series/${seriesId}/aliases/${aliasId}`); + }, +}; diff --git a/web/src/components/forms/PluginConfigModal.test.tsx b/web/src/components/forms/PluginConfigModal.test.tsx index 0b3c9aad..508416ba 100644 --- a/web/src/components/forms/PluginConfigModal.test.tsx +++ b/web/src/components/forms/PluginConfigModal.test.tsx @@ -297,4 +297,89 @@ describe("PluginConfigModal", () => { screen.queryByText("Configure: Test Plugin"), ).not.toBeInTheDocument(); }); + + it("hides permission selectors and shows an explanatory note for release-source plugins", () => { + const plugin = createMockPlugin({ + manifest: { + name: "release-mangaupdates", + displayName: "MangaUpdates Releases", + version: "1.0.0", + protocolVersion: "1.0", + capabilities: { releaseSource: true }, + contentTypes: [], + } as never, + }); + + renderWithProviders( + <PluginConfigModal + plugin={plugin} + opened={true} + onClose={vi.fn()} + libraries={mockLibraries} + />, + ); + + expect( + screen.getByText(/No permission settings for this plugin/), + ).toBeInTheDocument(); + expect(screen.queryByPlaceholderText("Select permissions")).toBeNull(); + expect(screen.queryByPlaceholderText("Select scopes")).toBeNull(); + expect(screen.queryByText("Library Filter")).toBeNull(); + }); + + it("hides permission selectors for recommendation-only plugins", () => { + const plugin = createMockPlugin({ + manifest: { + name: "recommendations-anilist", + displayName: "AniList Recommendations", + version: "1.0.0", + protocolVersion: "1.0", + capabilities: { userRecommendationProvider: true }, + 
contentTypes: [], + } as never, + }); + + renderWithProviders( + <PluginConfigModal + plugin={plugin} + opened={true} + onClose={vi.fn()} + libraries={mockLibraries} + />, + ); + + expect( + screen.getByText(/No permission settings for this plugin/), + ).toBeInTheDocument(); + expect(screen.queryByPlaceholderText("Select permissions")).toBeNull(); + }); + + it("hides permission selectors for sync-only plugins", () => { + const plugin = createMockPlugin({ + manifest: { + name: "sync-anilist", + displayName: "AniList Sync", + version: "1.0.0", + protocolVersion: "1.0", + capabilities: { metadataProvider: [], userReadSync: true }, + contentTypes: [], + } as never, + }); + + renderWithProviders( + <PluginConfigModal + plugin={plugin} + opened={true} + onClose={vi.fn()} + libraries={mockLibraries} + />, + ); + + expect( + screen.getByText(/No permission settings for this plugin/), + ).toBeInTheDocument(); + expect(screen.queryByPlaceholderText("Select permissions")).toBeNull(); + expect(screen.queryByPlaceholderText("Select scopes")).toBeNull(); + expect(screen.queryByText("Library Filter")).toBeNull(); + }); }); diff --git a/web/src/components/forms/plugin-config/PermissionsTab.tsx b/web/src/components/forms/plugin-config/PermissionsTab.tsx index 985ce31f..66729861 100644 --- a/web/src/components/forms/plugin-config/PermissionsTab.tsx +++ b/web/src/components/forms/plugin-config/PermissionsTab.tsx @@ -11,6 +11,10 @@ import type { PluginDto } from "@/api/plugins"; import { getPermissionData, getScopeData, + hasPermissionableSurface, + isRecommendationProvider, + isReleaseSource, + isSyncProvider, type PluginConfigForm, } from "./types"; @@ -25,6 +29,36 @@ export function PermissionsTab({ form, libraries, }: PermissionsTabProps) { + // Plugins whose only capabilities are `releaseSource`, + // `userRecommendationProvider`, or `userReadSync` don't go through the RBAC + // permission gate, don't expose scoped UI actions, and aren't + // library-filtered. 
Render an explanatory note instead of empty selectors. + if (!hasPermissionableSurface(plugin)) { + const capabilityLabel = isReleaseSource(plugin) + ? "Release-source" + : isRecommendationProvider(plugin) + ? "Recommendation" + : isSyncProvider(plugin) + ? "Sync" + : null; + return ( + <Stack gap="md"> + <Alert + icon={<IconInfoCircle size={16} />} + color="blue" + variant="light" + title="No permission settings for this plugin" + > + <Text size="sm"> + {capabilityLabel + ? `${capabilityLabel} plugins are gated by their manifest capability — they don't write metadata, don't expose scoped UI actions, and aren't library-filtered. There is nothing to configure on this tab.` + : "This plugin doesn't expose any capability that uses permissions, scopes, or the library filter."} + </Text> + </Alert> + </Stack> + ); + } + const permissionInfo = getPermissionData(plugin); const scopeData = getScopeData(plugin); diff --git a/web/src/components/forms/plugin-config/types.ts b/web/src/components/forms/plugin-config/types.ts index 434dd35e..737b496c 100644 --- a/web/src/components/forms/plugin-config/types.ts +++ b/web/src/components/forms/plugin-config/types.ts @@ -15,6 +15,40 @@ export function isSyncProvider(plugin: PluginDto): boolean { return plugin.manifest?.capabilities?.userReadSync === true; } +export function isReleaseSource(plugin: PluginDto): boolean { + return plugin.manifest?.capabilities?.releaseSource === true; +} + +export function isRecommendationProvider(plugin: PluginDto): boolean { + return plugin.manifest?.capabilities?.userRecommendationProvider === true; +} + +/** + * Returns true if the plugin has any capability for which permissions, + * scopes, or the library filter actually do something. + * + * Only metadata providers go through these row-level controls: + * - `permissions` are checked on the metadata-apply path + * (`src/services/metadata/apply.rs`, `book_apply.rs`). 
+ * - `scopes` + `library_ids` are checked when the UI lists plugin actions + * for a series/book/library context + * (`src/services/plugin/manager.rs::plugins_by_scope_and_library`). + * + * Release-source, recommendation, and sync plugins are gated only by + * manifest capability (checked at reverse-RPC dispatch in + * `src/services/plugin/permissions.rs`); they don't write metadata, don't + * expose scoped UI actions, and aren't library-filtered. Showing those + * fields when they have no effect is misleading — an empty state suggests + * "you forgot to configure something" when there's nothing to configure. + * + * Plugins without a manifest are considered permissionable so the existing + * "test this plugin to discover its capabilities" warning still triggers. + */ +export function hasPermissionableSurface(plugin: PluginDto): boolean { + if (!hasManifest(plugin)) return true; + return isMetadataProvider(plugin); +} + export function isOAuthPlugin(plugin: PluginDto): boolean { return plugin.manifest?.oauth != null; } @@ -65,7 +99,6 @@ const LIBRARY_PERMISSION_VALUES = new Set( export function getPermissionData(plugin: PluginDto) { const isMeta = isMetadataProvider(plugin); - const isSync = isSyncProvider(plugin); const noManifest = !hasManifest(plugin); if (noManifest) { @@ -98,9 +131,6 @@ export function getPermissionData(plugin: PluginDto) { METADATA_PERMISSION_VALUES.has(p.value), ).map((p) => ({ value: p.value, label: p.label })), }); - } - - if (isSync || isMeta) { groups.push({ group: "Library", items: AVAILABLE_PERMISSIONS.filter((p) => @@ -132,13 +162,6 @@ const BOOK_SCOPES = new Set([ "library:scan", ]); -// Sync providers operate at series/library level -const SYNC_SCOPES = new Set([ - "series:detail", - "library:detail", - "library:scan", -]); - export function getScopeData(plugin: PluginDto) { const noManifest = !hasManifest(plugin); @@ -149,12 +172,10 @@ export function getScopeData(plugin: PluginDto) { const metadataTargets = 
plugin.manifest?.capabilities?.metadataProvider ?? []; const canSeries = metadataTargets.includes("series"); const canBook = metadataTargets.includes("book"); - const isSync = isSyncProvider(plugin); const allowed = new Set<string>(); if (canSeries) for (const s of SERIES_SCOPES) allowed.add(s); if (canBook) for (const s of BOOK_SCOPES) allowed.add(s); - if (isSync) for (const s of SYNC_SCOPES) allowed.add(s); return AVAILABLE_SCOPES.filter((s) => allowed.has(s.value)).map((s) => ({ value: s.value, diff --git a/web/src/components/layout/ReleasesNavBadge.tsx b/web/src/components/layout/ReleasesNavBadge.tsx new file mode 100644 index 00000000..2b414b6d --- /dev/null +++ b/web/src/components/layout/ReleasesNavBadge.tsx @@ -0,0 +1,16 @@ +import { Badge } from "@mantine/core"; +import { useReleaseAnnouncementsStore } from "@/store/releaseAnnouncementsStore"; + +/** + * Inline badge shown next to the "Releases" nav entry. Hidden when the + * counter is zero so it doesn't add visual noise to the sidebar. + */ +export function ReleasesNavBadge() { + const unseen = useReleaseAnnouncementsStore((s) => s.unseenCount); + if (unseen <= 0) return null; + return ( + <Badge color="orange" variant="filled" size="sm" circle> + {unseen > 99 ? 
"99+" : unseen} + </Badge> + ); +} diff --git a/web/src/components/layout/Sidebar.tsx b/web/src/components/layout/Sidebar.tsx index ad44c18d..469a489a 100644 --- a/web/src/components/layout/Sidebar.tsx +++ b/web/src/components/layout/Sidebar.tsx @@ -29,6 +29,7 @@ import { IconPlugConnected, IconPlus, IconRadar, + IconRss, IconScan, IconServer, IconSettings, @@ -44,6 +45,7 @@ import { Link, useLocation, useNavigate } from "react-router-dom"; import { librariesApi } from "@/api/libraries"; import { userPluginsApi } from "@/api/userPlugins"; import { LibraryModal } from "@/components/forms/LibraryModal"; +import { ReleasesNavBadge } from "@/components/layout/ReleasesNavBadge"; import { LibraryActionsMenu } from "@/components/library/LibraryActionsMenu"; import { TaskNotificationBadge } from "@/components/TaskNotificationBadge"; import { useAppInfo } from "@/hooks/useAppInfo"; @@ -340,6 +342,14 @@ export function Sidebar() { active={currentPath === "/recommendations"} /> )} + <NavLink + component={Link} + to="/releases" + label="Releases" + leftSection={<IconRss size={20} />} + active={currentPath.startsWith("/releases")} + rightSection={<ReleasesNavBadge />} + /> <NavLink component={Link} to={`/libraries/all/${getLastTab("all") || "series"}`} @@ -567,6 +577,15 @@ export function Sidebar() { leftSection={<IconPlugConnected size={16} />} active={currentPath.startsWith("/settings/plugins")} /> + <NavLink + component={Link} + to="/settings/release-tracking" + label="Release Tracking" + leftSection={<IconRss size={16} />} + active={currentPath.startsWith( + "/settings/release-tracking", + )} + /> {/* Access Section */} <Divider diff --git a/web/src/components/library/BulkSelectionToolbar.test.tsx b/web/src/components/library/BulkSelectionToolbar.test.tsx index d3f8e5f4..b1c14d8f 100644 --- a/web/src/components/library/BulkSelectionToolbar.test.tsx +++ b/web/src/components/library/BulkSelectionToolbar.test.tsx @@ -35,6 +35,18 @@ vi.mock("@/api/series", () => ({ 
tasksEnqueued: 5, message: "Enqueued 5 analysis tasks for 2 series", }), + bulkTrackForReleases: vi.fn().mockResolvedValue({ + changed: 2, + alreadyInState: 0, + errored: 0, + results: [], + }), + bulkUntrackForReleases: vi.fn().mockResolvedValue({ + changed: 1, + alreadyInState: 0, + errored: 0, + results: [], + }), }, })); @@ -43,6 +55,15 @@ vi.mock("@/hooks/usePermissions", () => ({ usePermissions: vi.fn(), })); +// Mock the applicability hook so the Release Tracking menu entries render. +// Tests that need to hide them can override the mock. +vi.mock("@/hooks/useReleaseTrackingApplicability", () => ({ + useReleaseTrackingApplicability: vi.fn(() => ({ + data: { applicable: true, pluginDisplayNames: ["Nyaa Releases"] }, + isLoading: false, + })), +})); + const mockPermissionsAdmin = () => { vi.mocked(usePermissions).mockReturnValue({ user: { id: "user-1", username: "admin", role: "admin" } as ReturnType< @@ -351,6 +372,55 @@ describe("BulkSelectionToolbar", () => { expect(seriesApi.bulkAnalyze).toHaveBeenCalledWith(["series-1"]); }); }); + + it("calls bulkTrackForReleases with all selected series when Track for releases is clicked", async () => { + const { seriesApi } = await import("@/api/series"); + const user = userEvent.setup(); + + useBulkSelectionStore.getState().toggleSelection("series-1", "series"); + useBulkSelectionStore.getState().toggleSelection("series-2", "series"); + + renderWithProviders(<BulkSelectionToolbar />); + + await user.click(screen.getByRole("button", { name: /more actions/i })); + await waitFor(() => { + expect(screen.getByText("Track for releases")).toBeInTheDocument(); + }); + await user.click(screen.getByText("Track for releases")); + + await waitFor(() => { + expect(seriesApi.bulkTrackForReleases).toHaveBeenCalledTimes(1); + }); + // The toolbar passes the full selected-id list as a single argument. 
+ const calls = (seriesApi.bulkTrackForReleases as ReturnType<typeof vi.fn>) + .mock.calls; + expect(calls[0][0]).toEqual( + expect.arrayContaining(["series-1", "series-2"]), + ); + }); + + it("calls bulkUntrackForReleases when Don't track for releases is clicked", async () => { + const { seriesApi } = await import("@/api/series"); + const user = userEvent.setup(); + + useBulkSelectionStore.getState().toggleSelection("series-1", "series"); + + renderWithProviders(<BulkSelectionToolbar />); + + await user.click(screen.getByRole("button", { name: /more actions/i })); + await waitFor(() => { + expect( + screen.getByText("Don't track for releases"), + ).toBeInTheDocument(); + }); + await user.click(screen.getByText("Don't track for releases")); + + await waitFor(() => { + expect(seriesApi.bulkUntrackForReleases).toHaveBeenCalledWith([ + "series-1", + ]); + }); + }); }); describe("selection clearing after action", () => { diff --git a/web/src/components/library/BulkSelectionToolbar.tsx b/web/src/components/library/BulkSelectionToolbar.tsx index bf50cf4c..b66aaafc 100644 --- a/web/src/components/library/BulkSelectionToolbar.tsx +++ b/web/src/components/library/BulkSelectionToolbar.tsx @@ -11,6 +11,8 @@ import { import { notifications } from "@mantine/notifications"; import { IconAnalyze, + IconBell, + IconBellOff, IconBook, IconBookOff, IconChevronDown, @@ -29,6 +31,7 @@ import { pluginActionsApi, pluginsApi } from "@/api/plugins"; import { seriesApi } from "@/api/series"; import { BulkMetadataEditModal } from "@/components/library/BulkMetadataEditModal"; import { usePermissions } from "@/hooks/usePermissions"; +import { useReleaseTrackingApplicability } from "@/hooks/useReleaseTrackingApplicability"; import { selectPageItems, selectSelectionCount, @@ -95,6 +98,16 @@ export function BulkSelectionToolbar() { enabled: selectionType === "book" && count > 0, }); + // Whether any enabled release-source plugin exists in the install at all. 
+  // Bulk selections may span libraries, so we use the global (no library
+  // filter) applicability — it just hides the "Track for releases" /
+  // "Don't track for releases" entries when no plugin is configured
+  // anywhere. Per-library plugin scopes still apply at poll time.
+  const { data: releaseTrackingApplicability } =
+    useReleaseTrackingApplicability();
+  const showReleaseTrackingMenu =
+    releaseTrackingApplicability?.applicable === true;
+
   // Helper to refetch all related queries
   const refetchAll = () => {
     queryClient.refetchQueries({
@@ -386,6 +399,56 @@
+  // Single-call bulk track/untrack via the dedicated endpoints
+  // (bulkTrackForReleases / bulkUntrackForReleases) — one request covers
+  // the whole selection instead of fanning out per-series PATCH calls.
+  // The host runs the seed pass per series on track-on transitions
+  // (auto-derives aliases, latest_known_*, track_chapters/volumes) so users
+  // get notification-ready tracking without touching the per-series panel.
+  // Scale is modest: hundreds of series, low-frequency action.
+  const bulkSetTrackedMutation = useMutation({
+    mutationFn: async ({
+      seriesIds,
+      tracked,
+    }: {
+      seriesIds: string[];
+      tracked: boolean;
+    }) => {
+      const response = tracked
+        ? await seriesApi.bulkTrackForReleases(seriesIds)
+        : await seriesApi.bulkUntrackForReleases(seriesIds);
+      return { total: seriesIds.length, response };
+    },
+    onSuccess: ({ total, response }, { tracked }) => {
+      const errored = response.errored;
+      if (errored === 0) {
+        notifications.show({
+          title: tracked ? "Tracking enabled" : "Tracking disabled",
+          message:
+            response.alreadyInState > 0
+              ? `Updated ${response.changed} series (${response.alreadyInState} already in this state).`
+              : `Updated ${response.changed} of ${total} series.`,
+          color: tracked ?
"green" : "blue", + }); + } else { + notifications.show({ + title: "Some updates failed", + message: `${response.changed} updated, ${response.alreadyInState} unchanged, ${errored} failed.`, + color: "yellow", + }); + } + refetchAll(); + clearSelection(); + }, + onError: (error: Error) => { + notifications.show({ + title: "Failed to update tracking", + message: error.message || "Bulk tracking update failed", + color: "red", + }); + }, + }); + // Bulk reset series metadata const bulkResetMetadataMutation = useMutation({ mutationFn: (seriesIds: string[]) => seriesApi.bulkResetMetadata(seriesIds), @@ -452,7 +515,8 @@ export function BulkSelectionToolbar() { bulkGenerateSeriesBookThumbnailsMutation.isPending || bulkReprocessTitlesMutation.isPending || bulkRenumberSeriesMutation.isPending || - bulkResetMetadataMutation.isPending; + bulkResetMetadataMutation.isPending || + bulkSetTrackedMutation.isPending; // Determine if the "More" menu should be shown based on permissions const showBooksMoreMenu = isBooks && (canWriteBooks || canWriteTasks); @@ -799,6 +863,37 @@ export function BulkSelectionToolbar() { Reprocess Titles </Menu.Item> + {showReleaseTrackingMenu && ( + <> + <Menu.Divider /> + <Menu.Label>Release Tracking</Menu.Label> + <Menu.Item + leftSection={<IconBell size={16} />} + onClick={() => + bulkSetTrackedMutation.mutate({ + seriesIds: selectedIds, + tracked: true, + }) + } + disabled={isAnyPending} + > + Track for releases + </Menu.Item> + <Menu.Item + leftSection={<IconBellOff size={16} />} + onClick={() => + bulkSetTrackedMutation.mutate({ + seriesIds: selectedIds, + tracked: false, + }) + } + disabled={isAnyPending} + > + Don't track for releases + </Menu.Item> + </> + )} + <Menu.Divider /> <Menu.Label>Metadata</Menu.Label> <Menu.Item diff --git a/web/src/components/reader/PdfContinuousScrollReader.tsx b/web/src/components/reader/PdfContinuousScrollReader.tsx index 4aefbc56..0f08a32f 100644 --- a/web/src/components/reader/PdfContinuousScrollReader.tsx +++ 
b/web/src/components/reader/PdfContinuousScrollReader.tsx @@ -8,9 +8,10 @@ import type { PdfZoomLevel } from "./PdfReader"; import "react-pdf/dist/Page/TextLayer.css"; import "react-pdf/dist/Page/AnnotationLayer.css"; -// Configure PDF.js worker - use CDN with the exact version bundled in react-pdf -// This avoids version mismatches when pdfjs-dist is also installed as a direct dependency -pdfjs.GlobalWorkerOptions.workerSrc = `//unpkg.com/pdfjs-dist@${pdfjs.version}/build/pdf.worker.min.mjs`; +pdfjs.GlobalWorkerOptions.workerSrc = new URL( + "pdfjs-dist/build/pdf.worker.min.mjs", + import.meta.url, +).toString(); // ============================================================================= // Types diff --git a/web/src/components/reader/PdfReader.tsx b/web/src/components/reader/PdfReader.tsx index dc337c14..bd28abf2 100644 --- a/web/src/components/reader/PdfReader.tsx +++ b/web/src/components/reader/PdfReader.tsx @@ -29,9 +29,10 @@ import { ReaderToolbar } from "./ReaderToolbar"; import "react-pdf/dist/Page/TextLayer.css"; import "react-pdf/dist/Page/AnnotationLayer.css"; -// Configure PDF.js worker - use CDN with the exact version bundled in react-pdf -// This avoids version mismatches when pdfjs-dist is also installed as a direct dependency -pdfjs.GlobalWorkerOptions.workerSrc = `//unpkg.com/pdfjs-dist@${pdfjs.version}/build/pdf.worker.min.mjs`; +pdfjs.GlobalWorkerOptions.workerSrc = new URL( + "pdfjs-dist/build/pdf.worker.min.mjs", + import.meta.url, +).toString(); export type PdfZoomLevel = | "fit-page" diff --git a/web/src/components/releases/MediaUrlIcon.tsx b/web/src/components/releases/MediaUrlIcon.tsx new file mode 100644 index 00000000..18adfc47 --- /dev/null +++ b/web/src/components/releases/MediaUrlIcon.tsx @@ -0,0 +1,42 @@ +import { ActionIcon, Tooltip } from "@mantine/core"; +import { + IconCloudDownload, + IconDownload, + IconMagnet, +} from "@tabler/icons-react"; + +interface MediaUrlIconProps { + url: string; + kind?: string | null; +} + +const 
KIND_META: Record< + string, + { label: string; Icon: React.ComponentType<{ size?: number }> } +> = { + torrent: { label: "Download .torrent", Icon: IconDownload }, + magnet: { label: "Open magnet link", Icon: IconMagnet }, + direct: { label: "Direct download", Icon: IconDownload }, + other: { label: "Open media link", Icon: IconCloudDownload }, +}; + +export function MediaUrlIcon({ url, kind }: MediaUrlIconProps) { + const meta = (kind ? KIND_META[kind] : undefined) ?? KIND_META.other; + const { label, Icon } = meta; + + return ( + <Tooltip label={label}> + <ActionIcon + component="a" + href={url} + target="_blank" + rel="noopener noreferrer" + variant="subtle" + size="sm" + aria-label={label} + > + <Icon size={16} /> + </ActionIcon> + </Tooltip> + ); +} diff --git a/web/src/components/releases/ReleasesBulkActionBar.tsx b/web/src/components/releases/ReleasesBulkActionBar.tsx new file mode 100644 index 00000000..314dff22 --- /dev/null +++ b/web/src/components/releases/ReleasesBulkActionBar.tsx @@ -0,0 +1,104 @@ +import { Button, Card, Group, Text } from "@mantine/core"; +import { + IconCheck, + IconEyeOff, + IconRefresh, + IconTrash, + IconX, +} from "@tabler/icons-react"; +import type { BulkReleaseAction } from "@/api/releases"; + +interface ReleasesBulkActionBarProps { + count: number; + isPending: boolean; + onAction: (action: BulkReleaseAction) => void; + onClear: () => void; + /** Show the Delete button. The inbox routes Delete through a confirm modal, + * which it wires up itself; the series panel currently doesn't expose + * bulk-delete (use per-row delete instead). */ + onDeleteClick?: () => void; + /** When true, render as a sticky banner (page-level inbox). Off for the + * embedded series panel where the parent card already provides framing. 
*/ + sticky?: boolean; +} + +export function ReleasesBulkActionBar({ + count, + isPending, + onAction, + onClear, + onDeleteClick, + sticky = false, +}: ReleasesBulkActionBarProps) { + return ( + <Card + withBorder + padding="sm" + radius="md" + style={sticky ? { position: "sticky", top: 0, zIndex: 2 } : undefined} + > + <Group justify="space-between" wrap="wrap"> + <Text size="sm" fw={500}> + {count} selected + </Text> + <Group gap="xs"> + <Button + size="xs" + variant="light" + color="green" + leftSection={<IconCheck size={14} />} + loading={isPending} + onClick={() => onAction("mark-acquired")} + > + Mark acquired + </Button> + <Button + size="xs" + variant="light" + color="gray" + leftSection={<IconX size={14} />} + loading={isPending} + onClick={() => onAction("dismiss")} + > + Dismiss + </Button> + <Button + size="xs" + variant="light" + color="gray" + leftSection={<IconEyeOff size={14} />} + loading={isPending} + onClick={() => onAction("ignore")} + > + Ignore + </Button> + <Button + size="xs" + variant="light" + color="blue" + leftSection={<IconRefresh size={14} />} + loading={isPending} + onClick={() => onAction("reset")} + > + Reset + </Button> + {onDeleteClick && ( + <Button + size="xs" + variant="light" + color="red" + leftSection={<IconTrash size={14} />} + loading={isPending} + onClick={onDeleteClick} + > + Delete + </Button> + )} + <Button size="xs" variant="subtle" onClick={onClear}> + Clear + </Button> + </Group> + </Group> + </Card> + ); +} diff --git a/web/src/components/releases/ReleasesBulkDeleteModal.tsx b/web/src/components/releases/ReleasesBulkDeleteModal.tsx new file mode 100644 index 00000000..69ab031c --- /dev/null +++ b/web/src/components/releases/ReleasesBulkDeleteModal.tsx @@ -0,0 +1,41 @@ +import { Button, Group, Modal, Stack, Text } from "@mantine/core"; + +interface ReleasesBulkDeleteModalProps { + opened: boolean; + onClose: () => void; + onConfirm: () => void; + count: number; + isPending: boolean; +} + +/** Confirmation modal for 
bulk-deleting ledger entries. + * Hard-deletes are reversible by the upstream re-poll, so we surface that + * caveat in the body — users typically want Dismiss, not Delete. */ +export function ReleasesBulkDeleteModal({ + opened, + onClose, + onConfirm, + count, + isPending, +}: ReleasesBulkDeleteModalProps) { + const noun = count === 1 ? "release" : "releases"; + return ( + <Modal opened={opened} onClose={onClose} title="Delete releases?" centered> + <Stack gap="md"> + <Text size="sm"> + This will hard-delete {count} {noun} from the ledger and clear the + affected sources' cache so they re-fetch on the next poll. The + releases will reappear if the upstream still lists them. + </Text> + <Group justify="flex-end" gap="xs"> + <Button variant="subtle" onClick={onClose}> + Cancel + </Button> + <Button color="red" loading={isPending} onClick={onConfirm}> + Delete {count} {noun} + </Button> + </Group> + </Stack> + </Modal> + ); +} diff --git a/web/src/components/releases/ReleasesTable.tsx b/web/src/components/releases/ReleasesTable.tsx new file mode 100644 index 00000000..3a75e6ee --- /dev/null +++ b/web/src/components/releases/ReleasesTable.tsx @@ -0,0 +1,238 @@ +import { + ActionIcon, + Anchor, + Badge, + Checkbox, + Group, + Stack, + Table, + Text, + Tooltip, +} from "@mantine/core"; +import { + IconCheck, + IconExternalLink, + IconTrash, + IconX, +} from "@tabler/icons-react"; +import { format } from "date-fns"; +import { Link } from "react-router-dom"; +import type { ReleaseLedgerEntry, ReleaseSource } from "@/api/releases"; +import { MediaUrlIcon } from "./MediaUrlIcon"; + +const STATE_BADGE: Record<string, { color: string; label: string }> = { + announced: { color: "blue", label: "New" }, + marked_acquired: { color: "green", label: "Acquired" }, + dismissed: { color: "gray", label: "Dismissed" }, + hidden: { color: "gray", label: "Hidden" }, +}; + +interface ReleasesTableProps { + entries: ReleaseLedgerEntry[]; + sourceById: Map<string, ReleaseSource>; + selected: 
Set<string>; + onToggleOne: (id: string) => void; + onToggleAll: () => void; + onDismiss: (id: string) => void; + onMarkAcquired: (id: string) => void; + onDelete: (id: string) => void; + /** When true, render a Series column linking to the series detail page. + * Off when the table is already scoped to a single series. */ + showSeriesColumn?: boolean; + /** Disable per-row action buttons while a mutation is in flight. */ + isDismissPending?: boolean; + isMarkAcquiredPending?: boolean; + isDeletePending?: boolean; + /** Visual density. The page-level inbox uses "sm"; the embedded panel + * uses "xs" so it doesn't dominate the surrounding card. */ + verticalSpacing?: "xs" | "sm"; +} + +function formatChapterVolume(entry: ReleaseLedgerEntry): string { + const hasChapter = entry.chapter !== null && entry.chapter !== undefined; + const hasVolume = entry.volume !== null && entry.volume !== undefined; + if (!hasChapter && !hasVolume) return "—"; + const chapter = hasChapter ? `Ch ${entry.chapter}` : ""; + const volume = hasVolume + ? hasChapter + ? 
` · Vol ${entry.volume}` + : `Vol ${entry.volume}` + : ""; + return `${chapter}${volume}`; +} + +export function ReleasesTable({ + entries, + sourceById, + selected, + onToggleOne, + onToggleAll, + onDismiss, + onMarkAcquired, + onDelete, + showSeriesColumn = false, + isDismissPending = false, + isMarkAcquiredPending = false, + isDeletePending = false, + verticalSpacing = "sm", +}: ReleasesTableProps) { + const allSelected = + entries.length > 0 && entries.every((e) => selected.has(e.id)); + const someSelected = entries.some((e) => selected.has(e.id)) && !allSelected; + + return ( + <Table verticalSpacing={verticalSpacing} highlightOnHover> + <Table.Thead> + <Table.Tr> + <Table.Th w={36}> + <Checkbox + aria-label="Select all releases" + checked={allSelected} + indeterminate={someSelected} + onChange={onToggleAll} + /> + </Table.Th> + {showSeriesColumn && <Table.Th>Series</Table.Th>} + <Table.Th>Ch / Vol</Table.Th> + <Table.Th>Source / Group</Table.Th> + <Table.Th>Lang</Table.Th> + <Table.Th>State</Table.Th> + <Table.Th>Observed</Table.Th> + <Table.Th aria-label="Actions" /> + </Table.Tr> + </Table.Thead> + <Table.Tbody> + {entries.map((entry) => { + const stateInfo = STATE_BADGE[entry.state] ?? { + color: "gray", + label: entry.state, + }; + const isSelected = selected.has(entry.id); + const source = sourceById.get(entry.sourceId); + const sourceLabel = + source?.displayName ?? `${entry.sourceId.slice(0, 8)}…`; + return ( + <Table.Tr + key={entry.id} + bg={isSelected ? "var(--mantine-color-blue-light)" : undefined} + > + <Table.Td> + <Checkbox + aria-label={`Select release ${entry.id}`} + checked={isSelected} + onChange={() => onToggleOne(entry.id)} + /> + </Table.Td> + {showSeriesColumn && ( + <Table.Td> + <Anchor + component={Link} + to={`/series/${entry.seriesId}#releases`} + size="sm" + lineClamp={1} + > + {entry.seriesTitle.length > 0 + ? 
entry.seriesTitle + : `${entry.seriesId.slice(0, 8)}…`} + </Anchor> + </Table.Td> + )} + <Table.Td> + <Text size="sm" fw={500}> + {formatChapterVolume(entry)} + </Text> + </Table.Td> + <Table.Td> + <Stack gap={2}> + {entry.groupOrUploader && + entry.groupOrUploader !== sourceLabel && ( + <Text size="sm">{entry.groupOrUploader}</Text> + )} + <Text size="sm" fw={500}> + {sourceLabel} + </Text> + </Stack> + </Table.Td> + <Table.Td> + <Text size="sm">{entry.language ?? "—"}</Text> + </Table.Td> + <Table.Td> + <Badge color={stateInfo.color} variant="light" size="sm"> + {stateInfo.label} + </Badge> + </Table.Td> + <Table.Td> + <Text size="xs" c="dimmed"> + {format(new Date(entry.observedAt), "yyyy-MM-dd")} + </Text> + </Table.Td> + <Table.Td> + <Group gap={4} justify="flex-end" wrap="nowrap"> + <Tooltip label="Open payload URL"> + <ActionIcon + component="a" + href={entry.payloadUrl} + target="_blank" + rel="noopener noreferrer" + variant="subtle" + size="sm" + aria-label="Open payload URL" + > + <IconExternalLink size={16} /> + </ActionIcon> + </Tooltip> + {entry.mediaUrl && ( + <MediaUrlIcon + url={entry.mediaUrl} + kind={entry.mediaUrlKind} + /> + )} + {entry.state === "announced" && ( + <> + <Tooltip label="Mark acquired"> + <ActionIcon + variant="subtle" + size="sm" + color="green" + loading={isMarkAcquiredPending} + onClick={() => onMarkAcquired(entry.id)} + aria-label="Mark acquired" + > + <IconCheck size={16} /> + </ActionIcon> + </Tooltip> + <Tooltip label="Dismiss"> + <ActionIcon + variant="subtle" + size="sm" + color="gray" + loading={isDismissPending} + onClick={() => onDismiss(entry.id)} + aria-label="Dismiss" + > + <IconX size={16} /> + </ActionIcon> + </Tooltip> + </> + )} + <Tooltip label="Delete (will reappear on next poll)"> + <ActionIcon + variant="subtle" + size="sm" + color="red" + loading={isDeletePending} + onClick={() => onDelete(entry.id)} + aria-label="Delete" + > + <IconTrash size={16} /> + </ActionIcon> + </Tooltip> + </Group> + </Table.Td> + 
</Table.Tr> + ); + })} + </Table.Tbody> + </Table> + ); +} diff --git a/web/src/components/series/BehindByBadge.test.tsx b/web/src/components/series/BehindByBadge.test.tsx new file mode 100644 index 00000000..ea46f7a9 --- /dev/null +++ b/web/src/components/series/BehindByBadge.test.tsx @@ -0,0 +1,94 @@ +import { describe, expect, it, vi } from "vitest"; +import { renderWithProviders, screen, userEvent } from "@/test/utils"; +import { BehindByBadge } from "./BehindByBadge"; + +const navigateMock = vi.fn(); +vi.mock("react-router-dom", async () => { + const actual = + await vi.importActual<typeof import("react-router-dom")>( + "react-router-dom", + ); + return { + ...actual, + useNavigate: () => navigateMock, + }; +}); + +const SERIES_ID = "00000000-0000-0000-0000-000000000001"; + +describe("BehindByBadge", () => { + it("renders translation chapter badge with delta and unit", () => { + renderWithProviders( + <BehindByBadge + variant="translation" + axis="chapter" + delta={3} + seriesId={SERIES_ID} + provider="MangaUpdates" + language="en" + />, + ); + expect( + screen.getByTestId("behind-by-translation-chapter"), + ).toHaveTextContent("+3 ch (translation)"); + }); + + it("renders upstream volume badge with grey/informational variant", () => { + renderWithProviders( + <BehindByBadge + variant="upstream" + axis="volume" + delta={1} + seriesId={SERIES_ID} + provider="MangaBaka" + />, + ); + expect(screen.getByTestId("behind-by-upstream-volume")).toHaveTextContent( + "+1 vol (upstream)", + ); + }); + + it("hides when delta is zero or negative", () => { + renderWithProviders( + <BehindByBadge + variant="translation" + axis="chapter" + delta={0} + seriesId={SERIES_ID} + />, + ); + expect( + screen.queryByTestId("behind-by-translation-chapter"), + ).not.toBeInTheDocument(); + }); + + it("translation badge navigates to series Releases on click", async () => { + const user = userEvent.setup(); + navigateMock.mockReset(); + renderWithProviders( + <BehindByBadge + 
variant="translation" + axis="chapter" + delta={2} + seriesId={SERIES_ID} + />, + ); + await user.click(screen.getByTestId("behind-by-translation-chapter")); + expect(navigateMock).toHaveBeenCalledWith(`/series/${SERIES_ID}#releases`); + }); + + it("upstream badge does not navigate (informational only)", async () => { + const user = userEvent.setup(); + navigateMock.mockReset(); + renderWithProviders( + <BehindByBadge + variant="upstream" + axis="chapter" + delta={5} + seriesId={SERIES_ID} + />, + ); + await user.click(screen.getByTestId("behind-by-upstream-chapter")); + expect(navigateMock).not.toHaveBeenCalled(); + }); +}); diff --git a/web/src/components/series/BehindByBadge.tsx b/web/src/components/series/BehindByBadge.tsx new file mode 100644 index 00000000..ac8cff86 --- /dev/null +++ b/web/src/components/series/BehindByBadge.tsx @@ -0,0 +1,81 @@ +import { Badge, Tooltip } from "@mantine/core"; +import { useNavigate } from "react-router-dom"; + +export type BehindByVariant = "translation" | "upstream"; +export type BehindByAxis = "chapter" | "volume"; + +export interface BehindByBadgeProps { + /** Translation = warm/orange/clickable (Releases tab); upstream = grey informational. */ + variant: BehindByVariant; + axis: BehindByAxis; + /** Positive integer; the badge is hidden when <= 0. */ + delta: number; + /** Required to navigate when the translation badge is clicked. */ + seriesId: string; + /** Provider/source name shown in the tooltip ("MangaBaka", "MangaUpdates", ...). */ + provider?: string | null; + /** Language list for translation badges; rendered as e.g. "en". */ + language?: string; +} + +/** + * Compact "+N ch" / "+N vol" badge near the series header. Two variants: + * + * - `translation` (orange, actionable): `latestKnownChapter > localMaxChapter`. + * Click navigates to the Releases tab. Phase 6 (MangaUpdates) is the writer. + * - `upstream` (grey, informational): `upstreamChapterGap > 0`. 
Phase 5 metadata + * gap signal — not actionable, no Releases tab to send the user to. + */ +export function BehindByBadge({ + variant, + axis, + delta, + seriesId, + provider, + language, +}: BehindByBadgeProps) { + const navigate = useNavigate(); + + if (!Number.isFinite(delta) || delta <= 0) { + return null; + } + + const unit = axis === "chapter" ? "ch" : "vol"; + const label = + variant === "translation" + ? `+${delta} ${unit} (translation)` + : `+${delta} ${unit} (upstream)`; + + const tooltip = + variant === "translation" + ? `${provider ?? "A release source"} announced ${delta} more ${ + axis === "chapter" ? "chapters" : "volumes" + }${language ? ` in ${language}` : ""} — open Releases.` + : `${provider ?? "An external metadata provider"} reports ${delta} more ${ + axis === "chapter" ? "chapters" : "volumes" + } in the original language.`; + + const color = variant === "translation" ? "orange" : "gray"; + + const badge = ( + <Badge + color={color} + variant="light" + size="sm" + style={{ + cursor: variant === "translation" ? "pointer" : "default", + textTransform: "none", + }} + onClick={ + variant === "translation" + ? 
() => navigate(`/series/${seriesId}#releases`) + : undefined + } + data-testid={`behind-by-${variant}-${axis}`} + > + {label} + </Badge> + ); + + return <Tooltip label={tooltip}>{badge}</Tooltip>; +} diff --git a/web/src/components/series/SeriesReleasesPanel.test.tsx b/web/src/components/series/SeriesReleasesPanel.test.tsx new file mode 100644 index 00000000..dbf9cb82 --- /dev/null +++ b/web/src/components/series/SeriesReleasesPanel.test.tsx @@ -0,0 +1,344 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { + type BulkReleaseActionResponse, + type PaginatedReleases, + type ReleaseLedgerEntry, + type ReleaseSource, + releaseSourcesApi, + releasesApi, +} from "@/api/releases"; +import { + renderWithProviders, + screen, + userEvent, + waitFor, + within, +} from "@/test/utils"; +import { SeriesReleasesPanel } from "./SeriesReleasesPanel"; + +vi.mock("@/api/releases", () => ({ + releasesApi: { + listInbox: vi.fn(), + listForSeries: vi.fn(), + patchEntry: vi.fn(), + dismiss: vi.fn(), + markAcquired: vi.fn(), + delete: vi.fn(), + bulk: vi.fn(), + }, + releaseSourcesApi: { + list: vi.fn(), + update: vi.fn(), + pollNow: vi.fn(), + }, +})); + +const SERIES_ID = "00000000-0000-0000-0000-000000000001"; + +function entry(over: Partial<ReleaseLedgerEntry> = {}): ReleaseLedgerEntry { + return { + id: "ent-1", + seriesId: SERIES_ID, + seriesTitle: "Series", + sourceId: "11111111-1111-1111-1111-111111111111", + externalReleaseId: "ext-1", + payloadUrl: "https://example.com/r/1", + confidence: 0.95, + state: "announced", + observedAt: "2026-05-01T00:00:00Z", + createdAt: "2026-05-01T00:00:00Z", + chapter: 143, + volume: null, + language: "en", + groupOrUploader: "GroupA", + ...over, + }; +} + +function paginated(entries: ReleaseLedgerEntry[]): PaginatedReleases { + return { + data: entries, + page: 1, + pageSize: 100, + total: entries.length, + totalPages: 1, + links: { + self: "/api/v1/series/x/releases", + }, + } as PaginatedReleases; +} + +const list = 
vi.mocked(releasesApi.listForSeries); +const dismiss = vi.mocked(releasesApi.dismiss); +const markAcquired = vi.mocked(releasesApi.markAcquired); +const deleteRelease = vi.mocked(releasesApi.delete); +const bulk = vi.mocked(releasesApi.bulk); + +/** The panel collapses by default; tests expand it once before asserting. */ +async function expandPanel() { + const user = userEvent.setup(); + const toggle = await screen.findByRole("button", { + name: /expand releases/i, + }); + await user.click(toggle); +} + +describe("SeriesReleasesPanel", () => { + beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(releaseSourcesApi.list).mockResolvedValue([]); + }); + + it("collapses by default and only renders the body after the user expands", async () => { + list.mockResolvedValue( + paginated([entry({ id: "a", groupOrUploader: "Group-A" })]), + ); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + // Header carries the expand affordance; while collapsed, body content sits + // in an aria-hidden subtree (Mantine's Collapse) so the toggle's a11y name + // is "Expand releases" and row buttons are hidden from the a11y tree. 
+ await screen.findByRole("button", { name: /expand releases/i }); + expect( + screen.queryByRole("button", { name: /dismiss/i }), + ).not.toBeInTheDocument(); + await expandPanel(); + await screen.findByRole("button", { name: /collapse releases/i }); + await screen.findByRole("button", { name: /dismiss/i, hidden: true }); + }); + + it("renders an empty-state message when no releases exist", async () => { + list.mockResolvedValueOnce(paginated([])); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + await expandPanel(); + await waitFor(() => { + expect(screen.getByText(/no releases yet/i)).toBeInTheDocument(); + }); + }); + + it("renders one row per ledger entry with the chapter label repeated", async () => { + list.mockResolvedValueOnce( + paginated([ + entry({ id: "a", chapter: 143, groupOrUploader: "Group-A" }), + entry({ + id: "b", + chapter: 143, + groupOrUploader: "Group-B", + externalReleaseId: "ext-2", + }), + entry({ + id: "c", + chapter: 142, + groupOrUploader: "Group-C", + externalReleaseId: "ext-3", + }), + ]), + ); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + await expandPanel(); + await waitFor(() => { + expect(screen.getByText("Group-A")).toBeInTheDocument(); + }); + expect(screen.getByText("Group-B")).toBeInTheDocument(); + expect(screen.getByText("Group-C")).toBeInTheDocument(); + // Flat rows: each row carries its own chapter label. Two rows for Ch 143 + // (Group-A and Group-B), one row for Ch 142. 
+ expect(screen.getAllByText(/Ch 143/)).toHaveLength(2); + expect(screen.getAllByText(/Ch 142/)).toHaveLength(1); + }); + + it("renders the source display name from the sources list", async () => { + list.mockResolvedValueOnce( + paginated([ + entry({ + id: "a", + sourceId: "11111111-1111-1111-1111-111111111111", + groupOrUploader: "tsuna69", + }), + ]), + ); + vi.mocked(releaseSourcesApi.list).mockResolvedValue([ + { + id: "11111111-1111-1111-1111-111111111111", + pluginId: "release-nyaa", + sourceKey: "nyaa:user:tsuna69", + displayName: "Nyaa - tsuna69", + kind: "rss_uploader", + cronSchedule: null, + effectiveCronSchedule: "0 * * * *", + enabled: true, + config: null, + createdAt: "2026-01-01T00:00:00Z", + updatedAt: "2026-01-01T00:00:00Z", + } as ReleaseSource, + ]); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + await expandPanel(); + await waitFor(() => { + expect(screen.getByText("Nyaa - tsuna69")).toBeInTheDocument(); + }); + // The UUID-prefix fallback should not appear once the join resolves. 
+ expect(screen.queryByText(/source: 11111111…/)).not.toBeInTheDocument(); + }); + + it("dismisses an announced entry via the dismiss action", async () => { + list.mockResolvedValue( + paginated([entry({ id: "a", groupOrUploader: "OnlyGroup" })]), + ); + dismiss.mockResolvedValueOnce(entry({ id: "a", state: "dismissed" })); + const user = userEvent.setup(); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + await expandPanel(); + await screen.findByText("OnlyGroup"); + const dismissButton = await screen.findByRole("button", { + name: /dismiss/i, + hidden: true, + }); + await user.click(dismissButton); + await waitFor(() => { + expect(dismiss).toHaveBeenCalledWith("a"); + }); + }); + + it("marks an announced entry acquired via the action", async () => { + list.mockResolvedValue( + paginated([entry({ id: "a", groupOrUploader: "OnlyGroup" })]), + ); + markAcquired.mockResolvedValueOnce( + entry({ id: "a", state: "marked_acquired" }), + ); + const user = userEvent.setup(); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + await expandPanel(); + await screen.findByText("OnlyGroup"); + const acquireButton = await screen.findByRole("button", { + name: /mark acquired/i, + hidden: true, + }); + await user.click(acquireButton); + await waitFor(() => { + expect(markAcquired).toHaveBeenCalledWith("a"); + }); + }); + + it("hard-deletes a row via the delete action", async () => { + list.mockResolvedValue( + paginated([entry({ id: "a", groupOrUploader: "OnlyGroup" })]), + ); + deleteRelease.mockResolvedValueOnce({ + affectedReleaseIds: ["a"], + affectedSeriesIds: [SERIES_ID], + affectedSourceIds: [], + } as BulkReleaseActionResponse); + const user = userEvent.setup(); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + await expandPanel(); + await screen.findByText("OnlyGroup"); + const deleteButton = await screen.findByRole("button", { + name: /delete/i, + hidden: true, + }); + await user.click(deleteButton); + await 
waitFor(() => { + expect(deleteRelease).toHaveBeenCalledWith("a"); + }); + }); + + it("bulk-marks selected entries as acquired", async () => { + list.mockResolvedValue( + paginated([ + entry({ id: "a", chapter: 200, groupOrUploader: "Group-A" }), + entry({ id: "b", chapter: 199, groupOrUploader: "Group-B" }), + entry({ id: "c", chapter: 198, groupOrUploader: "Group-C" }), + ]), + ); + bulk.mockResolvedValueOnce({ + affectedReleaseIds: ["a", "b"], + affectedSeriesIds: [SERIES_ID], + affectedSourceIds: [], + } as BulkReleaseActionResponse); + const user = userEvent.setup(); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + await expandPanel(); + await screen.findByText("Group-A"); + // Select rows a and b individually. + await user.click( + await screen.findByRole("checkbox", { + name: "Select release a", + hidden: true, + }), + ); + await user.click( + await screen.findByRole("checkbox", { + name: "Select release b", + hidden: true, + }), + ); + // Action bar appears with the count. Find the bulk action by walking up + // from the "2 selected" label — the per-row "Mark acquired" buttons share + // the same accessible name, so role-by-name returns multiple. 
+ const banner = screen + .getByText("2 selected") + .closest("div.mantine-Card-root"); + if (!banner) throw new Error("bulk banner not found"); + const bulkButton = within(banner as HTMLElement).getByRole("button", { + name: /mark acquired/i, + }); + await user.click(bulkButton); + await waitFor(() => { + expect(bulk).toHaveBeenCalledWith({ + ids: ["a", "b"], + action: "mark-acquired", + }); + }); + }); + + it("bulk-deletes via the Delete button after the modal confirm", async () => { + list.mockResolvedValue( + paginated([ + entry({ id: "a", chapter: 200, groupOrUploader: "Group-A" }), + entry({ id: "b", chapter: 199, groupOrUploader: "Group-B" }), + ]), + ); + bulk.mockResolvedValueOnce({ + affectedReleaseIds: ["a", "b"], + affectedSeriesIds: [SERIES_ID], + affectedSourceIds: [], + } as BulkReleaseActionResponse); + const user = userEvent.setup(); + renderWithProviders(<SeriesReleasesPanel seriesId={SERIES_ID} />); + await expandPanel(); + await screen.findByText("Group-A"); + await user.click( + await screen.findByRole("checkbox", { + name: "Select release a", + hidden: true, + }), + ); + await user.click( + await screen.findByRole("checkbox", { + name: "Select release b", + hidden: true, + }), + ); + // Open the bulk-delete modal from the action bar (scoped to the banner + // because per-row Delete buttons share the accessible name). + const banner = screen + .getByText("2 selected") + .closest("div.mantine-Card-root"); + if (!banner) throw new Error("bulk banner not found"); + await user.click( + within(banner as HTMLElement).getByRole("button", { name: /^delete$/i }), + ); + // Confirm in the modal — its button label includes the count. 
+ const confirmButton = await screen.findByRole("button", { + name: /delete 2 releases/i, + }); + await user.click(confirmButton); + await waitFor(() => { + expect(bulk).toHaveBeenCalledWith({ + ids: ["a", "b"], + action: "delete", + }); + }); + }); +}); diff --git a/web/src/components/series/SeriesReleasesPanel.tsx b/web/src/components/series/SeriesReleasesPanel.tsx new file mode 100644 index 00000000..ea40ff20 --- /dev/null +++ b/web/src/components/series/SeriesReleasesPanel.tsx @@ -0,0 +1,241 @@ +import { + ActionIcon, + Badge, + Box, + Card, + Collapse, + Group, + Loader, + SegmentedControl, + Stack, + Text, + Tooltip, +} from "@mantine/core"; +import { useDisclosure } from "@mantine/hooks"; +import { + IconBellOff, + IconBellRinging, + IconChevronDown, + IconChevronRight, + IconRss, +} from "@tabler/icons-react"; +import { useEffect, useMemo, useState } from "react"; +import type { BulkReleaseAction, ReleaseSource } from "@/api/releases"; +import { ReleasesBulkActionBar } from "@/components/releases/ReleasesBulkActionBar"; +import { ReleasesBulkDeleteModal } from "@/components/releases/ReleasesBulkDeleteModal"; +import { ReleasesTable } from "@/components/releases/ReleasesTable"; +import { + useBulkReleaseAction, + useDeleteRelease, + useDismissRelease, + useMarkReleaseAcquired, + useReleaseSources, + useSeriesReleases, +} from "@/hooks/useReleases"; +import { useUserPreference } from "@/hooks/useUserPreference"; + +interface SeriesReleasesPanelProps { + seriesId: string; +} + +export function SeriesReleasesPanel({ seriesId }: SeriesReleasesPanelProps) { + const [stateView, setStateView] = useState<"new" | "all">("new"); + // Releases panel collapses by default — series detail is the user's main + // landing point and the panel can grow long. They open it deliberately. + const [opened, { toggle }] = useDisclosure(false); + const stateFilter = stateView === "new" ? "announced" : undefined; + + // Per-user mute. 
Persisted via the user_preferences store with localStorage + // caching + debounced server sync. + const [mutedSeriesIds, setMutedSeriesIds] = useUserPreference( + "release_tracking.muted_series_ids", + ); + const isMuted = mutedSeriesIds.includes(seriesId); + const toggleMute = () => { + if (isMuted) { + setMutedSeriesIds(mutedSeriesIds.filter((id) => id !== seriesId)); + } else { + setMutedSeriesIds([...mutedSeriesIds, seriesId]); + } + }; + + const { data, isLoading } = useSeriesReleases(seriesId, { + state: stateFilter, + pageSize: 100, + }); + const { data: sources } = useReleaseSources(); + const dismiss = useDismissRelease(); + const markAcquired = useMarkReleaseAcquired(); + const deleteRelease = useDeleteRelease(); + const bulk = useBulkReleaseAction(); + + const entries = data?.data ?? []; + const [selected, setSelected] = useState<Set<string>>(new Set()); + const [confirmBulkDelete, { open: openBulkDelete, close: closeBulkDelete }] = + useDisclosure(false); + // Drop selections when the visible set changes — IDs that fell off screen + // shouldn't quietly remain selected for the next bulk action. 
+ // biome-ignore lint/correctness/useExhaustiveDependencies: deps are change-triggers + useEffect(() => { + setSelected(new Set()); + }, [stateView, seriesId]); + const toggleAll = () => { + setSelected((prev) => { + const allSelected = + entries.length > 0 && entries.every((e) => prev.has(e.id)); + const next = new Set(prev); + if (allSelected) { + for (const e of entries) next.delete(e.id); + } else { + for (const e of entries) next.add(e.id); + } + return next; + }); + }; + const toggleOne = (id: string) => { + setSelected((prev) => { + const next = new Set(prev); + if (next.has(id)) next.delete(id); + else next.add(id); + return next; + }); + }; + const runBulk = (action: BulkReleaseAction) => { + const ids = Array.from(selected); + if (ids.length === 0) return; + bulk.mutate({ ids, action }, { onSuccess: () => setSelected(new Set()) }); + }; + + // Same client-side join the inbox uses: keep the ledger DTO lean while + // showing a human label instead of a UUID prefix. + const sourceById = useMemo(() => { + const map = new Map<string, ReleaseSource>(); + for (const s of sources ?? []) map.set(s.id, s); + return map; + }, [sources]); + + if (isLoading) { + return ( + <Card withBorder padding="md" radius="md"> + <Group> + <Loader size="sm" /> + <Text size="sm">Loading releases…</Text> + </Group> + </Card> + ); + } + + return ( + <> + <Card withBorder padding="md" radius="md"> + <Stack gap="sm"> + <Group justify="space-between" wrap="nowrap" id="releases"> + <Group + gap="xs" + onClick={toggle} + style={{ cursor: "pointer", flex: 1, minWidth: 0 }} + role="button" + aria-expanded={opened} + aria-label={opened ? "Collapse releases" : "Expand releases"} + > + {opened ? ( + <IconChevronDown size={16} /> + ) : ( + <IconChevronRight size={16} /> + )} + <IconRss size={18} /> + <Text fw={600}>Releases</Text> + <Badge color="gray" variant="light" size="sm"> + {data?.total ?? 
0} + </Badge> + {isMuted && ( + <Badge color="orange" variant="light" size="sm"> + Muted + </Badge> + )} + </Group> + <Group gap="xs"> + <Tooltip + label={ + isMuted + ? "Re-enable announcement toasts and badge for this series" + : "Stop announcement toasts and badge for this series (your account only)" + } + > + <ActionIcon + variant="subtle" + color={isMuted ? "orange" : "gray"} + onClick={toggleMute} + aria-label={isMuted ? "Unmute releases" : "Mute releases"} + > + {isMuted ? ( + <IconBellOff size={16} /> + ) : ( + <IconBellRinging size={16} /> + )} + </ActionIcon> + </Tooltip> + {opened && ( + <SegmentedControl + size="xs" + value={stateView} + onChange={(v) => setStateView(v as "new" | "all")} + data={[ + { value: "new", label: "New" }, + { value: "all", label: "All" }, + ]} + aria-label="Release state filter" + /> + )} + </Group> + </Group> + + <Collapse in={opened}> + {selected.size > 0 && ( + <Box mb="xs"> + <ReleasesBulkActionBar + count={selected.size} + isPending={bulk.isPending} + onAction={runBulk} + onClear={() => setSelected(new Set())} + onDeleteClick={openBulkDelete} + /> + </Box> + )} + {entries.length === 0 ? ( + <Text size="sm" c="dimmed"> + No releases yet. Once a release source picks this series up, new + chapters/volumes will land here. 
+ </Text> + ) : ( + <ReleasesTable + entries={entries} + sourceById={sourceById} + selected={selected} + onToggleOne={toggleOne} + onToggleAll={toggleAll} + onDismiss={(id) => dismiss.mutate(id)} + onMarkAcquired={(id) => markAcquired.mutate(id)} + onDelete={(id) => deleteRelease.mutate(id)} + isDismissPending={dismiss.isPending} + isMarkAcquiredPending={markAcquired.isPending} + isDeletePending={deleteRelease.isPending} + verticalSpacing="xs" + /> + )} + </Collapse> + </Stack> + </Card> + <ReleasesBulkDeleteModal + opened={confirmBulkDelete} + onClose={closeBulkDelete} + onConfirm={() => { + runBulk("delete"); + closeBulkDelete(); + }} + count={selected.size} + isPending={bulk.isPending} + /> + </> + ); +} diff --git a/web/src/components/series/TrackingPanel.test.tsx b/web/src/components/series/TrackingPanel.test.tsx new file mode 100644 index 00000000..a1b72d0d --- /dev/null +++ b/web/src/components/series/TrackingPanel.test.tsx @@ -0,0 +1,167 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { trackingApi } from "@/api/tracking"; +import { renderWithProviders, screen, userEvent, waitFor } from "@/test/utils"; +import { TrackingPanel } from "./TrackingPanel"; + +vi.mock("@/api/tracking", () => ({ + trackingApi: { + getTracking: vi.fn(), + updateTracking: vi.fn(), + listAliases: vi.fn(), + createAlias: vi.fn(), + deleteAlias: vi.fn(), + }, +})); + +const get = vi.mocked(trackingApi.getTracking); +const update = vi.mocked(trackingApi.updateTracking); +const list = vi.mocked(trackingApi.listAliases); +const create = vi.mocked(trackingApi.createAlias); +const del = vi.mocked(trackingApi.deleteAlias); + +const SERIES_ID = "00000000-0000-0000-0000-000000000001"; + +const baseTracking = { + seriesId: SERIES_ID, + tracked: false, + trackChapters: true, + trackVolumes: true, + createdAt: "2024-01-01T00:00:00Z", + updatedAt: "2024-01-01T00:00:00Z", +}; + +const baseAlias = ( + alias: string, + source: "manual" | "metadata" = "manual", +) => ({ + 
id: `alias-${alias}`, + seriesId: SERIES_ID, + alias, + normalized: alias.toLowerCase(), + source, + createdAt: "2024-01-01T00:00:00Z", +}); + +describe("TrackingPanel", () => { + beforeEach(() => { + vi.clearAllMocks(); + list.mockResolvedValue([]); + }); + + it("renders the toggle in untracked state", async () => { + get.mockResolvedValue({ ...baseTracking }); + + renderWithProviders(<TrackingPanel seriesId={SERIES_ID} canEdit={true} />); + + await waitFor(() => { + expect( + screen.getByRole("switch", { name: /Toggle release tracking/i }), + ).not.toBeChecked(); + }); + + // Announce switches are hidden when not tracked. + expect(screen.queryByText("Announce")).not.toBeInTheDocument(); + }); + + it("shows announce flags when tracked", async () => { + get.mockResolvedValue({ + ...baseTracking, + tracked: true, + latestKnownChapter: 142.5, + }); + + renderWithProviders(<TrackingPanel seriesId={SERIES_ID} canEdit={true} />); + + await waitFor(() => { + expect(screen.getByText("Announce")).toBeInTheDocument(); + }); + expect(screen.getByLabelText("Chapters")).toBeChecked(); + expect(screen.getByLabelText("Volumes")).toBeChecked(); + }); + + it("toggles tracked via mutation", async () => { + const user = userEvent.setup(); + get.mockResolvedValue({ ...baseTracking }); + update.mockResolvedValue({ ...baseTracking, tracked: true }); + + renderWithProviders(<TrackingPanel seriesId={SERIES_ID} canEdit={true} />); + + const toggle = await screen.findByRole("switch", { + name: /Toggle release tracking/i, + }); + await user.click(toggle); + + await waitFor(() => { + expect(update).toHaveBeenCalledWith(SERIES_ID, { tracked: true }); + }); + }); + + it("renders aliases and supports add (after expanding the collapsed panel)", async () => { + const user = userEvent.setup(); + get.mockResolvedValue({ ...baseTracking, tracked: true }); + list.mockResolvedValue([baseAlias("Existing")]); + create.mockImplementation(async (_id, req) => baseAlias(req.alias)); + + 
renderWithProviders(<TrackingPanel seriesId={SERIES_ID} canEdit={true} />); + + // The panel is collapsed by default — expand to reach the alias UI. + await user.click( + await screen.findByRole("button", { name: /Expand release tracking/i }), + ); + + await screen.findByText("Existing"); + + const input = screen.getByPlaceholderText(/Add an alias/i); + await user.type(input, "New Alias"); + await user.click(screen.getByRole("button", { name: /^Add$/i })); + + await waitFor(() => { + expect(create).toHaveBeenCalledWith(SERIES_ID, { alias: "New Alias" }); + }); + }); + + it("hides edit affordances when canEdit=false", async () => { + get.mockResolvedValue({ ...baseTracking, tracked: true }); + list.mockResolvedValue([baseAlias("Read Only")]); + + renderWithProviders(<TrackingPanel seriesId={SERIES_ID} canEdit={false} />); + + await screen.findByText("Read Only"); + + expect( + screen.queryByPlaceholderText(/Add an alias/i), + ).not.toBeInTheDocument(); + expect( + screen.queryByRole("button", { name: /^Add$/i }), + ).not.toBeInTheDocument(); + expect( + screen.getByRole("switch", { name: /Toggle release tracking/i }), + ).toBeDisabled(); + }); + + it("calls deleteAlias when remove is clicked", async () => { + const user = userEvent.setup(); + get.mockResolvedValue({ ...baseTracking, tracked: true }); + const alias = baseAlias("Delete Me"); + list.mockResolvedValue([alias]); + del.mockResolvedValue(undefined); + + renderWithProviders(<TrackingPanel seriesId={SERIES_ID} canEdit={true} />); + + // Expand to reveal the alias list. + await user.click( + await screen.findByRole("button", { name: /Expand release tracking/i }), + ); + + // findByRole waits past Mantine's Collapse animation into the + // accessibility tree; getByRole here would race against it. 
+ const removeButton = await screen.findByRole("button", { + name: /Remove alias Delete Me/i, + }); + await user.click(removeButton); + + await waitFor(() => { + expect(del).toHaveBeenCalledWith(SERIES_ID, alias.id); + }); + }); +}); diff --git a/web/src/components/series/TrackingPanel.tsx b/web/src/components/series/TrackingPanel.tsx new file mode 100644 index 00000000..7a418bcb --- /dev/null +++ b/web/src/components/series/TrackingPanel.tsx @@ -0,0 +1,301 @@ +import { + ActionIcon, + Badge, + Box, + Button, + Card, + Collapse, + Divider, + Group, + NumberInput, + Stack, + Switch, + Text, + TextInput, + Tooltip, + UnstyledButton, +} from "@mantine/core"; +import { + IconBellRinging, + IconChevronDown, + IconChevronRight, + IconPlus, + IconTrash, +} from "@tabler/icons-react"; +import { type FormEvent, useState } from "react"; +import { + useCreateSeriesAlias, + useDeleteSeriesAlias, + useSeriesAliases, + useSeriesTracking, + useUpdateSeriesTracking, +} from "@/hooks/useSeriesTracking"; + +interface TrackingPanelProps { + seriesId: string; + /** When false, shows read-only state (used for users without SeriesWrite). */ + canEdit: boolean; +} + +/** + * Inline panel on the series detail page for release-tracking config. + * + * Shows: tracked toggle, status, chapter/volume tracking flags, latest known + * chapter/volume, and the aliases list. All mutations debounce-free — the + * surface is small enough that immediate fire-on-blur is fine. + */ +export function TrackingPanel({ seriesId, canEdit }: TrackingPanelProps) { + const trackingQuery = useSeriesTracking(seriesId); + const aliasesQuery = useSeriesAliases(seriesId); + const updateTracking = useUpdateSeriesTracking(seriesId); + const createAlias = useCreateSeriesAlias(seriesId); + const deleteAlias = useDeleteSeriesAlias(seriesId); + + const [aliasDraft, setAliasDraft] = useState(""); + // Default collapsed so the panel is a thin one-liner unless the user + // explicitly wants to fiddle. 
The summary in the header carries the + // load-bearing info (tracking on/off, last-known marks, alias count). + const [expanded, setExpanded] = useState(false); + + const tracking = trackingQuery.data; + const aliases = aliasesQuery.data ?? []; + + const handleAddAlias = async (e: FormEvent) => { + e.preventDefault(); + const trimmed = aliasDraft.trim(); + if (!trimmed) return; + try { + await createAlias.mutateAsync({ alias: trimmed }); + setAliasDraft(""); + } catch { + // Notification surfaced inside the hook. + } + }; + + // Build a compact one-line summary that conveys "what is this series's + // tracking state right now" without expanding. Examples: + // "Tracking · ch 142 · vol 15 · 3 aliases" + // "Tracking · ch 142 · 0 aliases" + // "Not tracked" + // Untracked summary keeps the panel minimal — the toggle is the only + // actionable control until tracking is on. + const summary = (() => { + if (!tracking?.tracked) return "Not tracked"; + const parts: string[] = ["Tracking"]; + if (tracking.trackChapters && tracking.latestKnownChapter != null) { + parts.push(`ch ${tracking.latestKnownChapter}`); + } + if (tracking.trackVolumes && tracking.latestKnownVolume != null) { + parts.push(`vol ${tracking.latestKnownVolume}`); + } + parts.push(`${aliases.length} alias${aliases.length === 1 ? "" : "es"}`); + return parts.join(" · "); + })(); + + return ( + <Card withBorder padding="md" radius="md"> + <Stack gap="sm"> + <Group justify="space-between" wrap="nowrap"> + <UnstyledButton + onClick={() => setExpanded((v) => !v)} + aria-expanded={expanded} + aria-label={ + expanded ? "Collapse release tracking" : "Expand release tracking" + } + style={{ flex: 1, minWidth: 0 }} + > + <Group gap="xs" wrap="nowrap"> + {expanded ? 
( + <IconChevronDown size={16} /> + ) : ( + <IconChevronRight size={16} /> + )} + <IconBellRinging size={18} /> + <Text fw={600}>Release tracking</Text> + {tracking?.tracked && ( + <Badge color="green" variant="light" size="sm"> + TRACKING + </Badge> + )} + <Text size="sm" c="dimmed" truncate> + {summary} + </Text> + </Group> + </UnstyledButton> + <Switch + checked={tracking?.tracked ?? false} + onChange={(event) => + updateTracking.mutate({ tracked: event.currentTarget.checked }) + } + disabled={!canEdit || trackingQuery.isLoading} + aria-label="Toggle release tracking" + /> + </Group> + + <Collapse in={expanded}> + <Stack gap="sm" mt="xs"> + {tracking?.tracked && ( + <> + <Stack gap={4}> + <Text size="sm" fw={500}> + Announce + </Text> + <Group gap="md"> + <Switch + label="Chapters" + checked={tracking.trackChapters} + onChange={(e) => + updateTracking.mutate({ + trackChapters: e.currentTarget.checked, + }) + } + disabled={!canEdit} + /> + <Switch + label="Volumes" + checked={tracking.trackVolumes} + onChange={(e) => + updateTracking.mutate({ + trackVolumes: e.currentTarget.checked, + }) + } + disabled={!canEdit} + /> + </Group> + </Stack> + + <Group grow> + <NumberInput + label="Latest known chapter" + placeholder="—" + value={tracking.latestKnownChapter ?? ""} + onChange={(value) => { + const next = + typeof value === "number" && Number.isFinite(value) + ? value + : null; + updateTracking.mutate({ latestKnownChapter: next }); + }} + allowDecimal + decimalScale={2} + step={0.1} + disabled={!canEdit} + /> + <NumberInput + label="Latest known volume" + placeholder="—" + value={tracking.latestKnownVolume ?? ""} + onChange={(value) => { + const next = + typeof value === "number" && + Number.isFinite(value) && + Number.isInteger(value) + ? 
value + : null; + updateTracking.mutate({ latestKnownVolume: next }); + }} + allowDecimal={false} + step={1} + disabled={!canEdit} + /> + </Group> + </> + )} + + <Divider my="xs" /> + + <Box> + <Group justify="space-between" mb="xs"> + <Text size="sm" fw={500}> + Matcher aliases + </Text> + <Text size="xs" c="dimmed"> + {aliases.length} alias{aliases.length === 1 ? "" : "es"} + </Text> + </Group> + <Text size="xs" c="dimmed" mb="xs"> + Used by sources that match by title (Nyaa, MangaUpdates without + an ID). + </Text> + + {aliases.length === 0 && ( + <Text size="sm" c="dimmed" fs="italic" mb="xs"> + No aliases yet. Add one below or run the metadata backfill + task. + </Text> + )} + + <Stack gap={4} mb="xs"> + {aliases.map((alias) => ( + <Group + key={alias.id} + justify="space-between" + wrap="nowrap" + gap="xs" + > + <Group + gap="xs" + wrap="nowrap" + style={{ minWidth: 0, flex: 1 }} + > + <Text size="sm" truncate> + {alias.alias} + </Text> + <Badge + color={alias.source === "manual" ? 
"violet" : "gray"} + variant="light" + size="xs" + > + {alias.source} + </Badge> + </Group> + {canEdit && ( + <Tooltip label="Remove alias"> + <ActionIcon + size="sm" + color="red" + variant="subtle" + onClick={() => deleteAlias.mutate(alias.id)} + loading={ + deleteAlias.isPending && + deleteAlias.variables === alias.id + } + aria-label={`Remove alias ${alias.alias}`} + > + <IconTrash size={14} /> + </ActionIcon> + </Tooltip> + )} + </Group> + ))} + </Stack> + + {canEdit && ( + <form onSubmit={handleAddAlias}> + <Group gap="xs" align="flex-end"> + <TextInput + placeholder="Add an alias…" + value={aliasDraft} + onChange={(e) => setAliasDraft(e.currentTarget.value)} + style={{ flex: 1 }} + disabled={createAlias.isPending} + /> + <Button + type="submit" + size="sm" + leftSection={<IconPlus size={14} />} + loading={createAlias.isPending} + disabled={!aliasDraft.trim()} + > + Add + </Button> + </Group> + </form> + )} + </Box> + </Stack> + </Collapse> + </Stack> + </Card> + ); +} diff --git a/web/src/components/series/index.ts b/web/src/components/series/index.ts index fc01a9ba..d3c6fd5c 100644 --- a/web/src/components/series/index.ts +++ b/web/src/components/series/index.ts @@ -1,4 +1,5 @@ export { AlternateTitles } from "./AlternateTitles"; +export { BehindByBadge } from "./BehindByBadge"; export { CommunityRating } from "./CommunityRating"; export { CustomMetadataDisplay } from "./CustomMetadataDisplay"; export { ExternalIds } from "./ExternalIds"; @@ -11,4 +12,6 @@ export { SeriesMetadata } from "./SeriesMetadata"; export { SeriesMetadataEditModal } from "./SeriesMetadataEditModal"; export { SeriesRating } from "./SeriesRating"; export { SeriesRatingModal } from "./SeriesRatingModal"; +export { SeriesReleasesPanel } from "./SeriesReleasesPanel"; export { SeriesSharingTags } from "./SeriesSharingTags"; +export { TrackingPanel } from "./TrackingPanel"; diff --git a/web/src/hooks/useEntityEvents.test.ts b/web/src/hooks/useEntityEvents.test.ts index 81cb9277..8fcd6126 
100644 --- a/web/src/hooks/useEntityEvents.test.ts +++ b/web/src/hooks/useEntityEvents.test.ts @@ -7,7 +7,7 @@ import * as eventsApi from "@/api/events"; import { useAuthStore } from "@/store/authStore"; import { useCoverUpdatesStore } from "@/store/coverUpdatesStore"; import type { EntityChangeEvent } from "@/types"; -import { useEntityEvents } from "./useEntityEvents"; +import { shouldNotifyRelease, useEntityEvents } from "./useEntityEvents"; // Mock the events API vi.mock("@/api/events"); @@ -509,3 +509,74 @@ describe("useEntityEvents", () => { consoleError.mockRestore(); }); }); + +// ============================================================================= +// shouldNotifyRelease — pure filter predicate +// ============================================================================= + +describe("shouldNotifyRelease", () => { + const baseParams = { + seriesId: "s1", + pluginId: "release-nyaa", + language: "en", + notifyLanguagesValue: undefined, + notifyPluginsValue: undefined, + mutedSeriesIds: [] as readonly string[], + }; + + it("lets everything through when filters are empty", () => { + expect(shouldNotifyRelease(baseParams)).toBe(true); + }); + + it("blocks events for muted series", () => { + expect(shouldNotifyRelease({ ...baseParams, mutedSeriesIds: ["s1"] })).toBe( + false, + ); + }); + + it("enforces the language allowlist (case-insensitive)", () => { + // Allowlist is `["EN"]` (uppercase) and event language is `"en"` — + // the predicate normalizes both sides. 
+ expect( + shouldNotifyRelease({ + ...baseParams, + notifyLanguagesValue: '["EN"]', + language: "en", + }), + ).toBe(true); + expect( + shouldNotifyRelease({ + ...baseParams, + notifyLanguagesValue: '["en"]', + language: "es", + }), + ).toBe(false); + }); + + it("enforces the plugin allowlist", () => { + expect( + shouldNotifyRelease({ + ...baseParams, + notifyPluginsValue: '["release-mangaupdates"]', + pluginId: "release-mangaupdates", + }), + ).toBe(true); + expect( + shouldNotifyRelease({ + ...baseParams, + notifyPluginsValue: '["release-mangaupdates"]', + pluginId: "release-nyaa", + }), + ).toBe(false); + }); + + it("treats invalid JSON in setting values as 'no filter'", () => { + expect( + shouldNotifyRelease({ + ...baseParams, + notifyLanguagesValue: "{not valid json}", + notifyPluginsValue: "also broken", + }), + ).toBe(true); + }); +}); diff --git a/web/src/hooks/useEntityEvents.ts b/web/src/hooks/useEntityEvents.ts index 719ac51d..d33e293f 100644 --- a/web/src/hooks/useEntityEvents.ts +++ b/web/src/hooks/useEntityEvents.ts @@ -1,8 +1,11 @@ +import { notifications } from "@mantine/notifications"; import { useQueryClient } from "@tanstack/react-query"; import { useEffect, useState } from "react"; import { eventsApi } from "@/api/events"; import { useAuthStore } from "@/store/authStore"; import { useCoverUpdatesStore } from "@/store/coverUpdatesStore"; +import { useReleaseAnnouncementsStore } from "@/store/releaseAnnouncementsStore"; +import { useUserPreferencesStore } from "@/store/userPreferencesStore"; import type { EntityChangeEvent } from "@/types"; import { createDevLog } from "@/utils/devLog"; @@ -10,6 +13,60 @@ type ConnectionState = "connecting" | "connected" | "disconnected" | "failed"; const log = createDevLog("[SSE]"); +/** Best-effort decode of a JSON-array string (settings + user_preferences + * values are stored as JSON-encoded strings). Non-string entries and parse + * failures collapse to an empty list. 
*/ +function parseStringArray(value: string | undefined | null): string[] { + if (!value) return []; + try { + const parsed = JSON.parse(value); + return Array.isArray(parsed) + ? parsed.filter((v): v is string => typeof v === "string") + : []; + } catch { + return []; + } +} + +/** + * Decide whether a `release_announced` event should bump the badge / surface + * a toast for the current user. + * + * Three filters apply (in order): + * 1. Per-user mute (user_preferences) — drops the event for muted series. + * 2. Server-wide language allowlist — empty = let everything through. + * 3. Server-wide plugin allowlist — empty = let everything through. + * + * Pure helper, exported only for testing. + */ +export function shouldNotifyRelease(params: { + seriesId: string; + pluginId: string; + language: string; + notifyLanguagesValue: string | undefined | null; + notifyPluginsValue: string | undefined | null; + mutedSeriesIds: readonly string[]; +}): boolean { + if (params.mutedSeriesIds.includes(params.seriesId)) return false; + + const allowedLanguages = parseStringArray(params.notifyLanguagesValue).map( + (l) => l.toLowerCase(), + ); + if ( + allowedLanguages.length > 0 && + !allowedLanguages.includes(params.language.toLowerCase()) + ) { + return false; + } + + const allowedPlugins = parseStringArray(params.notifyPluginsValue); + if (allowedPlugins.length > 0 && !allowedPlugins.includes(params.pluginId)) { + return false; + } + + return true; +} + /** * React hook that subscribes to entity change events and automatically * invalidates relevant React Query caches when entities are created, @@ -228,6 +285,81 @@ function handleEntityEvent( break; } + case "release_announced": { + // Snapshot the latest filter state synchronously inside the SSE + // callback so the predicate sees fresh data on every event. 
+ // + // Server-wide allowlists live in React Query cache (loaded by the + // settings page); per-user mutes live in the userPreferences store + // (auto-loaded + persisted to localStorage with debounced sync). + // + // The query keys here MUST match what the settings page uses — kept + // in sync explicitly so a typo doesn't silently bypass filtering. + const notifyLanguagesSetting = queryClient.getQueryData<{ + value?: string; + }>(["admin-setting", "release_tracking.notify_languages"]); + const notifyPluginsSetting = queryClient.getQueryData<{ + value?: string; + }>(["admin-setting", "release_tracking.notify_plugins"]); + const mutedSeriesIds = useUserPreferencesStore + .getState() + .getPreference("release_tracking.muted_series_ids"); + if ( + !shouldNotifyRelease({ + seriesId: event.seriesId, + pluginId: event.pluginId, + language: event.language ?? "", + notifyLanguagesValue: notifyLanguagesSetting?.value, + notifyPluginsValue: notifyPluginsSetting?.value, + mutedSeriesIds, + }) + ) { + break; + } + useReleaseAnnouncementsStore.getState().bump(); + + // Refresh inbox + per-series ledger views in case the user is + // watching them. + queryClient.invalidateQueries({ queryKey: ["releases"] }); + queryClient.invalidateQueries({ + queryKey: ["series", event.seriesId, "releases"], + }); + // Refresh the series tracking row so the Behind-by-N badge can + // pick up the latest_known_* high-water mark advance. + queryClient.invalidateQueries({ + queryKey: ["series", event.seriesId, "tracking"], + }); + // Refresh the full series so localMaxChapter / upstream gap props + // recompute against the latest state. + queryClient.invalidateQueries({ + queryKey: ["series", event.seriesId, "full"], + }); + + // Surface a low-priority toast. Toast text uses chapter or volume + // when the source provided one; falls back to a neutral message. + const label = + event.chapter !== null && event.chapter !== undefined + ? 
`Ch ${event.chapter}` + : event.volume !== null && event.volume !== undefined + ? `Vol ${event.volume}` + : "New release"; + notifications.show({ + id: `release-${event.ledgerId}`, + title: "New release", + message: `${label} from ${event.pluginId}`, + color: "orange", + }); + break; + } + + case "release_source_polled": { + // A release source's poll task finished; refresh the Release tracking + // settings list so users see updated last_polled_at / last_summary + // / status without manually reloading. Cheap: one query invalidate. + queryClient.invalidateQueries({ queryKey: ["release-sources"] }); + break; + } + default: log("Unknown event type:", event); } diff --git a/web/src/hooks/useReleaseTrackingApplicability.ts b/web/src/hooks/useReleaseTrackingApplicability.ts new file mode 100644 index 00000000..f4f27307 --- /dev/null +++ b/web/src/hooks/useReleaseTrackingApplicability.ts @@ -0,0 +1,39 @@ +import { useQuery } from "@tanstack/react-query"; +import { releaseSourcesApi } from "@/api/releases"; + +/** + * Whether release tracking is available in the user's current scope. + * + * Backed by `GET /api/v1/release-sources/applicability`, which returns + * `applicable: true` when at least one enabled `release_source` plugin + * applies to `libraryId` (or, with `libraryId` omitted, applies to *any* + * library — useful for the global navigation Releases entry). + * + * Single source of truth for three UI gates: + * + * 1. **Per-series Tracking panel + Releases tab**: hide entirely on + * libraries with no covering plugin. Avoids dead-end UI like "click to + * track this series" on a library that has no plugin to actually do + * anything with the tracked state. + * + * 2. **Bulk-selection menu Track / Don't track entries**: only show when + * at least one selected series's library is covered. Mirrors how + * `getActions("series:bulk")` gates other plugin-driven entries. + * + * 3. 
**Top-level "Releases" navigation**: hidden when no plugin is + * installed at all (no `libraryId` argument). + * + * The query is cheap (one DB hit, no joins) and stale-cached for 5 minutes + * because the answer only flips when an admin enables/disables a plugin + * or changes its library scope — both rare operations. + */ +export function useReleaseTrackingApplicability(libraryId?: string) { + return useQuery({ + queryKey: ["release-tracking-applicability", libraryId ?? null], + queryFn: () => releaseSourcesApi.applicability(libraryId), + // Plugin install/disable is rare; treat the answer as essentially static + // for the life of a normal session. Mutations on the plugin admin page + // can invalidate this key explicitly if we ever want instant updates. + staleTime: 5 * 60 * 1000, + }); +} diff --git a/web/src/hooks/useReleases.ts b/web/src/hooks/useReleases.ts new file mode 100644 index 00000000..70cc4de3 --- /dev/null +++ b/web/src/hooks/useReleases.ts @@ -0,0 +1,269 @@ +import { notifications } from "@mantine/notifications"; +import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { useEffect, useRef } from "react"; +import { + type BulkReleaseActionRequest, + type BulkReleaseActionResponse, + type DeleteReleaseResponse, + type PaginatedReleases, + type ReleaseFacets, + type ReleaseFacetsParams, + type ReleaseInboxParams, + type ReleaseLedgerEntry, + type ReleaseSource, + type ResetReleaseSourceResponse, + releaseSourcesApi, + releasesApi, + type SeriesReleaseListParams, + type UpdateReleaseLedgerEntryRequest, + type UpdateReleaseSourceRequest, +} from "@/api/releases"; +import { useTaskProgress } from "@/hooks/useTaskProgress"; + +const RELEASE_POLL_TASK_TYPE = "poll_release_source"; + +export const releasesKeys = { + inbox: (params: ReleaseInboxParams) => ["releases", "inbox", params] as const, + facets: (params: ReleaseFacetsParams) => + ["releases", "facets", params] as const, + series: (seriesId: string, params: 
SeriesReleaseListParams) => + ["series", seriesId, "releases", params] as const, + inboxRoot: ["releases", "inbox"] as const, + sourcesRoot: ["release-sources"] as const, +}; + +export function useReleaseFacets(params: ReleaseFacetsParams = {}) { + return useQuery<ReleaseFacets>({ + queryKey: releasesKeys.facets(params), + queryFn: () => releasesApi.facets(params), + }); +} + +export function useDeleteRelease() { + const queryClient = useQueryClient(); + return useMutation<DeleteReleaseResponse, Error, string>({ + mutationFn: (releaseId) => releasesApi.delete(releaseId), + onSuccess: () => { + // Delete touches the ledger and (server-side) the source's etag. + // Invalidate both so the table and the source-admin row refresh. + queryClient.invalidateQueries({ queryKey: ["releases"] }); + queryClient.invalidateQueries({ queryKey: ["series"] }); + queryClient.invalidateQueries({ queryKey: releasesKeys.sourcesRoot }); + }, + onError: notifyError("Failed to delete release"), + }); +} + +export function useBulkReleaseAction() { + const queryClient = useQueryClient(); + return useMutation< + BulkReleaseActionResponse, + Error, + BulkReleaseActionRequest + >({ + mutationFn: (request) => releasesApi.bulk(request), + onSuccess: (data) => { + const { affected, action } = data; + const verb = + action === "dismiss" + ? "Dismissed" + : action === "mark-acquired" + ? "Marked acquired" + : action === "ignore" + ? "Ignored" + : action === "reset" + ? "Reset" + : "Deleted"; + const noun = affected === 1 ? "release" : "releases"; + notifications.show({ + title: `${verb} ${affected} ${noun}`, + // Surface the etag-clear side effect for delete so the user knows + // the row will come back on the next poll. + message: + action === "delete" + ? "Affected sources will re-fetch on the next poll." + : undefined, + color: action === "delete" ? 
"orange" : "blue", + }); + queryClient.invalidateQueries({ queryKey: ["releases"] }); + queryClient.invalidateQueries({ queryKey: ["series"] }); + if (action === "delete") { + queryClient.invalidateQueries({ queryKey: releasesKeys.sourcesRoot }); + } + }, + onError: notifyError("Bulk action failed"), + }); +} + +export function useReleaseInbox(params: ReleaseInboxParams = {}) { + return useQuery<PaginatedReleases>({ + queryKey: releasesKeys.inbox(params), + queryFn: () => releasesApi.listInbox(params), + }); +} + +export function useSeriesReleases( + seriesId: string, + params: SeriesReleaseListParams = {}, + enabled = true, +) { + return useQuery<PaginatedReleases>({ + queryKey: releasesKeys.series(seriesId, params), + queryFn: () => releasesApi.listForSeries(seriesId, params), + enabled: enabled && Boolean(seriesId), + }); +} + +function notifyError(title: string) { + return (error: Error & { response?: { data?: { error?: string } } }) => { + notifications.show({ + title, + message: error.response?.data?.error || error.message || "Unknown error", + color: "red", + }); + }; +} + +export function useDismissRelease() { + const queryClient = useQueryClient(); + return useMutation<ReleaseLedgerEntry, Error, string>({ + mutationFn: (releaseId) => releasesApi.dismiss(releaseId), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ["releases"] }); + queryClient.invalidateQueries({ queryKey: ["series"] }); + }, + onError: notifyError("Failed to dismiss release"), + }); +} + +export function useMarkReleaseAcquired() { + const queryClient = useQueryClient(); + return useMutation<ReleaseLedgerEntry, Error, string>({ + mutationFn: (releaseId) => releasesApi.markAcquired(releaseId), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ["releases"] }); + queryClient.invalidateQueries({ queryKey: ["series"] }); + }, + onError: notifyError("Failed to mark release acquired"), + }); +} + +export function usePatchRelease() { + const queryClient = 
useQueryClient(); + return useMutation< + ReleaseLedgerEntry, + Error, + { releaseId: string; update: UpdateReleaseLedgerEntryRequest } + >({ + mutationFn: ({ releaseId, update }) => + releasesApi.patchEntry(releaseId, update), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ["releases"] }); + queryClient.invalidateQueries({ queryKey: ["series"] }); + }, + onError: notifyError("Failed to update release"), + }); +} + +export function useReleaseSources() { + const queryClient = useQueryClient(); + const { activeTasks } = useTaskProgress(); + + const query = useQuery<ReleaseSource[]>({ + queryKey: releasesKeys.sourcesRoot, + queryFn: () => releaseSourcesApi.list(), + // Belt-and-braces: SSE `release_source_polled` invalidates this query, but + // very fast polls can race the event pipeline. While any release-poll task + // is in flight, refetch every 5s so `lastPolledAt` / `lastSummary` / + // `lastError` catch up even if the event is missed. Stops once no polls + // are active. + refetchInterval: () => { + const hasActivePoll = Array.from(activeTasks.values()).some( + (task) => + task.taskType === RELEASE_POLL_TASK_TYPE && + (task.status === "pending" || task.status === "running"), + ); + return hasActivePoll ? 5000 : false; + }, + }); + + // Refresh immediately when a release-poll task transitions to a terminal + // state. `useTaskProgress` keeps completed/failed entries around briefly, + // so we watch for the status flip rather than disappearance. 
+ const prevStatusesRef = useRef<Map<string, string>>(new Map()); + useEffect(() => { + const prev = prevStatusesRef.current; + const next = new Map<string, string>(); + + for (const task of activeTasks.values()) { + if (task.taskType !== RELEASE_POLL_TASK_TYPE) continue; + next.set(task.taskId, task.status); + + const prevStatus = prev.get(task.taskId); + if ( + prevStatus && + prevStatus !== task.status && + (task.status === "completed" || task.status === "failed") + ) { + queryClient.invalidateQueries({ queryKey: releasesKeys.sourcesRoot }); + } + } + + prevStatusesRef.current = next; + }, [activeTasks, queryClient]); + + return query; +} + +export function useUpdateReleaseSource() { + const queryClient = useQueryClient(); + return useMutation< + ReleaseSource, + Error, + { sourceId: string; update: UpdateReleaseSourceRequest } + >({ + mutationFn: ({ sourceId, update }) => + releaseSourcesApi.update(sourceId, update), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: releasesKeys.sourcesRoot }); + }, + onError: notifyError("Failed to update source"), + }); +} + +export function usePollReleaseSourceNow() { + const queryClient = useQueryClient(); + return useMutation<{ status: string; message: string }, Error, string>({ + mutationFn: (sourceId) => releaseSourcesApi.pollNow(sourceId), + onSuccess: () => { + notifications.show({ + title: "Poll enqueued", + message: "The release source will be polled shortly.", + color: "blue", + }); + queryClient.invalidateQueries({ queryKey: releasesKeys.sourcesRoot }); + }, + onError: notifyError("Failed to enqueue poll"), + }); +} + +export function useResetReleaseSource() { + const queryClient = useQueryClient(); + return useMutation<ResetReleaseSourceResponse, Error, string>({ + mutationFn: (sourceId) => releaseSourcesApi.reset(sourceId), + onSuccess: (data) => { + notifications.show({ + title: "Source reset", + message: `Cleared ${data.deletedLedgerEntries} ledger ${ + data.deletedLedgerEntries === 1 ? 
"entry" : "entries" + }. Click "Poll now" to re-fetch.`, + color: "blue", + }); + // Reset wipes ledger rows, so invalidate everything that reads them. + queryClient.invalidateQueries({ queryKey: releasesKeys.sourcesRoot }); + queryClient.invalidateQueries({ queryKey: releasesKeys.inboxRoot }); + queryClient.invalidateQueries({ queryKey: ["series"] }); + }, + onError: notifyError("Failed to reset source"), + }); +} diff --git a/web/src/hooks/useSeriesTracking.ts b/web/src/hooks/useSeriesTracking.ts new file mode 100644 index 00000000..fc9e6c03 --- /dev/null +++ b/web/src/hooks/useSeriesTracking.ts @@ -0,0 +1,86 @@ +import { notifications } from "@mantine/notifications"; +import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { + type CreateSeriesAliasRequest, + type SeriesAlias, + type SeriesTracking, + trackingApi, + type UpdateSeriesTrackingRequest, +} from "@/api/tracking"; + +const trackingKey = (seriesId: string) => + ["series", seriesId, "tracking"] as const; +const aliasesKey = (seriesId: string) => + ["series", seriesId, "aliases"] as const; + +export function useSeriesTracking(seriesId: string, enabled = true) { + return useQuery<SeriesTracking>({ + queryKey: trackingKey(seriesId), + queryFn: () => trackingApi.getTracking(seriesId), + enabled: enabled && Boolean(seriesId), + }); +} + +export function useUpdateSeriesTracking(seriesId: string) { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (update: UpdateSeriesTrackingRequest) => + trackingApi.updateTracking(seriesId, update), + onSuccess: (data) => { + queryClient.setQueryData(trackingKey(seriesId), data); + }, + onError: (error: Error & { response?: { data?: { error?: string } } }) => { + notifications.show({ + title: "Failed to update tracking", + message: + error.response?.data?.error || error.message || "Unknown error", + color: "red", + }); + }, + }); +} + +export function useSeriesAliases(seriesId: string, enabled = true) { + return 
useQuery<SeriesAlias[]>({ + queryKey: aliasesKey(seriesId), + queryFn: () => trackingApi.listAliases(seriesId), + enabled: enabled && Boolean(seriesId), + }); +} + +export function useCreateSeriesAlias(seriesId: string) { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (request: CreateSeriesAliasRequest) => + trackingApi.createAlias(seriesId, request), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: aliasesKey(seriesId) }); + }, + onError: (error: Error & { response?: { data?: { error?: string } } }) => { + notifications.show({ + title: "Failed to add alias", + message: + error.response?.data?.error || error.message || "Unknown error", + color: "red", + }); + }, + }); +} + +export function useDeleteSeriesAlias(seriesId: string) { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (aliasId: string) => trackingApi.deleteAlias(seriesId, aliasId), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: aliasesKey(seriesId) }); + }, + onError: (error: Error & { response?: { data?: { error?: string } } }) => { + notifications.show({ + title: "Failed to remove alias", + message: + error.response?.data?.error || error.message || "Unknown error", + color: "red", + }); + }, + }); +} diff --git a/web/src/pages/ReleasesInbox.test.tsx b/web/src/pages/ReleasesInbox.test.tsx new file mode 100644 index 00000000..13bb9782 --- /dev/null +++ b/web/src/pages/ReleasesInbox.test.tsx @@ -0,0 +1,290 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { + type PaginatedReleases, + type ReleaseFacets, + type ReleaseLedgerEntry, + type ReleaseSource, + releaseSourcesApi, + releasesApi, +} from "@/api/releases"; +import { useReleaseAnnouncementsStore } from "@/store/releaseAnnouncementsStore"; +import { renderWithProviders, screen, userEvent, waitFor } from "@/test/utils"; +import { ReleasesInbox } from "./ReleasesInbox"; + +vi.mock("@/api/releases", () => ({ + releasesApi: { + listInbox: 
vi.fn(), + listForSeries: vi.fn(), + patchEntry: vi.fn(), + dismiss: vi.fn(), + markAcquired: vi.fn(), + delete: vi.fn(), + bulk: vi.fn(), + facets: vi.fn(), + }, + releaseSourcesApi: { + list: vi.fn(), + update: vi.fn(), + pollNow: vi.fn(), + }, +})); + +function entry(over: Partial<ReleaseLedgerEntry> = {}): ReleaseLedgerEntry { + return { + id: "ent-1", + seriesId: "00000000-0000-0000-0000-000000000001", + seriesTitle: "Solo Leveling", + sourceId: "11111111-1111-1111-1111-111111111111", + externalReleaseId: "ext-1", + payloadUrl: "https://example.com/r/1", + confidence: 0.95, + state: "announced", + observedAt: "2026-05-01T00:00:00Z", + createdAt: "2026-05-01T00:00:00Z", + chapter: 143, + volume: null, + language: "en", + groupOrUploader: "GroupZ", + ...over, + }; +} + +function paginated(entries: ReleaseLedgerEntry[]): PaginatedReleases { + return { + data: entries, + page: 1, + pageSize: 50, + total: entries.length, + totalPages: 1, + links: { + self: "/api/v1/releases", + }, + } as PaginatedReleases; +} + +function emptyFacets(): ReleaseFacets { + return { languages: [], libraries: [], series: [] }; +} + +function source(over: Partial<ReleaseSource> = {}): ReleaseSource { + return { + id: "11111111-1111-1111-1111-111111111111", + displayName: "MangaUpdates Releases", + sourceKey: "default", + pluginId: "release-mangaupdates", + kind: "metadata-feed", + enabled: true, + cronSchedule: null, + effectiveCronSchedule: "0 0 * * *", + createdAt: "2026-05-01T00:00:00Z", + updatedAt: "2026-05-01T00:00:00Z", + ...over, + } as ReleaseSource; +} + +const list = vi.mocked(releasesApi.listInbox); +const facets = vi.mocked(releasesApi.facets); +const bulk = vi.mocked(releasesApi.bulk); +const remove = vi.mocked(releasesApi.delete); +const sourcesList = vi.mocked(releaseSourcesApi.list); + +describe("ReleasesInbox", () => { + beforeEach(() => { + vi.clearAllMocks(); + useReleaseAnnouncementsStore.getState().reset(); + useReleaseAnnouncementsStore.getState().bump(); + 
facets.mockResolvedValue(emptyFacets()); + sourcesList.mockResolvedValue([source()]); + }); + + it("renders releases and resets the unseen badge on mount", async () => { + list.mockResolvedValueOnce(paginated([entry()])); + expect(useReleaseAnnouncementsStore.getState().unseenCount).toBe(1); + renderWithProviders(<ReleasesInbox />); + await waitFor(() => { + expect(screen.getByText("GroupZ")).toBeInTheDocument(); + }); + // Series column should show the human title, not a sliced UUID. + expect(screen.getByText("Solo Leveling")).toBeInTheDocument(); + expect(useReleaseAnnouncementsStore.getState().unseenCount).toBe(0); + }); + + it("falls back to a truncated UUID when the series title is empty", async () => { + list.mockResolvedValueOnce(paginated([entry({ seriesTitle: "" })])); + renderWithProviders(<ReleasesInbox />); + await waitFor(() => { + expect(screen.getByText(/^00000000…$/)).toBeInTheDocument(); + }); + }); + + it("renders the source's display name instead of a UUID", async () => { + list.mockResolvedValueOnce(paginated([entry()])); + renderWithProviders(<ReleasesInbox />); + expect( + await screen.findByText("MangaUpdates Releases"), + ).toBeInTheDocument(); + // The bare UUID slice should no longer appear in the row. 
+ expect(screen.queryByText(/^source: 11111111…$/)).not.toBeInTheDocument(); + }); + + it("falls back to a truncated source UUID when the source is unknown", async () => { + sourcesList.mockResolvedValue([]); + list.mockResolvedValueOnce(paginated([entry()])); + renderWithProviders(<ReleasesInbox />); + expect(await screen.findByText(/^11111111…$/)).toBeInTheDocument(); + }); + + it("shows empty-state copy when no entries match", async () => { + list.mockResolvedValueOnce(paginated([])); + renderWithProviders(<ReleasesInbox />); + await waitFor(() => { + expect(screen.getByText(/No releases match/i)).toBeInTheDocument(); + }); + }); + + it("renders a kind-specific media-url icon when mediaUrl is set", async () => { + list.mockResolvedValueOnce( + paginated([ + entry({ + mediaUrl: "https://nyaa.si/download/1.torrent", + mediaUrlKind: "torrent", + }), + ]), + ); + renderWithProviders(<ReleasesInbox />); + const payloadLink = await screen.findByLabelText("Open payload URL"); + expect(payloadLink).toHaveAttribute("href", "https://example.com/r/1"); + const torrentLink = screen.getByLabelText("Download .torrent"); + expect(torrentLink).toHaveAttribute( + "href", + "https://nyaa.si/download/1.torrent", + ); + }); + + it("does not render a media-url icon when mediaUrl is absent", async () => { + list.mockResolvedValueOnce(paginated([entry()])); + renderWithProviders(<ReleasesInbox />); + await screen.findByLabelText("Open payload URL"); + expect(screen.queryByLabelText("Download .torrent")).toBeNull(); + expect(screen.queryByLabelText("Open magnet link")).toBeNull(); + expect(screen.queryByLabelText("Direct download")).toBeNull(); + }); + + it("loads facets with the active filter context", async () => { + list.mockResolvedValue(paginated([])); + facets.mockResolvedValue({ + languages: [{ language: "en", count: 7 }], + libraries: [ + { libraryId: "lib-a", libraryName: "Manga", count: 5 }, + { libraryId: "lib-b", libraryName: "Books", count: 2 }, + ], + series: [ + { + 
seriesId: "s-1", + seriesTitle: "Solo Leveling", + libraryId: "lib-a", + libraryName: "Manga", + count: 5, + }, + ], + }); + renderWithProviders(<ReleasesInbox />); + await waitFor(() => { + expect(facets).toHaveBeenCalledWith( + expect.objectContaining({ state: "announced" }), + ); + }); + }); + + it("supports state=all by passing 'all' to the inbox query", async () => { + list.mockResolvedValue(paginated([])); + const user = userEvent.setup(); + renderWithProviders(<ReleasesInbox />); + await waitFor(() => { + expect(list).toHaveBeenCalledWith( + expect.objectContaining({ state: "announced" }), + ); + }); + const stateInput = screen.getByTestId( + "releases-state-filter", + ) as HTMLInputElement; + await user.click(stateInput); + const allOption = await screen.findByText("All", { + selector: "[role=option] *, [role=option]", + }); + await user.click(allOption); + await waitFor(() => { + expect(list).toHaveBeenCalledWith( + expect.objectContaining({ state: "all" }), + ); + }); + }); + + it("bulk-dismisses the selected rows", async () => { + list.mockResolvedValue(paginated([entry({ id: "a" }), entry({ id: "b" })])); + bulk.mockResolvedValue({ affected: 2, action: "dismiss" }); + const user = userEvent.setup(); + renderWithProviders(<ReleasesInbox />); + await screen.findAllByText("GroupZ"); + + await user.click(screen.getByLabelText("Select release a")); + await user.click(screen.getByLabelText("Select release b")); + // The bulk action bar's Dismiss button has the IconX icon; the + // per-row dismiss button has aria-label "Dismiss" but no visible + // text. The bar's button has visible "Dismiss" text inside it. + const dismissButtons = await screen.findAllByRole("button", { + name: /Dismiss/, + }); + // Bar button is the only "button" tagged with the visible word. 
+ const barButton = dismissButtons.find((b) => + b.textContent?.includes("Dismiss"), + ); + await user.click(barButton!); + await waitFor(() => { + expect(bulk).toHaveBeenCalledWith({ + ids: ["a", "b"], + action: "dismiss", + }); + }); + }); + + it("requires confirmation before bulk-deleting", async () => { + list.mockResolvedValue(paginated([entry({ id: "a" })])); + bulk.mockResolvedValue({ affected: 1, action: "delete" }); + const user = userEvent.setup(); + renderWithProviders(<ReleasesInbox />); + await screen.findAllByText("GroupZ"); + + await user.click(screen.getByLabelText("Select release a")); + // The bulk-bar Delete button has visible "Delete" text; the per-row + // delete has aria-label only. + const deleteButtons = await screen.findAllByRole("button", { + name: /Delete/, + }); + const barButton = deleteButtons.find((b) => + b.textContent?.includes("Delete"), + ); + await user.click(barButton!); + // Confirmation modal opens — bulk hasn't fired yet. + expect(bulk).not.toHaveBeenCalled(); + await user.click( + await screen.findByRole("button", { name: /Delete 1 release/ }), + ); + await waitFor(() => { + expect(bulk).toHaveBeenCalledWith({ ids: ["a"], action: "delete" }); + }); + }); + + it("per-row delete fires the delete API", async () => { + list.mockResolvedValue(paginated([entry({ id: "a" })])); + remove.mockResolvedValue({ deleted: true }); + const user = userEvent.setup(); + renderWithProviders(<ReleasesInbox />); + await screen.findByText("GroupZ"); + + await user.click(screen.getByLabelText("Delete")); + await waitFor(() => { + expect(remove).toHaveBeenCalledWith("a"); + }); + }); +}); diff --git a/web/src/pages/ReleasesInbox.tsx b/web/src/pages/ReleasesInbox.tsx new file mode 100644 index 00000000..e5cdf983 --- /dev/null +++ b/web/src/pages/ReleasesInbox.tsx @@ -0,0 +1,356 @@ +import { + Badge, + Card, + Group, + Loader, + Pagination, + Select, + Stack, + Text, + Title, +} from "@mantine/core"; +import { useDisclosure } from "@mantine/hooks"; 
+import { IconRss } from "@tabler/icons-react"; +import { useEffect, useMemo, useState } from "react"; +import type { + BulkReleaseAction, + ReleaseFacets, + ReleaseFacetsParams, + ReleaseInboxParams, + ReleaseSource, +} from "@/api/releases"; +import { ReleasesBulkActionBar } from "@/components/releases/ReleasesBulkActionBar"; +import { ReleasesBulkDeleteModal } from "@/components/releases/ReleasesBulkDeleteModal"; +import { ReleasesTable } from "@/components/releases/ReleasesTable"; +import { useDocumentTitle } from "@/hooks/useDocumentTitle"; +import { + useBulkReleaseAction, + useDeleteRelease, + useDismissRelease, + useMarkReleaseAcquired, + useReleaseFacets, + useReleaseInbox, + useReleaseSources, +} from "@/hooks/useReleases"; +import { useReleaseAnnouncementsStore } from "@/store/releaseAnnouncementsStore"; + +const STATE_OPTIONS = [ + { value: "all", label: "All" }, + { value: "announced", label: "New" }, + { value: "marked_acquired", label: "Acquired" }, + { value: "dismissed", label: "Dismissed" }, + { value: "ignored", label: "Ignored" }, +]; + +const PAGE_SIZE = 50; + +const ALL_VALUE = "__all__"; + +/** Build the grouped, alphabetised series options for the Mantine Select. */ +function buildSeriesOptions(facets: ReleaseFacets | undefined) { + if (!facets) return []; + const byLibrary = new Map< + string, + { libraryName: string; items: { value: string; label: string }[] } + >(); + for (const s of facets.series) { + // Fall back to the id when title/library are missing so the option + // still renders something searchable instead of an empty string. 
+ const libraryName = s.libraryName || "Unknown library"; + const title = s.seriesTitle || `${s.seriesId.slice(0, 8)}…`; + const label = `${title} (${s.count})`; + const existing = byLibrary.get(s.libraryId); + if (existing) { + existing.items.push({ value: s.seriesId, label }); + } else { + byLibrary.set(s.libraryId, { + libraryName, + items: [{ value: s.seriesId, label }], + }); + } + } + const groups = Array.from(byLibrary.values()).sort((a, b) => + a.libraryName.localeCompare(b.libraryName), + ); + for (const g of groups) { + g.items.sort((a, b) => a.label.localeCompare(b.label)); + } + return [ + { value: ALL_VALUE, label: "All series" }, + ...groups.map((g) => ({ group: g.libraryName, items: g.items })), + ]; +} + +function buildLibraryOptions(facets: ReleaseFacets | undefined) { + if (!facets) return [{ value: ALL_VALUE, label: "All libraries" }]; + const opts = facets.libraries + .map((l) => ({ + value: l.libraryId, + label: `${l.libraryName || "Unknown"} (${l.count})`, + })) + .sort((a, b) => a.label.localeCompare(b.label)); + return [{ value: ALL_VALUE, label: "All libraries" }, ...opts]; +} + +function buildLanguageOptions(facets: ReleaseFacets | undefined) { + if (!facets) return [{ value: ALL_VALUE, label: "All languages" }]; + const opts = facets.languages + .map((l) => ({ + value: l.language, + label: `${l.language} (${l.count})`, + })) + .sort((a, b) => a.label.localeCompare(b.label)); + return [{ value: ALL_VALUE, label: "All languages" }, ...opts]; +} + +export function ReleasesInbox() { + useDocumentTitle("Releases"); + + const resetBadge = useReleaseAnnouncementsStore((s) => s.reset); + // Visiting the inbox marks all unseen events as seen — the user has + // landed where the events would have sent them anyway. 
+ useEffect(() => { + resetBadge(); + }, [resetBadge]); + + const [state, setState] = useState<string>("announced"); + const [language, setLanguage] = useState<string>(ALL_VALUE); + const [seriesId, setSeriesId] = useState<string>(ALL_VALUE); + const [libraryId, setLibraryId] = useState<string>(ALL_VALUE); + const [page, setPage] = useState<number>(1); + const [selected, setSelected] = useState<Set<string>>(new Set()); + const [confirmBulkDelete, { open: openBulkDelete, close: closeBulkDelete }] = + useDisclosure(false); + + const inboxParams: ReleaseInboxParams = { + state, + language: language === ALL_VALUE ? undefined : language, + seriesId: seriesId === ALL_VALUE ? undefined : seriesId, + libraryId: libraryId === ALL_VALUE ? undefined : libraryId, + page, + pageSize: PAGE_SIZE, + }; + // Facet query mirrors the inbox filters minus pagination so each + // dropdown reflects what's actually selectable under the current state. + const facetsParams: ReleaseFacetsParams = { + state, + language: language === ALL_VALUE ? undefined : language, + seriesId: seriesId === ALL_VALUE ? undefined : seriesId, + libraryId: libraryId === ALL_VALUE ? undefined : libraryId, + }; + + const { data, isLoading, error } = useReleaseInbox(inboxParams); + const { data: facets } = useReleaseFacets(facetsParams); + const { data: sources } = useReleaseSources(); + const dismiss = useDismissRelease(); + const markAcquired = useMarkReleaseAcquired(); + const deleteRelease = useDeleteRelease(); + const bulk = useBulkReleaseAction(); + + const entries = data?.data ?? []; + const total = data?.total ?? 0; + const totalPages = data?.totalPages ?? 1; + + // Reset bulk selection when the visible page or any filter changes — + // selection IDs don't apply across different pages or filtered views. + // The deps are *triggers*, not values used in the body, so biome's + // exhaustive-deps rule flags them as extra; that's intentional here. 
+ // biome-ignore lint/correctness/useExhaustiveDependencies: deps are change-triggers, not consumed values + useEffect(() => { + setSelected(new Set()); + }, [page, state, language, seriesId, libraryId]); + + const seriesOptions = useMemo(() => buildSeriesOptions(facets), [facets]); + const libraryOptions = useMemo(() => buildLibraryOptions(facets), [facets]); + const languageOptions = useMemo(() => buildLanguageOptions(facets), [facets]); + // Joining `sources` client-side keeps the inbox DTO lean: the source list + // is small and already cached, so a per-row label costs no extra fetch. + const sourceById = useMemo(() => { + const map = new Map<string, ReleaseSource>(); + for (const s of sources ?? []) map.set(s.id, s); + return map; + }, [sources]); + + const allOnPageSelected = + entries.length > 0 && entries.every((e) => selected.has(e.id)); + + const toggleAllOnPage = () => { + setSelected((prev) => { + if (allOnPageSelected) { + const next = new Set(prev); + for (const e of entries) next.delete(e.id); + return next; + } + const next = new Set(prev); + for (const e of entries) next.add(e.id); + return next; + }); + }; + + const toggleOne = (id: string) => { + setSelected((prev) => { + const next = new Set(prev); + if (next.has(id)) { + next.delete(id); + } else { + next.add(id); + } + return next; + }); + }; + + const runBulk = (action: BulkReleaseAction) => { + const ids = Array.from(selected); + if (ids.length === 0) return; + bulk.mutate( + { ids, action }, + { + onSuccess: () => setSelected(new Set()), + }, + ); + }; + + return ( + <Stack p="md" gap="md"> + <Group justify="space-between" wrap="wrap"> + <Group gap="sm"> + <IconRss size={26} /> + <Title order={2}>Releases + + {total} total + + + + + + + { + setLibraryId(value ?? ALL_VALUE); + setPage(1); + }} + w={220} + allowDeselect={false} + searchable + comboboxProps={{ withinPortal: true }} + /> + { + setSeriesId(value ?? 
ALL_VALUE); + setPage(1); + }} + w={320} + allowDeselect={false} + searchable + nothingFoundMessage="No series with releases" + comboboxProps={{ withinPortal: true }} + /> + + + + {selected.size > 0 && ( + setSelected(new Set())} + onDeleteClick={openBulkDelete} + sticky + /> + )} + + {error && ( + + + Failed to load releases:{" "} + {error instanceof Error ? error.message : String(error)} + + + )} + + {isLoading ? ( + + + + ) : entries.length === 0 ? ( + + + No releases match these filters. New chapters and volumes show up + here once a release source picks them up. + + + ) : ( + + dismiss.mutate(id)} + onMarkAcquired={(id) => markAcquired.mutate(id)} + onDelete={(id) => deleteRelease.mutate(id)} + showSeriesColumn + isDismissPending={dismiss.isPending} + isMarkAcquiredPending={markAcquired.isPending} + isDeletePending={deleteRelease.isPending} + verticalSpacing="sm" + /> + + )} + + {totalPages > 1 && ( + + + + )} + + { + runBulk("delete"); + closeBulkDelete(); + }} + count={selected.size} + isPending={bulk.isPending} + /> + + ); +} diff --git a/web/src/pages/SeriesDetail.tsx b/web/src/pages/SeriesDetail.tsx index 60cb6a47..542e66b7 100644 --- a/web/src/pages/SeriesDetail.tsx +++ b/web/src/pages/SeriesDetail.tsx @@ -55,6 +55,7 @@ import { BulkSelectionToolbar } from "@/components/library/BulkSelectionToolbar" import { MetadataApplyFlow } from "@/components/metadata"; import { AlternateTitles, + BehindByBadge, CommunityRating, CustomMetadataDisplay, ExternalIds, @@ -65,10 +66,14 @@ import { SeriesInfoModal, SeriesMetadataEditModal, SeriesRating, + SeriesReleasesPanel, + TrackingPanel, } from "@/components/series"; import { formatSeriesCounts } from "@/components/series/seriesCounts"; import { useDynamicDocumentTitle } from "@/hooks/useDocumentTitle"; import { usePermissions } from "@/hooks/usePermissions"; +import { useReleaseTrackingApplicability } from "@/hooks/useReleaseTrackingApplicability"; +import { useSeriesTracking } from "@/hooks/useSeriesTracking"; 
import { useCoverUpdatesStore } from "@/store/coverUpdatesStore"; import { PERMISSIONS } from "@/types/permissions"; import { transformFullSeriesToSeriesContext } from "@/utils/templateUtils"; @@ -150,6 +155,21 @@ export function SeriesDetail() { enabled: !!seriesId && isAdmin, }); + // Fetch tracking config so we can render Behind-by-N badges next to the + // header counts (translation: latestKnownChapter > localMaxChapter, + // upstream: upstreamChapterGap > 0). The query is cheap and shared with + // the TrackingPanel below. + const { data: tracking } = useSeriesTracking(seriesId ?? "", !!seriesId); + + // Whether any enabled release-source plugin applies to this series's + // library. Drives whether the TrackingPanel + SeriesReleasesPanel render + // at all — on libraries with no covering plugin the panels would be a + // dead-end (you can flip `tracked: true` but nothing would ever poll). + const { data: releaseTrackingApplicability } = + useReleaseTrackingApplicability(series?.libraryId); + const releaseTrackingAvailable = + releaseTrackingApplicability?.applicable === true; + // Fetch available plugin actions for series:detail scope, filtered by library const { data: pluginActions } = useQuery({ queryKey: ["plugin-actions", "series:detail", series?.libraryId], @@ -780,6 +800,66 @@ export function SeriesDetail() { ) : null; })()} + {/* Behind-by-N badges: translation gap (Phase 6 release sources) + and upstream gap (Phase 5 metadata signal). Each badge is a + no-op when the gap is zero/missing, the series isn't tracked, + or the corresponding axis is disabled. 
*/} + {tracking?.tracked && ( + + {tracking.trackChapters && + tracking.latestKnownChapter != null && + series.localMaxChapter != null && + tracking.latestKnownChapter > series.localMaxChapter && ( + + )} + {tracking.trackVolumes && + tracking.latestKnownVolume != null && + series.localMaxVolume != null && + tracking.latestKnownVolume > series.localMaxVolume && ( + + )} + {series.upstreamChapterGap != null && + series.upstreamChapterGap > 0 && ( + + )} + {series.upstreamVolumeGap != null && + series.upstreamVolumeGap > 0 && ( + + )} + + )} + {/* Alternate titles inline */} {series.alternateTitles && series.alternateTitles.length > 0 && ( @@ -985,6 +1065,22 @@ export function SeriesDetail() { } /> )} + + {/* Release tracking (admin/editor surface; query stays cheap when collapsed). + Hidden on libraries with no covering release-source plugin. + Sits below the metadata panels: it's an action surface, not + identifying data. */} + {canEditSeries && releaseTrackingAvailable && ( + + )} + + {/* Releases panel: ledger entries grouped by chapter/volume. Shows + whenever the series has tracking enabled and a plugin can + actually deliver releases — otherwise the panel would render + an empty inbox with no path to ever populate. 
*/} + {tracking?.tracked && releaseTrackingAvailable && ( + + )} {/* Bulk Selection Toolbar - shows when items are selected */} diff --git a/web/src/pages/settings/PluginsSettings.test.tsx b/web/src/pages/settings/PluginsSettings.test.tsx index df50a4bc..d63c2cf1 100644 --- a/web/src/pages/settings/PluginsSettings.test.tsx +++ b/web/src/pages/settings/PluginsSettings.test.tsx @@ -1749,6 +1749,8 @@ describe("PluginsSettings - Official Plugins section", () => { expect(screen.getByText("Recommendations")).toBeInTheDocument(); // "Metadata" appears for Echo, Mangabaka, and Open Library plugins expect(screen.getAllByText("Metadata").length).toBeGreaterThanOrEqual(3); + // "Releases" appears for MangaUpdates and Nyaa plugins + expect(screen.getAllByText("Releases").length).toBeGreaterThanOrEqual(2); }); }); diff --git a/web/src/pages/settings/ReleaseTrackingSettings.test.tsx b/web/src/pages/settings/ReleaseTrackingSettings.test.tsx new file mode 100644 index 00000000..f3476f5e --- /dev/null +++ b/web/src/pages/settings/ReleaseTrackingSettings.test.tsx @@ -0,0 +1,234 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { pluginsApi } from "@/api/plugins"; +import { + type ReleaseSource, + releaseSourcesApi, + releasesApi, +} from "@/api/releases"; +import { renderWithProviders, screen, userEvent, waitFor } from "@/test/utils"; +import { ReleaseTrackingSettings } from "./ReleaseTrackingSettings"; + +vi.mock("@/api/releases", () => ({ + releasesApi: { + listInbox: vi.fn(), + listForSeries: vi.fn(), + patchEntry: vi.fn(), + dismiss: vi.fn(), + markAcquired: vi.fn(), + }, + releaseSourcesApi: { + list: vi.fn(), + update: vi.fn(), + pollNow: vi.fn(), + }, +})); + +vi.mock("@/api/plugins", async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + pluginsApi: { + ...actual.pluginsApi, + getAll: vi.fn(), + }, + }; +}); + +const list = vi.mocked(releaseSourcesApi.list); +const update = vi.mocked(releaseSourcesApi.update); 
+const pollNow = vi.mocked(releaseSourcesApi.pollNow); +const getAllPlugins = vi.mocked(pluginsApi.getAll); + +function source(over: Partial = {}): ReleaseSource { + return { + id: "11111111-1111-1111-1111-111111111111", + pluginId: "release-mangaupdates", + sourceKey: "mu:batch", + displayName: "MangaUpdates batch", + kind: "rss-series", + enabled: true, + cronSchedule: null, + effectiveCronSchedule: "0 0 * * *", + lastPolledAt: "2026-05-01T00:00:00Z", + lastError: null, + lastErrorAt: null, + etag: null, + config: null, + createdAt: "2026-01-01T00:00:00Z", + updatedAt: "2026-05-01T00:00:00Z", + ...over, + }; +} + +describe("ReleaseTrackingSettings", () => { + beforeEach(() => { + vi.clearAllMocks(); + void releasesApi; + // Default: no plugins installed. Individual tests override as needed. + getAllPlugins.mockResolvedValue({ plugins: [], total: 0 }); + }); + + it("renders sources and the OK status when last poll is fresh", async () => { + list.mockResolvedValueOnce([source()]); + renderWithProviders(); + await waitFor(() => { + expect(screen.getByText("MangaUpdates batch")).toBeInTheDocument(); + }); + expect(screen.getByText("OK")).toBeInTheDocument(); + }); + + it("shows an Errored badge when last_error is populated", async () => { + list.mockResolvedValueOnce([ + source({ lastError: "upstream returned 503" }), + ]); + renderWithProviders(); + await waitFor(() => { + expect(screen.getByText("Errored")).toBeInTheDocument(); + }); + }); + + it("toggling enabled calls update with the new value", async () => { + list.mockResolvedValue([source()]); + update.mockResolvedValueOnce(source({ enabled: false })); + const user = userEvent.setup(); + renderWithProviders(); + await waitFor(() => { + expect(screen.getByText("MangaUpdates batch")).toBeInTheDocument(); + }); + const toggle = screen.getByRole("switch", { name: "Enable source" }); + await user.click(toggle); + await waitFor(() => { + expect(update).toHaveBeenCalledWith( + "11111111-1111-1111-1111-111111111111", 
+ expect.objectContaining({ enabled: false }), + ); + }); + }); + + it("Poll now button is disabled when source is disabled", async () => { + list.mockResolvedValueOnce([source({ enabled: false })]); + renderWithProviders(); + await waitFor(() => { + expect(screen.getByText("MangaUpdates batch")).toBeInTheDocument(); + }); + const pollButton = screen.getByLabelText("Poll now"); + expect(pollButton).toBeDisabled(); + }); + + it("clicking Poll now triggers the API call when source is enabled", async () => { + list.mockResolvedValue([source()]); + pollNow.mockResolvedValueOnce({ status: "enqueued", message: "ok" }); + const user = userEvent.setup(); + renderWithProviders(); + await waitFor(() => { + expect(screen.getByText("MangaUpdates batch")).toBeInTheDocument(); + }); + const pollButton = screen.getByLabelText("Poll now"); + await user.click(pollButton); + await waitFor(() => { + expect(pollNow).toHaveBeenCalledWith( + "11111111-1111-1111-1111-111111111111", + ); + }); + }); + + it("Poll now spinner is per-row, not shared across rows", async () => { + // Two sources: the first poll is held in flight while we click the + // second. Only the first row should show a loading spinner. 
+ list.mockResolvedValue([ + source({ + id: "11111111-1111-1111-1111-111111111111", + displayName: "Source A", + }), + source({ + id: "22222222-2222-2222-2222-222222222222", + displayName: "Source B", + sourceKey: "mu:other", + }), + ]); + + let resolveFirst: + | ((v: { status: string; message: string }) => void) + | null = null; + pollNow.mockImplementationOnce( + () => + new Promise((resolve) => { + resolveFirst = resolve; + }), + ); + + const user = userEvent.setup(); + renderWithProviders(); + await waitFor(() => { + expect(screen.getByText("Source A")).toBeInTheDocument(); + expect(screen.getByText("Source B")).toBeInTheDocument(); + }); + + const pollButtons = screen.getAllByLabelText("Poll now"); + expect(pollButtons).toHaveLength(2); + + await user.click(pollButtons[0]); + + await waitFor(() => { + expect(pollButtons[0]).toHaveAttribute("data-loading", "true"); + }); + // Crucially, the other row's button must NOT be in a loading state while + // row A's poll is in flight. + expect(pollButtons[1]).not.toHaveAttribute("data-loading", "true"); + expect(pollButtons[1]).not.toBeDisabled(); + + // Resolve the first request and verify the spinner clears. + resolveFirst?.({ status: "enqueued", message: "ok" }); + await waitFor(() => { + expect(pollButtons[0]).not.toHaveAttribute("data-loading", "true"); + }); + }); + + it("plugin-sources dropdown lists release-source plugins by display name", async () => { + list.mockResolvedValue([]); + // One release-source plugin + one metadata plugin to confirm filtering. + getAllPlugins.mockResolvedValue({ + plugins: [ + { + id: "p1", + name: "release-mangaupdates", + displayName: "MangaUpdates Releases", + manifest: { + name: "release-mangaupdates", + displayName: "MangaUpdates Releases", + capabilities: { releaseSource: true }, + }, + // The remaining PluginDto fields don't matter for this test. 
+ } as never, + { + id: "p2", + name: "metadata-mangabaka", + displayName: "MangaBaka", + manifest: { + name: "metadata-mangabaka", + displayName: "MangaBaka", + capabilities: { metadataProvider: ["series"] }, + }, + } as never, + ], + total: 2, + }); + + const user = userEvent.setup(); + renderWithProviders(); + // Wait for the plugins query to settle (the dropdown only renders the + // release-source options once `pluginsApi.getAll` resolves). + await waitFor(() => { + expect(getAllPlugins).toHaveBeenCalled(); + }); + // Mantine MultiSelect renders an input with role=textbox associated with + // the label; clicking it opens the dropdown and shows the options. + const select = screen.getByRole("textbox", { name: "Plugin sources" }); + await user.click(select); + await waitFor(() => { + expect(screen.getByText("MangaUpdates Releases")).toBeInTheDocument(); + }); + // Metadata-only plugin is filtered out — should not appear as an option. + expect(screen.queryByText("MangaBaka")).not.toBeInTheDocument(); + }); +}); diff --git a/web/src/pages/settings/ReleaseTrackingSettings.tsx b/web/src/pages/settings/ReleaseTrackingSettings.tsx new file mode 100644 index 00000000..502062d6 --- /dev/null +++ b/web/src/pages/settings/ReleaseTrackingSettings.tsx @@ -0,0 +1,666 @@ +import { + ActionIcon, + Anchor, + Badge, + Box, + Button, + Card, + Group, + Loader, + MultiSelect, + Stack, + Switch, + Table, + TagsInput, + Text, + Title, + Tooltip, +} from "@mantine/core"; +import { notifications } from "@mantine/notifications"; +import { + IconAlertCircle, + IconBellRinging, + IconClockHour4, + IconRefresh, + IconRestore, + IconTrash, +} from "@tabler/icons-react"; +import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { CronExpressionParser } from "cron-parser"; +import { toString as cronToString } from "cronstrue"; +import { formatDistanceToNow } from "date-fns"; +import { type Dispatch, type SetStateAction, useMemo, useState } from "react"; +import 
{ pluginsApi } from "@/api/plugins"; +import type { ReleaseSource } from "@/api/releases"; +import { settingsApi } from "@/api/settings"; +import { CronInput } from "@/components/forms/CronInput"; +import { + usePollReleaseSourceNow, + useReleaseSources, + useResetReleaseSource, + useUpdateReleaseSource, +} from "@/hooks/useReleases"; +import { useUserPreference } from "@/hooks/useUserPreference"; + +const SETTING_NOTIFY_LANGUAGES = "release_tracking.notify_languages"; +const SETTING_NOTIFY_PLUGINS = "release_tracking.notify_plugins"; +const SETTING_DEFAULT_CRON_SCHEDULE = "release_tracking.default_cron_schedule"; +const PREF_MUTED_SERIES = "release_tracking.muted_series_ids"; + +/** Parse a settings-table JSON-array value back to a string list. */ +function parseArraySetting(value: string | undefined | null): string[] { + if (!value) return []; + try { + const parsed = JSON.parse(value); + return Array.isArray(parsed) + ? parsed.filter((v): v is string => typeof v === "string") + : []; + } catch { + return []; + } +} + +/** + * Render a cron expression as a human-readable phrase. Mirrors the logic in + * `` (5-part → cronstrue normalization). Returns the raw expression + * as a fallback if parsing fails so we still show *something* meaningful. + */ +function describeCron(expression: string): string { + const trimmed = expression.trim(); + if (!trimmed) return ""; + try { + CronExpressionParser.parse(trimmed); + const parts = trimmed.split(/\s+/); + const normalized = + parts.length === 5 + ? parts.map((p) => (p.startsWith("/") ? 
`*${p}` : p)).join(" ") + : trimmed; + return cronToString(normalized, { + throwExceptionOnParseError: false, + verbose: false, + }); + } catch { + return trimmed; + } +} + +export function ReleaseTrackingSettings() { + const sourcesQuery = useReleaseSources(); + const update = useUpdateReleaseSource(); + const pollNow = usePollReleaseSourceNow(); + const reset = useResetReleaseSource(); + + // The mutation hooks expose a single shared `isPending` flag, which would + // light up the spinner on every row whenever any one row's request was in + // flight. Track in-flight `sourceId`s explicitly so each row's spinner + // reflects only that row's own request, even when multiple are pending + // concurrently. + const [pollingIds, setPollingIds] = useState>(new Set()); + const [resettingIds, setResettingIds] = useState>( + new Set(), + ); + + const addId = ( + setter: Dispatch>>, + id: string, + ) => setter((prev) => new Set(prev).add(id)); + const removeId = ( + setter: Dispatch>>, + id: string, + ) => + setter((prev) => { + const next = new Set(prev); + next.delete(id); + return next; + }); + + return ( + + + + + Release tracking + + + + Manage release sources. Each row is one logical feed exposed by a + plugin (e.g. one Nyaa uploader or one MangaUpdates batch). Disabling a + source pauses its scheduled polls; "Poll now" enqueues an immediate + fetch. + + + + + + + {sourcesQuery.isLoading ? ( + + + Loading sources… + + ) : sourcesQuery.error ? ( + + + + + Failed to load sources. + + + + ) : (sourcesQuery.data ?? []).length === 0 ? ( + + + No release sources configured. Install a plugin that declares the + `release_source` capability and configure at least one source. + + + ) : ( + + + + + Source + Plugin + Interval + Last poll + Status + Enabled + + + + + {(sourcesQuery.data ?? 
[]).map((source) => ( + + update.mutate({ + sourceId: source.id, + update: { enabled }, + }) + } + onCronScheduleChange={(cronSchedule) => + update.mutate({ + sourceId: source.id, + // Send `null` to clear the override and revert to + // inheriting the server-wide default. + update: { cronSchedule }, + }) + } + onPollNow={() => { + addId(setPollingIds, source.id); + pollNow.mutate(source.id, { + onSettled: () => removeId(setPollingIds, source.id), + }); + }} + pollNowPending={pollingIds.has(source.id)} + onReset={() => { + if ( + window.confirm( + `Reset "${source.displayName}"?\n\nThis deletes every release ledger row for this source and clears its poll state (etag, last poll time). User-managed settings (enabled, interval, name) are preserved. The next poll will re-record everything as new.\n\nThis cannot be undone.`, + ) + ) { + addId(setResettingIds, source.id); + reset.mutate(source.id, { + onSettled: () => removeId(setResettingIds, source.id), + }); + } + }} + resetPending={resettingIds.has(source.id)} + /> + ))} + +
+
+ )} +
+
+ ); +} + +/** + * Server-wide default cron schedule for release-source polling. Each + * `release_sources` row whose `cron_schedule` is NULL inherits this value. + * The compile-time fallback (`"0 0 * * *"`) only applies if the setting row + * itself is missing. + */ +function DefaultScheduleCard() { + const queryClient = useQueryClient(); + const settingQuery = useQuery({ + queryKey: ["admin-setting", SETTING_DEFAULT_CRON_SCHEDULE], + queryFn: () => settingsApi.get(SETTING_DEFAULT_CRON_SCHEDULE), + }); + + const serverValue = settingQuery.data?.value ?? ""; + const [draft, setDraft] = useState(serverValue); + // Sync local draft when the server value changes (initial load, refetch). + // We deliberately don't useEffect: comparing the string each render is + // cheap, and we only update when the upstream value actually changes. + if (draft === "" && serverValue !== "" && !settingQuery.isFetching) { + setDraft(serverValue); + } + + const updateMutation = useMutation({ + mutationFn: (value: string) => + settingsApi.update(SETTING_DEFAULT_CRON_SCHEDULE, { value }), + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["admin-setting", SETTING_DEFAULT_CRON_SCHEDULE], + }); + // Source rows display `effectiveCronSchedule` resolved server-side, + // so a default change must invalidate the source list to refresh + // every inheriting row's "(Default)" label. + queryClient.invalidateQueries({ queryKey: ["release-sources"] }); + notifications.show({ + title: "Default schedule saved", + message: + "All sources without a per-row override will use the new schedule.", + color: "green", + }); + }, + onError: (err: Error) => + notifications.show({ + title: "Failed to save", + message: err.message ?? 
"Could not update default schedule.", + color: "red", + }), + }); + + const commit = () => { + const trimmed = draft.trim(); + if (!trimmed || trimmed === serverValue) { + setDraft(serverValue); + return; + } + updateMutation.mutate(trimmed); + }; + + return ( + + + + + Default schedule + + + Server-wide default cron used by every release source that doesn't + have its own per-row override. Changing this propagates immediately to + inheriting rows. + + + + + ); +} + +function NotificationPreferencesCard() { + const queryClient = useQueryClient(); + + // Server-wide notify allowlists (admin-managed, persisted in `settings`). + const notifyLanguagesQuery = useQuery({ + queryKey: ["admin-setting", SETTING_NOTIFY_LANGUAGES], + queryFn: () => settingsApi.get(SETTING_NOTIFY_LANGUAGES), + }); + const notifyPluginsQuery = useQuery({ + queryKey: ["admin-setting", SETTING_NOTIFY_PLUGINS], + queryFn: () => settingsApi.get(SETTING_NOTIFY_PLUGINS), + }); + + // Per-user mute list (persisted in user_preferences via the user-prefs + // store, with localStorage caching + debounced server sync). Used here + // only for the count display + "Clear all mutes" action; per-series + // toggle lives on each series detail page. + const [mutedSeriesIds, setMutedSeriesIds] = + useUserPreference(PREF_MUTED_SERIES); + + // Pull every registered plugin so we can show release-source ones in the + // dropdown. Stale entries (in the allowlist but no longer installed) keep + // their slot in the option list so admins can see + remove them. + const pluginsQuery = useQuery({ + queryKey: ["plugins"], + queryFn: pluginsApi.getAll, + }); + + const allowedLanguages = useMemo( + () => parseArraySetting(notifyLanguagesQuery.data?.value), + [notifyLanguagesQuery.data], + ); + const allowedPlugins = useMemo( + () => parseArraySetting(notifyPluginsQuery.data?.value), + [notifyPluginsQuery.data], + ); + + const pluginOptions = useMemo(() => { + const registered = (pluginsQuery.data?.plugins ?? 
[]).filter( + (p) => p.manifest?.capabilities?.releaseSource === true, + ); + const seen = new Set(); + const opts: { value: string; label: string }[] = []; + for (const p of registered) { + seen.add(p.name); + opts.push({ + value: p.name, + label: p.manifest?.displayName ?? p.name, + }); + } + for (const id of allowedPlugins) { + if (!seen.has(id)) { + opts.push({ value: id, label: `${id} (not installed)` }); + } + } + return opts; + }, [pluginsQuery.data, allowedPlugins]); + + // Persist a setting back to the server. Lower-cases language codes so the + // backend filter (`shouldNotify`) doesn't need to re-normalize. + const updateSettingMutation = useMutation({ + mutationFn: ({ key, values }: { key: string; values: string[] }) => + settingsApi.update(key, { value: JSON.stringify(values) }), + onSuccess: (_data, vars) => { + queryClient.invalidateQueries({ + queryKey: ["admin-setting", vars.key], + }); + }, + onError: (err: Error) => + notifications.show({ + title: "Failed to save", + message: err.message ?? "Could not update notification preferences.", + color: "red", + }), + }); + + const clearMutes = () => { + setMutedSeriesIds([]); + notifications.show({ + title: "Mutes cleared", + message: "All per-series mutes have been removed.", + color: "green", + }); + }; + + const setAllowedLanguages = (values: string[]) => + updateSettingMutation.mutate({ + key: SETTING_NOTIFY_LANGUAGES, + values: values + .map((v) => v.trim().toLowerCase()) + .filter((v) => v.length > 0), + }); + const setAllowedPlugins = (values: string[]) => + updateSettingMutation.mutate({ + key: SETTING_NOTIFY_PLUGINS, + values, + }); + + return ( + + + + + Notification preferences + + + Filter announcement toasts and the Releases nav badge. Empty means "no + filter — let everything through." Server-wide for languages and plugin + sources; per-series mute is per-user (toggle on each series detail + page). + + + + + + + Muted series + + + {mutedSeriesIds.length === 0 + ? 
"No series muted for your account." + : `${mutedSeriesIds.length} series muted for your account.`} + + + + + + + ); +} + +interface RowProps { + source: ReleaseSource; + onToggle: (enabled: boolean) => void; + /** `null` clears the override and reverts to the server-wide default. */ + onCronScheduleChange: (cronSchedule: string | null) => void; + onPollNow: () => void; + pollNowPending: boolean; + onReset: () => void; + resetPending: boolean; +} + +function ReleaseSourceRow({ + source, + onToggle, + onCronScheduleChange, + onPollNow, + pollNowPending, + onReset, + resetPending, +}: RowProps) { + // Truthy `cronSchedule` means the row has a per-source override; render the + // editor inline. The server omits the field entirely (rather than sending + // `null`) when the row is inheriting, so accept both `null` and `undefined` + // as "no override." + const hasOverride = Boolean(source.cronSchedule); + const [isOverriding, setIsOverriding] = useState(hasOverride); + const [draft, setDraft] = useState( + source.cronSchedule || source.effectiveCronSchedule, + ); + + const lastPolled = source.lastPolledAt + ? formatDistanceToNow(new Date(source.lastPolledAt), { addSuffix: true }) + : "—"; + + const commitDraft = () => { + const trimmed = draft.trim(); + if (!trimmed) { + // Empty editor = revert to inherit. + if (source.cronSchedule) onCronScheduleChange(null); + setIsOverriding(false); + setDraft(source.effectiveCronSchedule); + return; + } + if (trimmed !== source.cronSchedule) { + onCronScheduleChange(trimmed); + } + }; + + const resetToDefault = () => { + if (source.cronSchedule) onCronScheduleChange(null); + setIsOverriding(false); + setDraft(source.effectiveCronSchedule); + }; + + return ( + + + + + {source.displayName} + + + {source.sourceKey} + + + + + + {source.pluginId} + + + + {isOverriding ? 
( + + + + Reset to default + + + ) : ( + + + {describeCron(source.effectiveCronSchedule)}{" "} + + (Default) + + + { + setIsOverriding(true); + setDraft(source.effectiveCronSchedule); + }} + > + Override + + + )} + + + + {lastPolled} + {source.lastSummary && ( + + {source.lastSummary} + + )} + + + + {source.lastError ? ( + + + Errored + + + ) : source.lastPolledAt ? ( + // Wrap the OK badge in a tooltip carrying `lastSummary` so users + // can see *why* a poll returned nothing (no tracked series, 304, + // dropped below threshold, etc.) without grepping logs. + + + OK + + + ) : ( + + Never polled + + )} + + + onToggle(event.currentTarget.checked)} + aria-label="Enable source" + /> + + + + + + + + + + + + + + + + + ); +} diff --git a/web/src/pages/settings/ServerSettings.tsx b/web/src/pages/settings/ServerSettings.tsx index 79543e73..af0f3f13 100644 --- a/web/src/pages/settings/ServerSettings.tsx +++ b/web/src/pages/settings/ServerSettings.tsx @@ -469,7 +469,14 @@ export function ServerSettings() { ) : ( {Object.entries(groupedSettings) - .filter(([category]) => category.toLowerCase() !== "display") + // "display" is rendered above as a separate section. + // "Release Tracking" has its own dedicated page at + // /settings/release-tracking — hide it here so admins don't + // have two surfaces editing the same keys. 
+ .filter(([category]) => { + const c = category.toLowerCase(); + return c !== "display" && c !== "release tracking"; + }) .sort(([a], [b]) => a.localeCompare(b)) .map(([category, categorySettings]) => ( = { Metadata: "blue", Sync: "teal", Recommendations: "grape", + Releases: "orange", }; const credentialLabels: Record = { diff --git a/web/src/store/releaseAnnouncementsStore.test.ts b/web/src/store/releaseAnnouncementsStore.test.ts new file mode 100644 index 00000000..30ba9976 --- /dev/null +++ b/web/src/store/releaseAnnouncementsStore.test.ts @@ -0,0 +1,17 @@ +import { beforeEach, describe, expect, it } from "vitest"; +import { useReleaseAnnouncementsStore } from "./releaseAnnouncementsStore"; + +describe("releaseAnnouncementsStore", () => { + beforeEach(() => { + useReleaseAnnouncementsStore.getState().reset(); + }); + + it("bump increments and reset clears the unseen counter", () => { + const store = useReleaseAnnouncementsStore.getState(); + store.bump(); + store.bump(); + expect(useReleaseAnnouncementsStore.getState().unseenCount).toBe(2); + store.reset(); + expect(useReleaseAnnouncementsStore.getState().unseenCount).toBe(0); + }); +}); diff --git a/web/src/store/releaseAnnouncementsStore.ts b/web/src/store/releaseAnnouncementsStore.ts new file mode 100644 index 00000000..9659ac7c --- /dev/null +++ b/web/src/store/releaseAnnouncementsStore.ts @@ -0,0 +1,31 @@ +import { create } from "zustand"; + +/** + * Releases nav-badge counter. + * + * Notification *filters* (server-wide language + plugin allowlists, per-user + * mute list) used to live here too, but they belong on durable storage + * (settings + user_preferences) so they survive page reloads. This store now + * just tracks the in-session "unseen" badge count. + * + * The `shouldNotify` decision is made inside the SSE handler in + * `useEntityEvents` by snapshotting the latest filter values from the query + * cache; see that file for the predicate. 
+ */ +interface ReleaseAnnouncementsState { + /** Number of unseen `release_announced` events since the user last visited /releases. */ + unseenCount: number; + + /** Increment the badge counter (called by the SSE handler when shouldNotify passes). */ + bump: () => void; + /** Reset the badge counter (called when the user visits /releases). */ + reset: () => void; +} + +export const useReleaseAnnouncementsStore = create()( + (set) => ({ + unseenCount: 0, + bump: () => set((state) => ({ unseenCount: state.unseenCount + 1 })), + reset: () => set({ unseenCount: 0 }), + }), +); diff --git a/web/src/test/setup.ts b/web/src/test/setup.ts index a5b95c10..58bdc660 100644 --- a/web/src/test/setup.ts +++ b/web/src/test/setup.ts @@ -120,6 +120,13 @@ global.ResizeObserver = class ResizeObserver { unobserve() {} } as any; +// jsdom doesn't implement Element.scrollIntoView, but Mantine's Combobox +// calls it on the active option after clicks. Stubbing here prevents +// "scrollIntoView is not a function" unhandled errors in dropdown tests. +if (!Element.prototype.scrollIntoView) { + Element.prototype.scrollIntoView = vi.fn(); +} + // Mock EventSource for SSE tests global.EventSource = class EventSource { url: string; diff --git a/web/src/types/api.generated.ts b/web/src/types/api.generated.ts index d9ec8e98..6fcb482e 100644 --- a/web/src/types/api.generated.ts +++ b/web/src/types/api.generated.ts @@ -2424,6 +2424,246 @@ export interface paths { patch?: never; trace?: never; }; + "/api/v1/release-sources": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** List all configured release sources (admin-only). 
*/ + get: operations["list_release_sources"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/release-sources/applicability": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Whether release tracking is available for a given library. + * @description Read-only, requires only `SeriesRead`: the response carries no + * admin-sensitive data (no plugin IDs, no configs, no library + * allowlists), just the boolean and friendly display names. Used by the + * frontend to: + * + * - hide the per-series Tracking panel + Releases tab on libraries with + * no applicable plugin (cleaner UX); + * - decide whether to show the "Track for releases" / "Don't track for + * releases" entries in the bulk-selection menu. + */ + get: operations["get_release_tracking_applicability"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/release-sources/{source_id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + /** + * PATCH a release source (admin-only). + * @description Toggle `enabled`, override `cronSchedule`, or rename `displayName`. + * Sending `cronSchedule: null` clears the override and reverts the row to + * inheriting the server-wide `release_tracking.default_cron_schedule`. + */ + patch: operations["update_release_source"]; + trace?: never; + }; + "/api/v1/release-sources/{source_id}/poll-now": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Trigger a manual poll for a source. + * @description Enqueues a `PollReleaseSource` task immediately. 
The task runs + * asynchronously via the worker pool; the response confirms the enqueue, + * not the poll outcome. + */ + post: operations["poll_release_source_now"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/release-sources/{source_id}/reset": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Reset a release source to a clean slate. + * @description Deletes every `release_ledger` row owned by the source and clears the + * source's transient poll state (`etag`, `last_polled_at`, `last_error`, + * `last_error_at`, `last_summary`). User-managed fields (`enabled`, + * `cron_schedule`, `display_name`, `config`) are preserved. + * + * Intended for testing/troubleshooting: after a reset, the next poll + * fetches the upstream feed without an `If-None-Match` header (so no 304 + * short-circuit) and re-records every release as `announced`. Does NOT + * auto-enqueue a poll — call `POST /release-sources/{id}/poll-now` after + * resetting if you want immediate re-fetch. + */ + post: operations["reset_release_source"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/releases": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Cross-series inbox: announced (or filtered) ledger entries, paginated. */ + get: operations["list_release_inbox"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/releases/bulk": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Apply an action to a batch of ledger rows. + * @description `dismiss`, `mark-acquired`, `ignore`, and `reset` all set state + * in-place. 
`delete` removes the rows and clears the affected sources' + * etags so the next poll re-fetches without `If-None-Match`. All run + * as bulk SQL (no per-row round trips), so this scales to thousands of + * rows in one call. + */ + post: operations["bulk_release_action"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/releases/facets": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Distinct values present in the inbox under the given filters. + * @description Returns the languages, libraries, and series that have at least one + * matching ledger row. The frontend uses this to populate cascading + * Select dropdowns so users never have to type a UUID and never see + * dropdown options that would yield zero results. + */ + get: operations["list_release_facets"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/releases/{release_id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + post?: never; + /** + * Hard-delete a single ledger row. + * @description Also clears the source's `etag` so the next poll bypasses + * `If-None-Match` and re-records the deleted row in `announced` state + * (assuming the upstream still lists it). This is the lever users want + * when they marked something incorrectly and need to "get it back". + */ + delete: operations["delete_release"]; + options?: never; + head?: never; + /** PATCH a ledger entry's state (general-purpose state transition). */ + patch: operations["update_release_entry"]; + trace?: never; + }; + "/api/v1/releases/{release_id}/dismiss": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Convenience POST: dismiss a release. 
*/ + post: operations["dismiss_release"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/releases/{release_id}/mark-acquired": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Convenience POST: mark a release acquired. */ + post: operations["mark_release_acquired"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/v1/scans/active": { parameters: { query?: never; @@ -2713,6 +2953,35 @@ export interface paths { patch?: never; trace?: never; }; + "/api/v1/series/bulk/track-for-releases": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Bulk-enable release tracking for multiple series. + * @description For each `series_id` in the request, flips `series_tracking.tracked` to + * `true` and runs the seed pass (auto-derives aliases, `latest_known_*`, + * `track_chapters` / `track_volumes` from existing data). Series that don't + * exist are reported as `outcome: skipped`. Series already tracked are + * reported as `outcome: skipped, detail: "already tracked"` and the seed is + * not re-run (idempotent — a re-run would simply re-derive identical + * values, but we skip the work). + * + * Mirrors the per-series PATCH `false -> true` transition: same seed + * function, same idempotency guarantees. + */ + post: operations["bulk_track_series_for_releases"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/v1/series/bulk/unread": { parameters: { query?: never; @@ -2734,6 +3003,29 @@ export interface paths { patch?: never; trace?: never; }; + "/api/v1/series/bulk/untrack-for-releases": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * Bulk-disable release tracking for multiple series. 
+ * @description Flips `series_tracking.tracked` to `false`. Does not delete aliases, + * `latest_known_*`, or other tracking config — the user can re-track + * without losing customizations, and the seed will re-derive any + * auto-derived fields on the next track-on transition. + */ + post: operations["bulk_untrack_series_for_releases"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/v1/series/in-progress": { parameters: { query?: never; @@ -3064,6 +3356,45 @@ export interface paths { patch: operations["patch_series"]; trace?: never; }; + "/api/v1/series/{series_id}/aliases": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** List release-matching aliases for a series. */ + get: operations["list_series_aliases"]; + put?: never; + /** + * Create a release-matching alias for a series. + * @description Idempotent: if `(series_id, alias)` already exists, returns the existing + * row with HTTP 200 instead of inserting a duplicate. + */ + post: operations["create_series_alias"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/series/{series_id}/aliases/{alias_id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + post?: never; + /** Delete a release-matching alias. */ + delete: operations["delete_series_alias"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/v1/series/{series_id}/alternate-titles": { parameters: { query?: never; @@ -3598,6 +3929,23 @@ export interface paths { patch?: never; trace?: never; }; + "/api/v1/series/{series_id}/releases": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** List release-ledger entries for a series. 
*/ + get: operations["list_series_releases"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/v1/series/{series_id}/renumber": { parameters: { query?: never; @@ -3772,24 +4120,51 @@ export interface paths { patch?: never; trace?: never; }; - "/api/v1/series/{series_id}/unread": { + "/api/v1/series/{series_id}/tracking": { parameters: { query?: never; header?: never; path?: never; cookie?: never; }; - get?: never; + /** + * Get release-tracking config for a series. + * @description Returns a virtual untracked row when no `series_tracking` row exists, so the + * frontend can render the panel uniformly without special-casing absent rows. + */ + get: operations["get_series_tracking"]; put?: never; - /** Mark all books in a series as unread */ - post: operations["mark_series_as_unread"]; + post?: never; delete?: never; options?: never; head?: never; - patch?: never; + /** + * Update release-tracking config for a series. + * @description Upserts: creates the row on first write, applies the patch otherwise. + * All fields are optional — omit to leave alone, send `null` on a nullable + * field to clear it. + */ + patch: operations["update_series_tracking"]; trace?: never; }; - "/api/v1/settings/branding": { + "/api/v1/series/{series_id}/unread": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** Mark all books in a series as unread */ + post: operations["mark_series_as_unread"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/v1/settings/branding": { parameters: { query?: never; header?: never; @@ -6223,6 +6598,24 @@ export interface components { */ version: string; }; + /** @description Response shape for `GET /api/v1/release-sources/applicability`. 
*/ + ApplicabilityResponse: { + /** + * @description `true` when at least one enabled `release_source` plugin applies to + * the requested library (or, if no `libraryId` was supplied, to *any* + * library). The frontend uses this to decide whether to render the + * per-series Tracking panel and Releases tab, or to show the + * bulk-track menu entry. + */ + applicable: boolean; + /** + * @description Plugin display names (or fallback to `name` when no manifest cached + * yet) of the enabled release-source plugins covering this library. + * Empty when `applicable` is `false`. Useful for surfacing "Powered by + * MangaUpdates, Nyaa" hints in the UI. + */ + pluginDisplayNames: string[]; + }; /** @description Author context for template evaluation. */ AuthorContextDto: { /** @@ -8320,6 +8713,27 @@ export interface components { */ year?: number | null; }; + /** + * @description Action requested by `POST /api/v1/releases/bulk`. + * @enum {string} + */ + BulkReleaseAction: "dismiss" | "mark-acquired" | "ignore" | "reset" | "delete"; + /** @description Request body for `POST /api/v1/releases/bulk`. */ + BulkReleaseActionRequest: { + action: components["schemas"]["BulkReleaseAction"]; + ids: string[]; + }; + /** @description Response from `POST /api/v1/releases/bulk`. */ + BulkReleaseActionResponse: { + /** @description Action that ran (echoed back for client-side confirmation toasts). */ + action: components["schemas"]["BulkReleaseAction"]; + /** + * Format: int64 + * @description Number of ledger rows actually affected. Less than `ids.len()` when + * some IDs were already deleted concurrently. + */ + affected: number; + }; /** @description Request for bulk renumber operations on multiple series */ BulkRenumberSeriesRequest: { /** @@ -8393,6 +8807,41 @@ export interface components { */ taskId: string; }; + /** + * @description Per-series outcome of a bulk track / untrack operation. 
+ * + * Returned in `BulkTrackForReleasesResponse.results` so the UI can show a + * per-row status (e.g. "tracked", "skipped: not found", "errored: …") without + * re-querying the tracking config endpoint per series. + */ + BulkTrackForReleasesItem: { + /** + * @description Free-form detail (error message for `errored`, reason for `skipped`). + * `None` for the success cases. + */ + detail?: string | null; + /** @description `tracked` | `untracked` | `skipped` | `errored`. */ + outcome: string; + /** Format: uuid */ + seriesId: string; + }; + /** + * @description Aggregate result of `POST /series/bulk/track-for-releases` and its untrack + * counterpart. Counts and per-series outcomes for client-side display. + */ + BulkTrackForReleasesResponse: { + /** @description Series whose `tracked` flag was already in the target state. No-ops. */ + alreadyInState: number; + /** + * @description Series successfully flipped to `tracked = true` (or `false` for the + * untrack endpoint). + */ + changed: number; + /** @description Series that could not be processed (missing, error, etc.). */ + errored: number; + /** @description Per-series outcomes in input order. */ + results: components["schemas"]["BulkTrackForReleasesItem"][]; + }; /** @description Request to update metadata locks for multiple books */ BulkUpdateBookLocksRequest: components["schemas"]["UpdateBookMetadataLocksRequest"] & { /** @description Book IDs to update locks for (max 500) */ @@ -8472,7 +8921,11 @@ export interface components { label: string; /** @description Whether this field is required */ required?: boolean; - /** @description Field type: "number", "string", or "boolean" */ + /** + * @description Field type — free-form documentation hint. Common values: "number", + * "string", "boolean", "string-array", "object". The host never validates + * stored config against this; it forwards the raw JSON to the plugin. 
+ */ type: string; }; /** @description Plugin configuration schema - documents available config options */ @@ -8798,6 +9251,18 @@ export interface components { /** @description Working directory for the plugin process */ workingDirectory?: string | null; }; + CreateSeriesAliasRequest: { + /** + * @description Alias text. Will be trimmed; must normalize to non-empty. + * @example Boku no Hero Academia + */ + alias: string; + /** + * @description Optional explicit source. Defaults to `manual` when called from the API. + * Plugin-internal flows write `metadata`; we don't expose that to HTTP. + */ + source?: string | null; + }; /** @description Request body for creating a new series export */ CreateSeriesExportRequest: { /** @description Book field keys to include (for "books" or "both" export types) */ @@ -8932,6 +9397,18 @@ export interface components { */ message: string; }; + /** + * @description Response from `DELETE /api/v1/releases/{id}`. + * + * Single-row delete returns a small confirmation rather than 204 so the + * frontend can surface a toast that mentions the etag clear ("the next + * poll will re-fetch this release"). Mirrors the bulk-delete shape with + * `affected = 1`. + */ + DeleteReleaseResponse: { + /** @description `true` if the row was deleted, `false` if it didn't exist. */ + deleted: boolean; + }; /** @description Detected series information for preview */ DetectedSeriesDto: { /** @description Number of books detected */ @@ -9248,6 +9725,50 @@ export interface components { pluginId: string; /** @enum {string} */ type: "plugin_deleted"; + } | { + /** + * Format: double + * @description Chapter announced (if the source emits chapters). + */ + chapter?: number | null; + /** + * @description Language code (e.g. `"en"`); used by client-side notification + * preference filters. + */ + language: string; + /** Format: uuid */ + ledgerId: string; + /** + * @description Plugin name that owns the source (`release_sources.plugin_id`). 
+ * Helps the frontend filter without an extra lookup. + */ + pluginId: string; + /** Format: uuid */ + seriesId: string; + /** Format: uuid */ + sourceId: string; + /** @enum {string} */ + type: "release_announced"; + /** + * Format: int32 + * @description Volume announced (if the source emits volumes). + */ + volume?: number | null; + } | { + /** + * @description `true` if the poll wrote a `last_error`. Cheap "did it fail" + * hint without forcing the client to refetch. + */ + hadError: boolean; + /** + * @description Plugin that owns the source (`release_sources.plugin_id`). + * Cheap filter for clients only watching certain plugins. + */ + pluginId: string; + /** Format: uuid */ + sourceId: string; + /** @enum {string} */ + type: "release_source_polled"; }; /** * @description Type of entity that was changed @@ -9910,6 +10431,24 @@ export interface components { * @example 2024-01-15T10:30:00Z */ updatedAt: string; + /** + * Format: float + * @description Upstream-vs-local chapter delta. See `SeriesDto::upstream_chapter_gap`. + * @example 3 + */ + upstreamChapterGap?: number | null; + /** + * @description Provider that supplied the upstream counts. See + * `SeriesDto::upstream_gap_provider`. + * @example MangaBaka + */ + upstreamGapProvider?: string | null; + /** + * Format: int32 + * @description Upstream-vs-local volume delta. See `SeriesDto::upstream_volume_gap`. + * @example 1 + */ + upstreamVolumeGap?: number | null; /** * Format: int64 * @description Number of books classified as a complete volume (volume set, chapter null). @@ -12362,6 +12901,106 @@ export interface components { totalPages: number; }; /** @description Generic paginated response wrapper with HATEOAS links */ + PaginatedResponse_ReleaseLedgerEntryDto: { + /** @description The data items for this page */ + data: { + /** + * Format: double + * @description Decimal supports `12.5` etc. 
+ */ + chapter?: number | null; + /** Format: double */ + confidence: number; + /** Format: date-time */ + createdAt: string; + /** + * @description Plugin-stable identity for the release (used for dedup). + * @example nyaa:1234567 + */ + externalReleaseId: string; + /** @description Sparse `{ "jxl": true, "container": "cbz", ... }`. */ + formatHints?: unknown; + /** @description Group/scanlator/uploader attribution. */ + groupOrUploader?: string | null; + /** + * Format: uuid + * @example 550e8400-e29b-41d4-a716-446655440a00 + */ + id: string; + /** @description Torrent info_hash, if applicable. */ + infoHash?: string | null; + language?: string | null; + /** + * @description Optional second URL for direct fetch (`.torrent`, `magnet:`, DDL + * link). Travels paired with [`Self::media_url_kind`]. + */ + mediaUrl?: string | null; + /** + * @description Classifies what `media_url` points at: `torrent` | `magnet` | + * `direct` | `other`. The frontend uses this to pick a kind-specific + * icon next to the standard external-link icon. + */ + mediaUrlKind?: string | null; + /** @description Source-specific extras (free-form). */ + metadata?: unknown; + /** Format: date-time */ + observedAt: string; + /** + * @description Where to acquire the release. Conventionally a human-readable + * landing page (Nyaa view page, MangaUpdates release page). + */ + payloadUrl: string; + /** + * Format: uuid + * @example 550e8400-e29b-41d4-a716-446655440002 + */ + seriesId: string; + /** + * @description Series title at the time of the response. Joined from the `series` + * table so the inbox UI can render a human-readable label without a + * follow-up fetch. Falls back to the empty string only if the series + * row was hard-deleted between the join and the read. 
+ * @example Chainsaw Man + */ + seriesTitle: string; + /** + * Format: uuid + * @example 550e8400-e29b-41d4-a716-446655440b00 + */ + sourceId: string; + /** @description `announced` | `dismissed` | `marked_acquired` | `hidden`. */ + state: string; + /** Format: int32 */ + volume?: number | null; + }[]; + /** @description HATEOAS navigation links */ + links: components["schemas"]["PaginationLinks"]; + /** + * Format: int64 + * @description Current page number (1-indexed) + * @example 1 + */ + page: number; + /** + * Format: int64 + * @description Number of items per page + * @example 50 + */ + pageSize: number; + /** + * Format: int64 + * @description Total number of items across all pages + * @example 150 + */ + total: number; + /** + * Format: int64 + * @description Total number of pages + * @example 3 + */ + totalPages: number; + }; + /** @description Generic paginated response wrapper with HATEOAS links */ PaginatedResponse_SeriesDto: { /** @description The data items for this page */ data: { @@ -12462,6 +13101,38 @@ export interface components { * @example 2024-01-15T10:30:00Z */ updatedAt: string; + /** + * Format: float + * @description Difference between the upstream original-language chapter count + * (`series_metadata.total_chapter_count`, supplied by metadata + * providers like MangaBaka or AniList) and the highest locally-owned + * chapter (`local_max_chapter`). + * + * Always `None` unless the series is tracked AND `track_chapters` is + * enabled AND the provider count is populated AND the rounded-to-1- + * decimal gap is positive. **This is an informational signal, not a + * release announcement** — Phase 6's MangaUpdates plugin owns the + * translation-release feed. + * @example 3 + */ + upstreamChapterGap?: number | null; + /** + * @description Display name of the metadata provider that supplied the upstream + * counts (e.g., "MangaBaka", "AniList"). Set whenever at least one of + * `upstream_chapter_gap` / `upstream_volume_gap` is populated. 
Used by + * the Phase 7 badge tooltip. + * @example MangaBaka + */ + upstreamGapProvider?: string | null; + /** + * Format: int32 + * @description Difference between the upstream original-language volume count + * (`series_metadata.total_volume_count`) and the highest locally-owned + * volume (`local_max_volume`). Same suppression rules as + * `upstream_chapter_gap`, gated on `track_volumes`. + * @example 1 + */ + upstreamVolumeGap?: number | null; /** * Format: int64 * @description Number of books in this series classified as a complete volume @@ -13135,6 +13806,11 @@ export interface components { externalIdSource?: string | null; /** @description Content types this plugin can provide metadata for (e.g., ["series", "book"]) */ metadataProvider?: string[]; + /** + * @description Whether the plugin declares the `release_source` capability (announces + * new chapter / volume releases for tracked series). + */ + releaseSource?: boolean; /** @description Can sync user reading progress */ userReadSync?: boolean; /** @description Can provide personalized recommendations */ @@ -13724,6 +14400,19 @@ export interface components { /** @description Total count */ total: number; }; + /** + * @description Response shape from the `poll-now` endpoint. + * + * `status` is `enqueued` after a successful enqueue. The `message` carries + * the task ID for follow-up (`tasks.id`); the task runs asynchronously, so + * this response does not reflect poll outcome. + */ + PollNowResponse: { + /** @description Human-readable message; includes the enqueued task ID. */ + message: string; + /** @description `enqueued` on success. */ + status: string; + }; /** @description Preview scan request */ PreviewScanRequest: { /** @@ -14133,81 +14822,260 @@ export interface components { user: components["schemas"]["UserInfo"]; }; /** - * @description PUT request for full replacement of book metadata + * @description Response shape for `GET /api/v1/releases/facets`. 
* - * All metadata fields will be replaced with the values in this request. - * Omitting a field (or setting it to null) will clear that field. - */ - ReplaceBookMetadataRequest: { + * Each list reflects the distinct values present in the ledger under the + * **other** active filters (Solr-style facet exclusion), so dropdowns + * never offer combinations that would yield zero results. The frontend + * uses these to populate cascading filter Select inputs without forcing + * the user to type UUIDs. + */ + ReleaseFacetsResponse: { + languages: components["schemas"]["ReleaseLanguageFacetDto"][]; + libraries: components["schemas"]["ReleaseLibraryFacetDto"][]; + series: components["schemas"]["ReleaseSeriesFacetDto"][]; + }; + /** @description One language option in the inbox facets response. */ + ReleaseLanguageFacetDto: { + /** Format: int64 */ + count: number; + language: string; + }; + /** @description A single release announcement. Sources write these; the inbox reads them. */ + ReleaseLedgerEntryDto: { /** - * @description Structured author information as JSON array - * @example [ - * { - * "name": "Andy Weir", - * "role": "author", - * "sortName": "Weir, Andy" - * } - * ] + * Format: double + * @description Decimal supports `12.5` etc. */ - authors?: components["schemas"]["BookAuthorDto"][] | null; + chapter?: number | null; + /** Format: double */ + confidence: number; + /** Format: date-time */ + createdAt: string; /** - * @description Awards information - * @example [ - * { - * "category": "Best Novel", - * "name": "Hugo Award", - * "won": true, - * "year": 2015 - * } - * ] + * @description Plugin-stable identity for the release (used for dedup). + * @example nyaa:1234567 */ - awards?: components["schemas"]["BookAwardDto"][] | null; + externalReleaseId: string; + /** @description Sparse `{ "jxl": true, "container": "cbz", ... }`. */ + formatHints?: unknown; + /** @description Group/scanlator/uploader attribution. 
*/ + groupOrUploader?: string | null; /** - * @description Whether the book is black and white - * @example false + * Format: uuid + * @example 550e8400-e29b-41d4-a716-446655440a00 */ - blackAndWhite?: boolean | null; - bookType?: null | components["schemas"]["BookTypeDto"]; + id: string; + /** @description Torrent info_hash, if applicable. */ + infoHash?: string | null; + language?: string | null; /** - * Format: float - * @description Chapter number (may be fractional, e.g. 42.5 for side chapters) - * @example 42 + * @description Optional second URL for direct fetch (`.torrent`, `magnet:`, DDL + * link). Travels paired with [`Self::media_url_kind`]. */ - chapter?: number | null; + mediaUrl?: string | null; /** - * @description Colorist(s) - comma-separated if multiple - * @example Richmond Lewis + * @description Classifies what `media_url` points at: `torrent` | `magnet` | + * `direct` | `other`. The frontend uses this to pick a kind-specific + * icon next to the standard external-link icon. */ - colorist?: string | null; + mediaUrlKind?: string | null; + /** @description Source-specific extras (free-form). */ + metadata?: unknown; + /** Format: date-time */ + observedAt: string; /** - * Format: int32 - * @description Total count in series - * @example 4 + * @description Where to acquire the release. Conventionally a human-readable + * landing page (Nyaa view page, MangaUpdates release page). */ - count?: number | null; + payloadUrl: string; /** - * @description Cover artist(s) - comma-separated if multiple - * @example David Mazzucchelli + * Format: uuid + * @example 550e8400-e29b-41d4-a716-446655440002 */ - coverArtist?: string | null; - /** @description Custom metadata JSON escape hatch */ - customMetadata?: Record | null; + seriesId: string; /** - * Format: int32 - * @description Publication day (1-31) - * @example 1 + * @description Series title at the time of the response. 
Joined from the `series` + * table so the inbox UI can render a human-readable label without a + * follow-up fetch. Falls back to the empty string only if the series + * row was hard-deleted between the join and the read. + * @example Chainsaw Man */ - day?: number | null; + seriesTitle: string; /** - * @description Edition information (e.g., "First Edition", "Revised Edition") - * @example First Edition + * Format: uuid + * @example 550e8400-e29b-41d4-a716-446655440b00 */ - edition?: string | null; + sourceId: string; + /** @description `announced` | `dismissed` | `marked_acquired` | `hidden`. */ + state: string; + /** Format: int32 */ + volume?: number | null; + }; + ReleaseLedgerListResponse: { + entries: components["schemas"]["ReleaseLedgerEntryDto"][]; + }; + /** @description One library option in the inbox facets response. */ + ReleaseLibraryFacetDto: { + /** Format: int64 */ + count: number; + /** Format: uuid */ + libraryId: string; + libraryName: string; + }; + /** + * @description One series option in the inbox facets response. Carries the joined + * `library_id` and `library_name` so the frontend can group the dropdown + * by library without a follow-up call. + */ + ReleaseSeriesFacetDto: { /** - * @description Editor(s) - comma-separated if multiple - * @example Dennis O'Neil + * Format: int64 + * @description Number of ledger rows matching the active filter for this series. */ - editor?: string | null; + count: number; + /** Format: uuid */ + libraryId: string; + libraryName: string; + /** Format: uuid */ + seriesId: string; + seriesTitle: string; + }; + /** @description A configured release source (one row per logical feed). */ + ReleaseSourceDto: { + /** @description Source-specific configuration (free-form). */ + config?: unknown; + /** Format: date-time */ + createdAt: string; + /** + * @description Per-source cron override (5-field POSIX cron). `null` when the row + * inherits the server-wide `release_tracking.default_cron_schedule`. 
+ * Always present in the response (not omitted on null) so clients can + * distinguish "inheriting" from "field missing." + */ + cronSchedule?: string | null; + displayName: string; + /** + * @description The cron expression actually used by the scheduler for this source: + * the row's `cron_schedule` if set, otherwise the resolved server-wide + * default. Lets the UI display "Daily (Default)" without needing to + * fetch the global setting separately. + */ + effectiveCronSchedule: string; + enabled: boolean; + /** @description Opaque etag/cursor used for conditional fetches. */ + etag?: string | null; + /** + * Format: uuid + * @example 550e8400-e29b-41d4-a716-446655440b00 + */ + id: string; + /** @description `rss-uploader` | `rss-series` | `api-feed` | `metadata-feed` | `metadata-piggyback`. */ + kind: string; + lastError?: string | null; + /** Format: date-time */ + lastErrorAt?: string | null; + /** Format: date-time */ + lastPolledAt?: string | null; + /** + * @description One-line summary of the most recent successful poll. Surfaced under + * the row's status badge so users can see *why* a poll returned no + * announcements without grepping logs. NULL until the first successful + * poll on the source. + */ + lastSummary?: string | null; + /** + * @description Owning plugin id, or `core` for in-core synthetic sources. + * @example release-nyaa + */ + pluginId: string; + /** + * @description Plugin-defined unique key. + * @example nyaa:user:tsuna69 + */ + sourceKey: string; + /** Format: date-time */ + updatedAt: string; + }; + ReleaseSourceListResponse: { + sources: components["schemas"]["ReleaseSourceDto"][]; + }; + /** + * @description PUT request for full replacement of book metadata + * + * All metadata fields will be replaced with the values in this request. + * Omitting a field (or setting it to null) will clear that field. 
+ */ + ReplaceBookMetadataRequest: { + /** + * @description Structured author information as JSON array + * @example [ + * { + * "name": "Andy Weir", + * "role": "author", + * "sortName": "Weir, Andy" + * } + * ] + */ + authors?: components["schemas"]["BookAuthorDto"][] | null; + /** + * @description Awards information + * @example [ + * { + * "category": "Best Novel", + * "name": "Hugo Award", + * "won": true, + * "year": 2015 + * } + * ] + */ + awards?: components["schemas"]["BookAwardDto"][] | null; + /** + * @description Whether the book is black and white + * @example false + */ + blackAndWhite?: boolean | null; + bookType?: null | components["schemas"]["BookTypeDto"]; + /** + * Format: float + * @description Chapter number (may be fractional, e.g. 42.5 for side chapters) + * @example 42 + */ + chapter?: number | null; + /** + * @description Colorist(s) - comma-separated if multiple + * @example Richmond Lewis + */ + colorist?: string | null; + /** + * Format: int32 + * @description Total count in series + * @example 4 + */ + count?: number | null; + /** + * @description Cover artist(s) - comma-separated if multiple + * @example David Mazzucchelli + */ + coverArtist?: string | null; + /** @description Custom metadata JSON escape hatch */ + customMetadata?: Record | null; + /** + * Format: int32 + * @description Publication day (1-31) + * @example 1 + */ + day?: number | null; + /** + * @description Edition information (e.g., "First Edition", "Revised Edition") + * @example First Edition + */ + edition?: string | null; + /** + * @description Editor(s) - comma-separated if multiple + * @example Dennis O'Neil + */ + editor?: string | null; /** * @description Format details * @example Trade Paperback @@ -14516,6 +15384,21 @@ export interface components { */ message: string; }; + /** + * @description Response shape from the `reset` endpoint. + * + * Returns the number of ledger rows removed so callers can show a + * confirmation toast. 
The source's transient poll state (etag, + * last_polled_at, last_error, last_summary) is also cleared, but those + * are not counted here. + */ + ResetReleaseSourceResponse: { + /** + * Format: int64 + * @description Number of `release_ledger` rows deleted for this source. + */ + deletedLedgerEntries: number; + }; /** @description Request body for bulk retrying all book errors */ RetryAllErrorsRequest: { errorType?: null | components["schemas"]["BookErrorTypeDto"]; @@ -14700,6 +15583,43 @@ export interface components { /** @description Cover source: "default" (first book cover) or "custom" (uploaded cover) */ source: string; }; + /** + * @description Title alias used by release-source plugins to match incoming releases by + * title (Nyaa, MangaUpdates without an external ID, etc.). + */ + SeriesAliasDto: { + /** + * @description Alias as entered (preserves casing/punctuation). + * @example My Hero Academia + */ + alias: string; + /** Format: date-time */ + createdAt: string; + /** + * Format: uuid + * @description Alias row ID. + * @example 550e8400-e29b-41d4-a716-446655440100 + */ + id: string; + /** + * @description Lowercased + punctuation-stripped form used for matching. + * @example my hero academia + */ + normalized: string; + /** + * Format: uuid + * @example 550e8400-e29b-41d4-a716-446655440002 + */ + seriesId: string; + /** + * @description `metadata` (auto-derived) | `manual` (user-entered). 
+ * @example manual + */ + source: string; + }; + SeriesAliasListResponse: { + aliases: components["schemas"]["SeriesAliasDto"][]; + }; /** @description Response containing the average community rating for a series */ SeriesAverageRatingResponse: { /** @@ -14985,6 +15905,38 @@ export interface components { * @example 2024-01-15T10:30:00Z */ updatedAt: string; + /** + * Format: float + * @description Difference between the upstream original-language chapter count + * (`series_metadata.total_chapter_count`, supplied by metadata + * providers like MangaBaka or AniList) and the highest locally-owned + * chapter (`local_max_chapter`). + * + * Always `None` unless the series is tracked AND `track_chapters` is + * enabled AND the provider count is populated AND the rounded-to-1- + * decimal gap is positive. **This is an informational signal, not a + * release announcement** — Phase 6's MangaUpdates plugin owns the + * translation-release feed. + * @example 3 + */ + upstreamChapterGap?: number | null; + /** + * @description Display name of the metadata provider that supplied the upstream + * counts (e.g., "MangaBaka", "AniList"). Set whenever at least one of + * `upstream_chapter_gap` / `upstream_volume_gap` is populated. Used by + * the Phase 7 badge tooltip. + * @example MangaBaka + */ + upstreamGapProvider?: string | null; + /** + * Format: int32 + * @description Difference between the upstream original-language volume count + * (`series_metadata.total_volume_count`) and the highest locally-owned + * volume (`local_max_volume`). Same suppression rules as + * `upstream_chapter_gap`, gated on `track_volumes`. 
+ * @example 1 + */ + upstreamVolumeGap?: number | null; /** * Format: int64 * @description Number of books in this series classified as a complete volume @@ -15301,6 +16253,65 @@ export interface components { * @enum {string} */ SeriesStrategy: "series_volume" | "series_volume_chapter" | "flat" | "publisher_hierarchy" | "calibre" | "custom"; + /** + * @description Per-series release-tracking configuration. + * + * Returned even for untracked series — the row defaults to `tracked: false` + * with conservative defaults so the frontend can render the panel without + * special-casing missing rows. + */ + SeriesTrackingDto: { + /** + * Format: double + * @description Per-series override of the server's confidence threshold (0.0 - 1.0). + */ + confidenceThresholdOverride?: number | null; + /** + * Format: date-time + * @description When the row was created (epoch when virtual). + */ + createdAt: string; + /** + * @description Per-series language preference (ISO 639-1 codes, e.g. `["en", "es"]`). + * `null` means "fall back to the server-wide default (`release_tracking.default_languages`)." + * Used by aggregation feeds (e.g. MangaUpdates) that emit candidates in many languages. + */ + languages?: string[] | null; + /** + * Format: double + * @description Latest known external chapter (supports decimals like 12.5). + */ + latestKnownChapter?: number | null; + /** + * Format: int32 + * @description Latest known external volume. + */ + latestKnownVolume?: number | null; + /** + * Format: int32 + * @description Per-series override of the source poll interval (seconds). + */ + pollIntervalOverrideS?: number | null; + /** + * Format: uuid + * @description Series ID this config belongs to. + * @example 550e8400-e29b-41d4-a716-446655440002 + */ + seriesId: string; + /** @description Whether to announce new chapters. */ + trackChapters: boolean; + /** @description Whether to announce new volumes. */ + trackVolumes: boolean; + /** @description Whether release tracking is enabled. 
*/ + tracked: boolean; + /** + * Format: date-time + * @description When the row was last updated (epoch when virtual). + */ + updatedAt: string; + /** @description Sparse map of `{ "": { "first": ch, "last": ch } }`. */ + volumeChapterMap?: unknown; + }; /** @description Response for series update */ SeriesUpdateResponse: { /** @@ -16215,6 +17226,21 @@ export interface components { type: "user_plugin_recommendation_dismiss"; /** Format: uuid */ userId: string; + } | { + /** + * Format: uuid + * @description If set, scope to this library; otherwise all series. + */ + libraryId?: string | null; + /** @description If set, scope to these specific series (takes precedence over library_id). */ + seriesIds?: string[] | null; + /** @enum {string} */ + type: "backfill_tracking_from_metadata"; + } | { + /** Format: uuid */ + sourceId: string; + /** @enum {string} */ + type: "poll_release_source"; }; /** @description Metrics for a specific task type */ TaskTypeMetricsDto: { @@ -16779,6 +17805,60 @@ export interface components { */ progressPercentage?: number | null; }; + /** + * @description PATCH payload for ledger row state transitions. + * + * Only `state` is patchable from the API today; the rest of the row is + * source-controlled. `state` is validated against the canonical set: + * `announced` | `dismissed` | `marked_acquired` | `hidden`. + */ + UpdateReleaseLedgerEntryRequest: { + /** @description New state. See [`ReleaseLedgerEntryDto::state`] for allowed values. */ + state?: string | null; + }; + /** + * @description PATCH payload for a release source. All fields optional; omit to leave alone. 
+ * + * `cron_schedule` uses double-Option semantics: + * - field absent (`None`): leave the row's cron_schedule unchanged + * - explicit `null` (`Some(None)`) / `""` / `" "`: clear the override + * (revert to inheriting the server-wide + * `release_tracking.default_cron_schedule`) + * - `Some(Some("0 *\/6 * * *"))`: set a per-source override + */ + UpdateReleaseSourceRequest: { + /** + * @description 5-field POSIX cron expression. Use `null` (or empty string) to + * clear the override and inherit the server-wide default. + */ + cronSchedule?: string | null; + displayName?: string | null; + enabled?: boolean | null; + }; + /** + * @description PATCH payload for tracking config. All fields are optional: + * omit a field to leave it untouched. Use a JSON `null` on a nullable field + * to clear it explicitly. + */ + UpdateSeriesTrackingRequest: { + /** Format: double */ + confidenceThresholdOverride?: number | null; + /** @description ISO 639-1 codes; `null` clears (falls back to server-wide default). */ + languages?: string[] | null; + /** + * Format: double + * @description Use `Some(null)` to clear, `Some()` to set, omit to leave alone. 
+ */ + latestKnownChapter?: number | null; + /** Format: int32 */ + latestKnownVolume?: number | null; + /** Format: int32 */ + pollIntervalOverrideS?: number | null; + trackChapters?: boolean | null; + trackVolumes?: boolean | null; + tracked?: boolean | null; + volumeChapterMap?: unknown; + }; /** @description Update setting request */ UpdateSettingRequest: { /** @@ -22529,7 +23609,7 @@ export interface operations { }; }; }; - list_active_scans: { + list_release_sources: { parameters: { query?: never; header?: never; @@ -22538,16 +23618,16 @@ export interface operations { }; requestBody?: never; responses: { - /** @description List of active scans */ + /** @description Source list */ 200: { headers: { [name: string]: unknown; }; content: { - "application/json": components["schemas"]["ScanStatusDto"][]; + "application/json": components["schemas"]["ReleaseSourceListResponse"]; }; }; - /** @description Permission denied */ + /** @description PluginsManage permission required */ 403: { headers: { [name: string]: unknown; @@ -22556,23 +23636,32 @@ export interface operations { }; }; }; - scan_progress_stream: { + get_release_tracking_applicability: { parameters: { - query?: never; + query?: { + /** + * @description Optional library scope. When provided, only plugins that apply to + * this library are considered (a plugin's `library_ids` field is + * either empty = all, or contains this UUID). 
+ */ + libraryId?: string | null; + }; header?: never; path?: never; cookie?: never; }; requestBody?: never; responses: { - /** @description SSE stream of scan progress updates */ + /** @description Applicability info */ 200: { headers: { [name: string]: unknown; }; - content?: never; + content: { + "application/json": components["schemas"]["ApplicabilityResponse"]; + }; }; - /** @description Permission denied */ + /** @description SeriesRead permission required */ 403: { headers: { [name: string]: unknown; @@ -22581,29 +23670,487 @@ export interface operations { }; }; }; - list_series: { + update_release_source: { parameters: { - query?: { - /** @description Page number (1-indexed, default 1) */ - page?: number; - /** @description Number of items per page (max 100, default 50) */ - pageSize?: number; - /** @description Sort parameter (format: "field,direction" e.g. "name,asc") */ - sort?: string | null; - /** @description Filter by genres (comma-separated, AND logic - series must have ALL specified genres) */ - genres?: string | null; - /** @description Filter by tags (comma-separated, AND logic - series must have ALL specified tags) */ - tags?: string | null; - /** @description Filter by library ID */ - libraryId?: string | null; - /** - * @description Return full series data including metadata, locks, genres, tags, alternate titles, - * external ratings, and external links. Default is false for backward compatibility. 
- */ - full?: boolean; - }; + query?: never; header?: never; - path?: never; + path: { + /** @description Source ID */ + source_id: string; + }; + cookie?: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["UpdateReleaseSourceRequest"]; + }; + }; + responses: { + /** @description Source updated */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["ReleaseSourceDto"]; + }; + }; + /** @description Invalid update payload */ + 400: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description PluginsManage permission required */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Source not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + poll_release_source_now: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Source ID */ + source_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Poll task enqueued */ + 202: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["PollNowResponse"]; + }; + }; + /** @description PluginsManage permission required */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Source not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Source disabled */ + 409: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + reset_release_source: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Source ID */ + source_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Source reset */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": 
components["schemas"]["ResetReleaseSourceResponse"]; + }; + }; + /** @description PluginsManage permission required */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Source not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + list_release_inbox: { + parameters: { + query?: { + /** + * @description Filter by state. Defaults to `announced`. Pass `all` to disable + * state filtering entirely (returns rows in every state). + */ + state?: string | null; + seriesId?: string | null; + sourceId?: string | null; + language?: string | null; + /** @description Restrict to series belonging to this library. */ + libraryId?: string | null; + page?: number; + pageSize?: number; + }; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Paginated inbox entries */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["PaginatedResponse_ReleaseLedgerEntryDto"]; + }; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + bulk_release_action: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["BulkReleaseActionRequest"]; + }; + }; + responses: { + /** @description Bulk action applied */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["BulkReleaseActionResponse"]; + }; + }; + /** @description Empty ID list or invalid action */ + 400: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + list_release_facets: { + parameters: { + query?: { + state?: string | null; + seriesId?: string | null; + 
sourceId?: string | null; + language?: string | null; + libraryId?: string | null; + }; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Facets for the inbox view */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["ReleaseFacetsResponse"]; + }; + }; + /** @description Invalid state filter */ + 400: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description SeriesRead permission required */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + delete_release: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Ledger entry ID */ + release_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Release deleted */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["DeleteReleaseResponse"]; + }; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Ledger entry not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + update_release_entry: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Ledger entry ID */ + release_id: string; + }; + cookie?: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["UpdateReleaseLedgerEntryRequest"]; + }; + }; + responses: { + /** @description Updated ledger entry */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["ReleaseLedgerEntryDto"]; + }; + }; + /** @description Invalid state */ + 400: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; 
+ /** @description Ledger entry not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + dismiss_release: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Ledger entry ID */ + release_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Release dismissed */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["ReleaseLedgerEntryDto"]; + }; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Ledger entry not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + mark_release_acquired: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Ledger entry ID */ + release_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Release marked acquired */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["ReleaseLedgerEntryDto"]; + }; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Ledger entry not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + list_active_scans: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description List of active scans */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["ScanStatusDto"][]; + }; + }; + /** @description Permission denied */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + scan_progress_stream: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; 
+ requestBody?: never; + responses: { + /** @description SSE stream of scan progress updates */ + 200: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Permission denied */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + list_series: { + parameters: { + query?: { + /** @description Page number (1-indexed, default 1) */ + page?: number; + /** @description Number of items per page (max 100, default 50) */ + pageSize?: number; + /** @description Sort parameter (format: "field,direction" e.g. "name,asc") */ + sort?: string | null; + /** @description Filter by genres (comma-separated, AND logic - series must have ALL specified genres) */ + genres?: string | null; + /** @description Filter by tags (comma-separated, AND logic - series must have ALL specified tags) */ + tags?: string | null; + /** @description Filter by library ID */ + libraryId?: string | null; + /** + * @description Return full series data including metadata, locks, genres, tags, alternate titles, + * external ratings, and external links. Default is false for backward compatibility. 
+ */ + full?: boolean; + }; + header?: never; + path?: never; cookie?: never; }; requestBody?: never; @@ -22940,7 +24487,83 @@ export interface operations { headers: { [name: string]: unknown; }; - content?: never; + content?: never; + }; + /** @description Unauthorized */ + 401: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + bulk_generate_series_book_thumbnails: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["BulkGenerateSeriesBookThumbnailsRequest"]; + }; + }; + responses: { + /** @description Thumbnail generation task queued */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["BulkTaskResponse"]; + }; + }; + /** @description Unauthorized */ + 401: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + bulk_generate_series_thumbnails: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["BulkGenerateSeriesThumbnailsRequest"]; + }; + }; + responses: { + /** @description Series thumbnail generation task queued */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["BulkTaskResponse"]; + }; }; /** @description Unauthorized */ 401: { @@ -22958,7 +24581,7 @@ export interface operations { }; }; }; - bulk_generate_series_book_thumbnails: { + bulk_reprocess_series_titles: { parameters: { query?: never; header?: never; @@ -22967,11 +24590,11 @@ export interface operations { }; requestBody: { content: { - "application/json": 
components["schemas"]["BulkGenerateSeriesBookThumbnailsRequest"]; + "application/json": components["schemas"]["BulkReprocessSeriesTitlesRequest"]; }; }; responses: { - /** @description Thumbnail generation task queued */ + /** @description Title reprocessing task queued */ 200: { headers: { [name: string]: unknown; @@ -22996,7 +24619,7 @@ export interface operations { }; }; }; - bulk_generate_series_thumbnails: { + bulk_track_series_for_releases: { parameters: { query?: never; header?: never; @@ -23005,17 +24628,17 @@ export interface operations { }; requestBody: { content: { - "application/json": components["schemas"]["BulkGenerateSeriesThumbnailsRequest"]; + "application/json": components["schemas"]["BulkSeriesRequest"]; }; }; responses: { - /** @description Series thumbnail generation task queued */ + /** @description Bulk-tracked series */ 200: { headers: { [name: string]: unknown; }; content: { - "application/json": components["schemas"]["BulkTaskResponse"]; + "application/json": components["schemas"]["BulkTrackForReleasesResponse"]; }; }; /** @description Unauthorized */ @@ -23034,7 +24657,7 @@ export interface operations { }; }; }; - bulk_reprocess_series_titles: { + bulk_mark_series_as_unread: { parameters: { query?: never; header?: never; @@ -23043,17 +24666,17 @@ export interface operations { }; requestBody: { content: { - "application/json": components["schemas"]["BulkReprocessSeriesTitlesRequest"]; + "application/json": components["schemas"]["BulkSeriesRequest"]; }; }; responses: { - /** @description Title reprocessing task queued */ + /** @description Series marked as unread */ 200: { headers: { [name: string]: unknown; }; content: { - "application/json": components["schemas"]["BulkTaskResponse"]; + "application/json": components["schemas"]["MarkReadResponse"]; }; }; /** @description Unauthorized */ @@ -23072,7 +24695,7 @@ export interface operations { }; }; }; - bulk_mark_series_as_unread: { + bulk_untrack_series_for_releases: { parameters: { query?: 
never; header?: never; @@ -23085,13 +24708,13 @@ export interface operations { }; }; responses: { - /** @description Series marked as unread */ + /** @description Bulk-untracked series */ 200: { headers: { [name: string]: unknown; }; content: { - "application/json": components["schemas"]["MarkReadResponse"]; + "application/json": components["schemas"]["BulkTrackForReleasesResponse"]; }; }; /** @description Unauthorized */ @@ -23770,6 +25393,137 @@ export interface operations { }; }; }; + list_series_aliases: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Series ID */ + series_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description List of aliases */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["SeriesAliasListResponse"]; + }; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Series not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + create_series_alias: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Series ID */ + series_id: string; + }; + cookie?: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["CreateSeriesAliasRequest"]; + }; + }; + responses: { + /** @description Alias already existed (idempotent) */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["SeriesAliasDto"]; + }; + }; + /** @description Alias created */ + 201: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["SeriesAliasDto"]; + }; + }; + /** @description Invalid alias (empty after normalization) */ + 400: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: 
string]: unknown; + }; + content?: never; + }; + /** @description Series not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + delete_series_alias: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Series ID */ + series_id: string; + /** @description Alias ID */ + alias_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Alias deleted */ + 204: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Series or alias not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; get_series_alternate_titles: { parameters: { query?: never; @@ -25343,6 +27097,53 @@ export interface operations { }; }; }; + list_series_releases: { + parameters: { + query?: { + /** + * @description Filter by state. Defaults to all states (no filter) so the per-series + * view shows the full history. + */ + state?: string | null; + /** @description 1-indexed page number. */ + page?: number; + /** @description Items per page (max 500, default 50). 
*/ + pageSize?: number; + }; + header?: never; + path: { + /** @description Series ID */ + series_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Paginated ledger entries for the series */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["PaginatedResponse_ReleaseLedgerEntryDto"]; + }; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Series not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; renumber_series: { parameters: { query?: never; @@ -25802,6 +27603,84 @@ export interface operations { }; }; }; + get_series_tracking: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Series ID */ + series_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Tracking config */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["SeriesTrackingDto"]; + }; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description Series not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; + update_series_tracking: { + parameters: { + query?: never; + header?: never; + path: { + /** @description Series ID */ + series_id: string; + }; + cookie?: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["UpdateSeriesTrackingRequest"]; + }; + }; + responses: { + /** @description Tracking config updated */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["SeriesTrackingDto"]; + }; + }; + /** @description Forbidden */ + 403: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + /** @description 
Series not found */ + 404: { + headers: { + [name: string]: unknown; + }; + content?: never; + }; + }; + }; mark_series_as_unread: { parameters: { query?: never; diff --git a/web/src/types/index.ts b/web/src/types/index.ts index 93e1d708..bc20124c 100644 --- a/web/src/types/index.ts +++ b/web/src/types/index.ts @@ -244,6 +244,12 @@ export function isPluginEvent(event: EntityEvent): event is EntityEvent & { ); } +export function isReleaseAnnouncedEvent( + event: EntityEvent, +): event is EntityEvent & { type: "release_announced" } { + return event.type === "release_announced"; +} + // ============================================================================= // Re-export the raw generated types for advanced use cases // ============================================================================= diff --git a/web/src/types/preferences.ts b/web/src/types/preferences.ts index 841981f4..150cc4af 100644 --- a/web/src/types/preferences.ts +++ b/web/src/types/preferences.ts @@ -15,6 +15,15 @@ export interface TypedPreferences { // Library preferences "library.show_deleted_books": boolean; + + // Release-tracking preferences + /** + * Series IDs whose `release_announced` events should NOT bump the badge or + * surface a toast for this user. The series detail page exposes a per-series + * mute toggle that writes here; the Release Tracking settings page exposes + * a "Clear all mutes" action that deletes the preference. + */ + "release_tracking.muted_series_ids": string[]; } /** @@ -28,6 +37,7 @@ export type PreferenceKey = keyof TypedPreferences; export const PREFERENCE_DEFAULTS: TypedPreferences = { "ui.theme": "system", "library.show_deleted_books": false, + "release_tracking.muted_series_ids": [], }; /**