diff --git a/.env.example b/.env.example
index a8829d6c..35808457 100644
--- a/.env.example
+++ b/.env.example
@@ -1,6 +1,7 @@
 # vi: ft=sh
 # shellcheck disable=SC2034
+# THIS IS V5 .ENV ; IF YOU ARE ON V4 PLEASE LOOK AT THE .ENV HERE: https://github.com/zoriya/Kyoo/blob/v4.7.1/.env.example
 
 # Useful config options
 
@@ -9,23 +10,8 @@ LIBRARY_ROOT=./video
 # You should set this to a path where kyoo can write large amount of data, this is used as a cache by the transcoder.
 # It will automatically be cleaned up on kyoo's startup/shutdown/runtime.
 CACHE_ROOT=/tmp/kyoo_cache
-LIBRARY_LANGUAGES=en
-# If this is true, kyoo will prefer to download the media in the original language of the item.
-MEDIA_PREFER_ORIGINAL_LANGUAGE=false
 # A pattern (regex) to ignore files.
-LIBRARY_IGNORE_PATTERN=".*/[dD]ownloads?/.*|.*[Tt][Rr][Aa][Ii][Ll][Ee][Rr].*"
-
-# If this is true, new accounts wont have any permissions before you approve them in your admin dashboard.
-REQUIRE_ACCOUNT_VERIFICATION=true
-# Specify permissions of guest accounts, default is no permissions.
-UNLOGGED_PERMISSIONS=
-# but you can allow anyone to use your instance without account by doing:
-# UNLOGGED_PERMISSIONS=overall.read,overall.play
-# You can specify this to allow guests users to see your collection without behing able to play videos for example:
-# UNLOGGED_PERMISSIONS=overall.read
-
-# Specify permissions of new accounts.
-DEFAULT_PERMISSIONS=overall.read,overall.play
+LIBRARY_IGNORE_PATTERN=".*/[dD]ownloads?/.*"
 
 # Hardware transcoding (equivalent of --profile docker compose option).
 COMPOSE_PROFILES=cpu # cpu (no hardware acceleration) or vaapi or qsv or nvidia
@@ -34,11 +20,6 @@ COMPOSE_PROFILES=cpu # cpu (no hardware acceleration) or vaapi or qsv or nvidia
 GOCODER_PRESET=fast
 
-# The following value should be set to a random sequence of characters.
-# You MUST change it when installing kyoo (for security)
-# You can input multiple api keys separated by a ,
-KYOO_APIKEYS=t7H5!@4iMNsAaSJQ49pat4jprJgTcF656if#J3
-
 # Keep those empty to use kyoo's default api key. You can also specify a custom API key if you want.
 # go to https://www.themoviedb.org/settings/api and copy the api key (not the read access token, the api key)
 THEMOVIEDB_APIKEY=
@@ -77,28 +58,14 @@ OIDC_SERVICE_AUTHMETHOD=ClientSecretBasic
 KYOO_URL=
 
 # Database things
-POSTGRES_USER=KyooUser
-POSTGRES_PASSWORD=KyooPassword
-POSTGRES_DB=kyooDB
-POSTGRES_SERVER=postgres
-POSTGRES_PORT=5432
-
-# Read by the api container to know if it should run meilisearch's migrations/sync
-# and download missing images. This is a good idea to only have one instance with this on
-# Note: it does not run postgres migrations, use the migration container for that.
-RUN_MIGRATIONS=true
-
-MEILI_HOST="http://meilisearch:7700"
-MEILI_MASTER_KEY="ghvjkgisbgkbgskegblfqbgjkebbhgwkjfb"
-
-RABBITMQ_HOST=rabbitmq
-RABBITMQ_PORT=5672
-RABBITMQ_DEFAULT_USER=kyoo
-RABBITMQ_DEFAULT_PASS=aohohunuhouhuhhoahothonseuhaoensuthoaentsuhha
-
+PGUSER=kyoo
+PGPASSWORD=password
+PGDATABASE=kyoo
+PGHOST=postgres
+PGPORT=5432
 
 # v5 stuff, does absolutely nothing on master (aka: you can delete this)
 EXTRA_CLAIMS='{"permissions": ["core.read"], "verified": false}'
-FIRST_USER_CLAIMS='{"permissions": ["users.read", "users.write", "apikeys.read", "apikeys.write", "users.delete", "core.read", "core.write"], "verified": true}'
+FIRST_USER_CLAIMS='{"permissions": ["users.read", "users.write", "apikeys.read", "apikeys.write", "users.delete", "core.read", "core.write", "scanner.trigger"], "verified": true}'
 GUEST_CLAIMS='{"permissions": ["core.read"]}'
 PROTECTED_CLAIMS="permissions,verified"
diff --git a/.github/workflows/auth-hurl.yml b/.github/workflows/auth-hurl.yml
index 42de49a9..154d17d3 100644
--- a/.github/workflows/auth-hurl.yml
+++ b/.github/workflows/auth-hurl.yml
@@ -48,8 +48,8 @@ jobs:
         working-directory: ./auth
         run: |
           ./keibi > logs &
-          wget --retry-connrefused --retry-on-http-error=502 http://localhost:4568/health
-          hurl --error-format long --variable host=http://localhost:4568 tests/*
+          wget --retry-connrefused --retry-on-http-error=502 http://localhost:4568/auth/health
+          hurl --error-format long --variable host=http://localhost:4568/auth tests/*
         env:
           PGHOST: localhost
           FIRST_USER_CLAIMS: '{"permissions": ["users.read"]}'
diff --git a/.github/workflows/coding-style.yml b/.github/workflows/coding-style.yml
index 79f7581f..24767c0a 100644
--- a/.github/workflows/coding-style.yml
+++ b/.github/workflows/coding-style.yml
@@ -71,3 +71,15 @@ jobs:
 
       - name: Run go fmt
         run: if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then exit 1; fi
+
+  auth:
+    name: "Lint auth"
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: ./auth
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Run go fmt
+        run: if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then exit 1; fi
diff --git a/.pg_format b/.pg_format
index 882260a2..c9795ffe 100644
--- a/.pg_format
+++ b/.pg_format
@@ -3,3 +3,5 @@ function-case=1 #lowercase
 keyword-case=1
 type-case=1
 no-space-function=1
+keep-newline=1
+nogrouping=1
diff --git a/api/.dockerignore b/api/.dockerignore
new file mode 100644
index 00000000..6eae1707
--- /dev/null
+++ b/api/.dockerignore
@@ -0,0 +1,7 @@
+**
+!/package.json
+!/bun.lock
+!/tsconfig.json
+!/patches
+!/src
+!/drizzle
diff --git a/api/.env.example b/api/.env.example
index 25721a6f..5a016a50 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -20,7 +20,7 @@ IMAGES_PATH=./images
 # https://www.postgresql.org/docs/current/libpq-envars.html
 PGUSER=kyoo
 PGPASSWORD=password
-PGDATABASE=kyooDB
+PGDATABASE=kyoo
 PGHOST=postgres
 PGPORT=5432
 # PGOPTIONS=-c search_path=kyoo,public
diff --git a/api/.gitignore b/api/.gitignore
index e5ef0f40..ce7b6085 100644
--- a/api/.gitignore
+++ b/api/.gitignore
@@ -1,3 +1,3 @@
-node_modules
+/node_modules
 **/*.bun
 images
diff --git a/api/Dockerfile.dev b/api/Dockerfile.dev
index 33cc0ec9..6553ac28 100644
--- a/api/Dockerfile.dev
+++ b/api/Dockerfile.dev
@@ -5,6 +5,8 @@ COPY package.json bun.lock .
 COPY patches patches
 RUN bun install --production
 
+COPY . .
+ EXPOSE 3567 CMD ["bun", "dev"] diff --git a/api/bun.lock b/api/bun.lock index a1983672..309a2e3b 100644 --- a/api/bun.lock +++ b/api/bun.lock @@ -8,7 +8,7 @@ "blurhash": "^2.0.5", "drizzle-kit": "^0.31.0", "drizzle-orm": "0.43.1", - "elysia": "^1.2.25", + "elysia": "^1.3.0", "jose": "^6.0.10", "parjs": "^1.3.9", "pg": "^8.15.6", @@ -27,9 +27,9 @@ "packages": { "@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="], - "@elysiajs/swagger": ["@elysiajs/swagger@github:zoriya/elysia-swagger#ef89c17", { "dependencies": { "@scalar/themes": "^0.9.81", "@scalar/types": "^0.1.3", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.2.0" } }, "zoriya-elysia-swagger-ef89c17"], + "@elysiajs/swagger": ["@elysiajs/swagger@github:zoriya/elysia-swagger#f88fbc7", { "dependencies": { "@scalar/themes": "^0.9.81", "@scalar/types": "^0.1.3", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "zoriya-elysia-swagger-f88fbc7"], - "@emnapi/runtime": ["@emnapi/runtime@1.4.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-LMshMVP0ZhACNjQNYXiU1iZJ6QCcv0lUdPDPugqGvCGXt5xtRVBPdtA0qU12pEXZzpWAhWlZYptfdAFq10DOVQ=="], + "@emnapi/runtime": ["@emnapi/runtime@1.4.3", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ=="], "@esbuild-kit/core-utils": ["@esbuild-kit/core-utils@3.3.2", "", { "dependencies": { "esbuild": "~0.18.20", "source-map-support": "^0.5.21" } }, "sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ=="], @@ -85,9 +85,9 @@ "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.3", "", { "os": "win32", "cpu": "x64" }, "sha512-ICgUR+kPimx0vvRzf+N/7L7tVSQeE3BYY+NhHRHXS1kBuPO7z2+7ea2HbhDyZdTephgvNvKrlDDKUexuCVBVvg=="], - "@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.1.0" }, "os": "darwin", "cpu": "arm64" }, "sha512-pn44xgBtgpEbZsu+lWf2KNb6OAf70X68k+yk69Ic2Xz11zHR/w24/U49XT7AeRwJ0Px+mhALhU5LPci1Aymk7A=="], + "@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.1.0" }, "os": "darwin", "cpu": "arm64" }, "sha512-OfXHZPppddivUJnqyKoi5YVeHRkkNE2zUFT2gbpKxp/JZCFYEYubnMg+gOp6lWfasPrTS+KPosKqdI+ELYVDtg=="], - "@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.1.0" }, "os": "darwin", "cpu": "x64" }, "sha512-VfuYgG2r8BpYiOUN+BfYeFo69nP/MIwAtSJ7/Zpxc5QF3KS22z8Pvg3FkrSFJBPNQ7mmcUcYQFBmEQp7eu1F8Q=="], + "@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.1.0" }, "os": "darwin", "cpu": "x64" }, "sha512-dYvWqmjU9VxqXmjEtjmvHnGqF8GrVjM2Epj9rJ6BUIXvk8slvNDJbhGFvIoXzkDhrJC2jUxNLz/GUjjvSzfw+g=="], "@img/sharp-libvips-darwin-arm64": ["@img/sharp-libvips-darwin-arm64@1.1.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-HZ/JUmPwrJSoM4DIQPv/BfNh9yrOA8tlBbqbLz4JZ5uew2+o22Ik+tHQJcih7QJuSa0zo5coHTfD5J8inqj9DA=="], @@ -107,23 +107,25 @@ "@img/sharp-libvips-linuxmusl-x64": ["@img/sharp-libvips-linuxmusl-x64@1.1.0", "", { "os": "linux", "cpu": "x64" }, "sha512-wK7SBdwrAiycjXdkPnGCPLjYb9lD4l6Ze2gSdAGVZrEL05AOUJESWU2lhlC+Ffn5/G+VKuSm6zzbQSzFX/P65A=="], - "@img/sharp-linux-arm": ["@img/sharp-linux-arm@0.34.1", "", { 
"optionalDependencies": { "@img/sharp-libvips-linux-arm": "1.1.0" }, "os": "linux", "cpu": "arm" }, "sha512-anKiszvACti2sGy9CirTlNyk7BjjZPiML1jt2ZkTdcvpLU1YH6CXwRAZCA2UmRXnhiIftXQ7+Oh62Ji25W72jA=="], + "@img/sharp-linux-arm": ["@img/sharp-linux-arm@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm": "1.1.0" }, "os": "linux", "cpu": "arm" }, "sha512-0DZzkvuEOqQUP9mo2kjjKNok5AmnOr1jB2XYjkaoNRwpAYMDzRmAqUIa1nRi58S2WswqSfPOWLNOr0FDT3H5RQ=="], - "@img/sharp-linux-arm64": ["@img/sharp-linux-arm64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm64": "1.1.0" }, "os": "linux", "cpu": "arm64" }, "sha512-kX2c+vbvaXC6vly1RDf/IWNXxrlxLNpBVWkdpRq5Ka7OOKj6nr66etKy2IENf6FtOgklkg9ZdGpEu9kwdlcwOQ=="], + "@img/sharp-linux-arm64": ["@img/sharp-linux-arm64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm64": "1.1.0" }, "os": "linux", "cpu": "arm64" }, "sha512-D8n8wgWmPDakc83LORcfJepdOSN6MvWNzzz2ux0MnIbOqdieRZwVYY32zxVx+IFUT8er5KPcyU3XXsn+GzG/0Q=="], - "@img/sharp-linux-s390x": ["@img/sharp-linux-s390x@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linux-s390x": "1.1.0" }, "os": "linux", "cpu": "s390x" }, "sha512-7s0KX2tI9mZI2buRipKIw2X1ufdTeaRgwmRabt5bi9chYfhur+/C1OXg3TKg/eag1W+6CCWLVmSauV1owmRPxA=="], + "@img/sharp-linux-s390x": ["@img/sharp-linux-s390x@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linux-s390x": "1.1.0" }, "os": "linux", "cpu": "s390x" }, "sha512-EGZ1xwhBI7dNISwxjChqBGELCWMGDvmxZXKjQRuqMrakhO8QoMgqCrdjnAqJq/CScxfRn+Bb7suXBElKQpPDiw=="], - "@img/sharp-linux-x64": ["@img/sharp-linux-x64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linux-x64": "1.1.0" }, "os": "linux", "cpu": "x64" }, "sha512-wExv7SH9nmoBW3Wr2gvQopX1k8q2g5V5Iag8Zk6AVENsjwd+3adjwxtp3Dcu2QhOXr8W9NusBU6XcQUohBZ5MA=="], + "@img/sharp-linux-x64": ["@img/sharp-linux-x64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linux-x64": "1.1.0" }, "os": "linux", "cpu": "x64" }, "sha512-sD7J+h5nFLMMmOXYH4DD9UtSNBD05tWSSdWAcEyzqW8Cn5UxXvsHAxmxSesYUsTOBmUnjtxghKDl15EvfqLFbQ=="], - "@img/sharp-linuxmusl-arm64": ["@img/sharp-linuxmusl-arm64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-arm64": "1.1.0" }, "os": "linux", "cpu": "arm64" }, "sha512-DfvyxzHxw4WGdPiTF0SOHnm11Xv4aQexvqhRDAoD00MzHekAj9a/jADXeXYCDFH/DzYruwHbXU7uz+H+nWmSOQ=="], + "@img/sharp-linuxmusl-arm64": ["@img/sharp-linuxmusl-arm64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-arm64": "1.1.0" }, "os": "linux", "cpu": "arm64" }, "sha512-NEE2vQ6wcxYav1/A22OOxoSOGiKnNmDzCYFOZ949xFmrWZOVII1Bp3NqVVpvj+3UeHMFyN5eP/V5hzViQ5CZNA=="], - "@img/sharp-linuxmusl-x64": ["@img/sharp-linuxmusl-x64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-x64": "1.1.0" }, "os": "linux", "cpu": "x64" }, "sha512-pax/kTR407vNb9qaSIiWVnQplPcGU8LRIJpDT5o8PdAx5aAA7AS3X9PS8Isw1/WfqgQorPotjrZL3Pqh6C5EBg=="], + "@img/sharp-linuxmusl-x64": ["@img/sharp-linuxmusl-x64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-x64": "1.1.0" }, "os": "linux", "cpu": "x64" }, "sha512-DOYMrDm5E6/8bm/yQLCWyuDJwUnlevR8xtF8bs+gjZ7cyUNYXiSf/E8Kp0Ss5xasIaXSHzb888V1BE4i1hFhAA=="], - "@img/sharp-wasm32": ["@img/sharp-wasm32@0.34.1", "", { "dependencies": { "@emnapi/runtime": "^1.4.0" }, "cpu": "none" }, "sha512-YDybQnYrLQfEpzGOQe7OKcyLUCML4YOXl428gOOzBgN6Gw0rv8dpsJ7PqTHxBnXnwXr8S1mYFSLSa727tpz0xg=="], + "@img/sharp-wasm32": ["@img/sharp-wasm32@0.34.2", "", { "dependencies": { "@emnapi/runtime": "^1.4.3" }, "cpu": "none" }, 
"sha512-/VI4mdlJ9zkaq53MbIG6rZY+QRN3MLbR6usYlgITEzi4Rpx5S6LFKsycOQjkOGmqTNmkIdLjEvooFKwww6OpdQ=="], - "@img/sharp-win32-ia32": ["@img/sharp-win32-ia32@0.34.1", "", { "os": "win32", "cpu": "ia32" }, "sha512-WKf/NAZITnonBf3U1LfdjoMgNO5JYRSlhovhRhMxXVdvWYveM4kM3L8m35onYIdh75cOMCo1BexgVQcCDzyoWw=="], + "@img/sharp-win32-arm64": ["@img/sharp-win32-arm64@0.34.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-cfP/r9FdS63VA5k0xiqaNaEoGxBg9k7uE+RQGzuK9fHt7jib4zAVVseR9LsE4gJcNWgT6APKMNnCcnyOtmSEUQ=="], - "@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.34.1", "", { "os": "win32", "cpu": "x64" }, "sha512-hw1iIAHpNE8q3uMIRCgGOeDoz9KtFNarFLQclLxr/LK1VBkj8nby18RjFvr6aP7USRYAjTZW6yisnBWMX571Tw=="], + "@img/sharp-win32-ia32": ["@img/sharp-win32-ia32@0.34.2", "", { "os": "win32", "cpu": "ia32" }, "sha512-QLjGGvAbj0X/FXl8n1WbtQ6iVBpWU7JO94u/P2M4a8CFYsvQi4GW2mRy/JqkRx0qpBzaOdKJKw8uc930EX2AHw=="], + + "@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.34.2", "", { "os": "win32", "cpu": "x64" }, "sha512-aUdT6zEYtDKCaxkofmmJDJYGCf0+pJg3eU9/oBuqvEeoB9dKI6ZLc/1iLJCTuJQDO4ptntAlkUmHgGjyuobZbw=="], "@scalar/openapi-types": ["@scalar/openapi-types@0.1.9", "", {}, "sha512-HQQudOSQBU7ewzfnBW9LhDmBE2XOJgSfwrh5PlUB7zJup/kaRkBGNgV2wMjNz9Af/uztiU/xNrO179FysmUT+g=="], @@ -133,9 +135,13 @@ "@sinclair/typebox": ["@sinclair/typebox@0.34.33", "", {}, "sha512-5HAV9exOMcXRUxo+9iYB5n09XxzCXnfy4VTNW4xnDv+FgjzAGY989C28BIdljKqmF+ZltUwujE3aossvcVtq6g=="], + "@tokenizer/inflate": ["@tokenizer/inflate@0.2.7", "", { "dependencies": { "debug": "^4.4.0", "fflate": "^0.8.2", "token-types": "^6.0.0" } }, "sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg=="], + + "@tokenizer/token": ["@tokenizer/token@0.3.0", "", {}, "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="], + "@types/node": ["@types/node@22.13.13", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ=="], - "@types/pg": ["@types/pg@8.11.14", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^4.0.1" } }, "sha512-qyD11E5R3u0eJmd1lB0WnWKXJGA7s015nyARWljfz5DcX83TKAIlY+QrmvzQTsbIe+hkiFtkyL2gHC6qwF6Fbg=="], + "@types/pg": ["@types/pg@8.15.2", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^4.0.1" } }, "sha512-+BKxo5mM6+/A1soSHBI7ufUglqYXntChLDyTbvcAn1Lawi9J7J9Ok3jt6w7I0+T/UDJ4CyhHk66+GZbwmkYxSg=="], "@unhead/schema": ["@unhead/schema@1.11.20", "", { "dependencies": { "hookable": "^5.5.3", "zhead": "^2.2.4" } }, "sha512-0zWykKAaJdm+/Y7yi/Yds20PrUK7XabLe9c3IRcjnwYmSWY6z0Cr19VIs3ozCj8P+GhR+/TI2mwtGlueCEYouA=="], @@ -143,7 +149,7 @@ "buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="], - "bun-types": ["bun-types@1.2.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-dbkp5Lo8HDrXkLrONm6bk+yiiYQSntvFUzQp0v3pzTAsXk6FtgVMjdQ+lzFNVAmQFUkPQZ3WMZqH5tTo+Dp/IA=="], + "bun-types": ["bun-types@1.2.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-Kuh4Ub28ucMRWeiUUWMHsT9Wcbr4H3kLIO72RZZElSDxSu7vpetRvxIUDUaW6QtaIeixIpm7OXtNnZPf82EzwA=="], "char-info": ["char-info@0.3.5", "", { "dependencies": { "node-interval-tree": "^1.3.3" } }, "sha512-gRslEBFEcuLMGLNO1EFIrdN1MMUfO+aqa7y8iWzNyAzB3mYKnTIvP+ioW3jpyeEvqA5WapVLIPINGtFjEIH4cQ=="], @@ -159,27 +165,35 @@ "debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, 
"sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="], - "detect-libc": ["detect-libc@2.0.3", "", {}, "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw=="], + "detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="], - "drizzle-kit": ["drizzle-kit@0.31.0", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.2", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-pcKVT+GbfPA+bUovPIilgVOoq+onNBo/YQBG86sf3/GFHkN6lRJPm1l7dKN0IMAk57RQoIm4GUllRrasLlcaSg=="], + "drizzle-kit": ["drizzle-kit@0.31.1", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.2", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-PUjYKWtzOzPtdtQlTHQG3qfv4Y0XT8+Eas6UbxCmxTj7qgMf+39dDujf1BP1I+qqZtw9uzwTh8jYtkMuCq+B0Q=="], "drizzle-orm": ["drizzle-orm@0.43.1", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-dUcDaZtE/zN4RV/xqGrVSMpnEczxd5cIaoDeor7Zst9wOe/HzC/7eAaulywWGYXdDEc9oBPMjayVEDg0ziTLJA=="], - "elysia": ["elysia@1.2.25", "", { "dependencies": { "@sinclair/typebox": "^0.34.27", "cookie": "^1.0.2", "memoirist": "^0.3.0", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeers": ["typescript"] }, "sha512-WsdQpORJvb4uszzeqYT0lg97knw1iBW1NTzJ1Jm57tiHg+DfAotlWXYbjmvQ039ssV0fYELDHinLLoUazZkEHg=="], + "elysia": ["elysia@1.3.1", "", { "dependencies": { "cookie": "^1.0.2", "exact-mirror": "0.1.2", "fast-decode-uri-component": "^1.0.1" }, "optionalDependencies": { "@sinclair/typebox": "^0.34.33", "openapi-types": "^12.1.3" }, "peerDependencies": { "file-type": ">= 20.0.0", "typescript": ">= 5.0.0" } }, "sha512-En41P6cDHcHtQ0nvfsn9ayB+8ahQJqG1nzvPX8FVZjOriFK/RtZPQBtXMfZDq/AsVIk7JFZGFEtAVEmztNJVhQ=="], "esbuild": ["esbuild@0.25.3", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.3", "@esbuild/android-arm": "0.25.3", "@esbuild/android-arm64": "0.25.3", "@esbuild/android-x64": "0.25.3", "@esbuild/darwin-arm64": "0.25.3", "@esbuild/darwin-x64": "0.25.3", "@esbuild/freebsd-arm64": "0.25.3", "@esbuild/freebsd-x64": "0.25.3", "@esbuild/linux-arm": "0.25.3", "@esbuild/linux-arm64": 
"0.25.3", "@esbuild/linux-ia32": "0.25.3", "@esbuild/linux-loong64": "0.25.3", "@esbuild/linux-mips64el": "0.25.3", "@esbuild/linux-ppc64": "0.25.3", "@esbuild/linux-riscv64": "0.25.3", "@esbuild/linux-s390x": "0.25.3", "@esbuild/linux-x64": "0.25.3", "@esbuild/netbsd-arm64": "0.25.3", "@esbuild/netbsd-x64": "0.25.3", "@esbuild/openbsd-arm64": "0.25.3", "@esbuild/openbsd-x64": "0.25.3", "@esbuild/sunos-x64": "0.25.3", "@esbuild/win32-arm64": "0.25.3", "@esbuild/win32-ia32": "0.25.3", "@esbuild/win32-x64": "0.25.3" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-qKA6Pvai73+M2FtftpNKRxJ78GIjmFXFxd/1DVBqGo/qNhLSfv+G12n9pNoWdytJC8U00TrViOwpjT0zgqQS8Q=="], "esbuild-register": ["esbuild-register@3.6.0", "", { "dependencies": { "debug": "^4.3.4" }, "peerDependencies": { "esbuild": ">=0.12 <1" } }, "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg=="], + "exact-mirror": ["exact-mirror@0.1.2", "", { "peerDependencies": { "@sinclair/typebox": "^0.34.15" }, "optionalPeers": ["@sinclair/typebox"] }, "sha512-wFCPCDLmHbKGUb8TOi/IS7jLsgR8WVDGtDK3CzcB4Guf/weq7G+I+DkXiRSZfbemBFOxOINKpraM6ml78vo8Zw=="], + + "fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="], + + "fflate": ["fflate@0.8.2", "", {}, "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A=="], + + "file-type": ["file-type@20.5.0", "", { "dependencies": { "@tokenizer/inflate": "^0.2.6", "strtok3": "^10.2.0", "token-types": "^6.0.0", "uint8array-extras": "^1.4.0" } }, "sha512-BfHZtG/l9iMm4Ecianu7P8HRD2tBHLtjXinm4X62XBOYzi7CYA7jyqfJzOvXHqzVrVPYqBo2/GvbARMaaJkKVg=="], + "get-tsconfig": ["get-tsconfig@4.10.0", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A=="], "hookable": ["hookable@5.5.3", "", {}, "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ=="], + "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], + "is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="], - "jose": ["jose@6.0.10", "", {}, "sha512-skIAxZqcMkOrSwjJvplIPYrlXGpxTPnro2/QWTDCxAdWQrSTV5/KqspMWmi5WAx5+ULswASJiZ0a+1B/Lxt9cw=="], - - "memoirist": ["memoirist@0.3.0", "", {}, "sha512-wR+4chMgVPq+T6OOsk40u9Wlpw1Pjx66NMNiYxCQQ4EUJ7jDs3D9kTCeKdBOkvAiqXlHLVJlvYL01PvIJ1MPNg=="], + "jose": ["jose@6.0.11", "", {}, "sha512-QxG7EaliDARm1O1S8BGakqncGT9s25bKL1WSf6/oa17Tkqwi8D2ZNglqCF+DsYF88/rV66Q/Q2mFAy697E1DUg=="], "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], @@ -195,19 +209,21 @@ "pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="], - "pg": ["pg@8.15.6", "", { "dependencies": { "pg-connection-string": "^2.8.5", "pg-pool": "^3.9.6", "pg-protocol": "^1.9.5", "pg-types": "^2.1.0", "pgpass": "1.x" }, "optionalDependencies": { "pg-cloudflare": "^1.2.5" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-yvao7YI3GdmmrslNVsZgx9PfntfWrnXwtR+K/DjI0I/sTKif4Z623um+sjVZ1hk5670B+ODjvHDAckKdjmPTsg=="], + "peek-readable": ["peek-readable@7.0.0", "", {}, 
"sha512-nri2TO5JE3/mRryik9LlHFT53cgHfRK0Lt0BAZQXku/AW3E6XLt2GaY8siWi7dvW/m1z0ecn+J+bpDa9ZN3IsQ=="], + + "pg": ["pg@8.16.0", "", { "dependencies": { "pg-connection-string": "^2.9.0", "pg-pool": "^3.10.0", "pg-protocol": "^1.10.0", "pg-types": "2.2.0", "pgpass": "1.0.5" }, "optionalDependencies": { "pg-cloudflare": "^1.2.5" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-7SKfdvP8CTNXjMUzfcVTaI+TDzBEeaUnVwiVGZQD1Hh33Kpev7liQba9uLd4CfN8r9mCVsD0JIpq03+Unpz+kg=="], "pg-cloudflare": ["pg-cloudflare@1.2.5", "", {}, "sha512-OOX22Vt0vOSRrdoUPKJ8Wi2OpE/o/h9T8X1s4qSkCedbNah9ei2W2765be8iMVxQUsvgT7zIAT2eIa9fs5+vtg=="], - "pg-connection-string": ["pg-connection-string@2.8.5", "", {}, "sha512-Ni8FuZ8yAF+sWZzojvtLE2b03cqjO5jNULcHFfM9ZZ0/JXrgom5pBREbtnAw7oxsxJqHw9Nz/XWORUEL3/IFow=="], + "pg-connection-string": ["pg-connection-string@2.9.0", "", {}, "sha512-P2DEBKuvh5RClafLngkAuGe9OUlFV7ebu8w1kmaaOgPcpJd1RIFh7otETfI6hAR8YupOLFTY7nuvvIn7PLciUQ=="], "pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="], "pg-numeric": ["pg-numeric@1.0.2", "", {}, "sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw=="], - "pg-pool": ["pg-pool@3.9.6", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-rFen0G7adh1YmgvrmE5IPIqbb+IgEzENUm+tzm6MLLDSlPRoZVhzU1WdML9PV2W5GOdRA9qBKURlbt1OsXOsPw=="], + "pg-pool": ["pg-pool@3.10.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-DzZ26On4sQ0KmqnO34muPcmKbhrjmyiO4lCCR0VwEd7MjmiKf5NTg/6+apUEu0NF7ESa37CGzFxH513CoUmWnA=="], - "pg-protocol": ["pg-protocol@1.9.5", "", {}, "sha512-DYTWtWpfd5FOro3UnAfwvhD8jh59r2ig8bPtc9H8Ds7MscE/9NYruUQWFAOuraRl29jwcT2kyMFQ3MxeaVjUhg=="], + "pg-protocol": ["pg-protocol@1.10.0", "", {}, "sha512-IpdytjudNuLv8nhlHs/UrVBhU0e78J0oIS/0AVdTbWxSOkFUVdsHC/NrorO6nXsQNDTT1kzDSOMJubBQviX18Q=="], "pg-types": ["pg-types@4.0.2", "", { "dependencies": { "pg-int8": "1.0.1", "pg-numeric": "1.0.2", "postgres-array": "~3.0.1", "postgres-bytea": "~3.0.0", "postgres-date": "~2.1.0", "postgres-interval": "^3.0.0", "postgres-range": "^1.1.1" } }, "sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng=="], @@ -225,11 +241,11 @@ "resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="], - "semver": ["semver@7.7.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="], + "semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], "shallowequal": ["shallowequal@1.1.0", "", {}, "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ=="], - "sharp": ["sharp@0.34.1", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.7.1" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.1", "@img/sharp-darwin-x64": "0.34.1", "@img/sharp-libvips-darwin-arm64": "1.1.0", "@img/sharp-libvips-darwin-x64": "1.1.0", "@img/sharp-libvips-linux-arm": "1.1.0", "@img/sharp-libvips-linux-arm64": "1.1.0", "@img/sharp-libvips-linux-ppc64": "1.1.0", "@img/sharp-libvips-linux-s390x": "1.1.0", "@img/sharp-libvips-linux-x64": "1.1.0", "@img/sharp-libvips-linuxmusl-arm64": "1.1.0", "@img/sharp-libvips-linuxmusl-x64": "1.1.0", 
"@img/sharp-linux-arm": "0.34.1", "@img/sharp-linux-arm64": "0.34.1", "@img/sharp-linux-s390x": "0.34.1", "@img/sharp-linux-x64": "0.34.1", "@img/sharp-linuxmusl-arm64": "0.34.1", "@img/sharp-linuxmusl-x64": "0.34.1", "@img/sharp-wasm32": "0.34.1", "@img/sharp-win32-ia32": "0.34.1", "@img/sharp-win32-x64": "0.34.1" } }, "sha512-1j0w61+eVxu7DawFJtnfYcvSv6qPFvfTaqzTQ2BLknVhHTwGS8sc63ZBF4rzkWMBVKybo4S5OBtDdZahh2A1xg=="], + "sharp": ["sharp@0.34.2", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.4", "semver": "^7.7.2" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.2", "@img/sharp-darwin-x64": "0.34.2", "@img/sharp-libvips-darwin-arm64": "1.1.0", "@img/sharp-libvips-darwin-x64": "1.1.0", "@img/sharp-libvips-linux-arm": "1.1.0", "@img/sharp-libvips-linux-arm64": "1.1.0", "@img/sharp-libvips-linux-ppc64": "1.1.0", "@img/sharp-libvips-linux-s390x": "1.1.0", "@img/sharp-libvips-linux-x64": "1.1.0", "@img/sharp-libvips-linuxmusl-arm64": "1.1.0", "@img/sharp-libvips-linuxmusl-x64": "1.1.0", "@img/sharp-linux-arm": "0.34.2", "@img/sharp-linux-arm64": "0.34.2", "@img/sharp-linux-s390x": "0.34.2", "@img/sharp-linux-x64": "0.34.2", "@img/sharp-linuxmusl-arm64": "0.34.2", "@img/sharp-linuxmusl-x64": "0.34.2", "@img/sharp-wasm32": "0.34.2", "@img/sharp-win32-arm64": "0.34.2", "@img/sharp-win32-ia32": "0.34.2", "@img/sharp-win32-x64": "0.34.2" } }, "sha512-lszvBmB9QURERtyKT2bNmsgxXK0ShJrL/fvqlonCo7e6xBF8nT8xU6pW+PMIbLsz0RxQk3rgH9kd8UmvOzlMJg=="], "simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="], @@ -239,8 +255,16 @@ "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="], + "strtok3": ["strtok3@10.2.2", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^7.0.0" } }, "sha512-Xt18+h4s7Z8xyZ0tmBoRmzxcop97R4BAh+dXouUDCYn+Em+1P3qpkUfI5ueWLT8ynC5hZ+q4iPEmGG1urvQGBg=="], + + "token-types": ["token-types@6.0.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA=="], + "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], + + "uint8array-extras": ["uint8array-extras@1.4.0", "", {}, "sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ=="], + "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="], diff --git a/api/drizzle/0021_crew.sql b/api/drizzle/0021_crew.sql new file mode 100644 index 00000000..e7d90a4b --- /dev/null +++ b/api/drizzle/0021_crew.sql @@ -0,0 +1 @@ +ALTER TYPE "kyoo"."role_kind" ADD VALUE 'crew' BEFORE 'other'; \ No newline at end of file diff --git a/api/drizzle/meta/0021_snapshot.json b/api/drizzle/meta/0021_snapshot.json new file mode 100644 index 00000000..86293b29 --- /dev/null +++ b/api/drizzle/meta/0021_snapshot.json @@ -0,0 +1,1859 @@ +{ + "id": 
"5d82ac8b-bd3b-4aa1-a633-6dd46f73d405", + "prevId": "0c44c1f6-0b4d-4beb-8f67-b6250f92c5e2", + "version": "7", + "dialect": "postgresql", + "tables": { + "kyoo.entries": { + "name": "entries", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "entries_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "id": { + "name": "id", + "type": "uuid", + "primaryKey": false, + "notNull": true, + "default": "gen_random_uuid()" + }, + "slug": { + "name": "slug", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "show_pk": { + "name": "show_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "order": { + "name": "order", + "type": "real", + "primaryKey": false, + "notNull": false + }, + "season_number": { + "name": "season_number", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "episode_number": { + "name": "episode_number", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "kind": { + "name": "kind", + "type": "entry_type", + "typeSchema": "kyoo", + "primaryKey": false, + "notNull": true + }, + "extra_kind": { + "name": "extra_kind", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "air_date": { + "name": "air_date", + "type": "date", + "primaryKey": false, + "notNull": false + }, + "runtime": { + "name": "runtime", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "thumbnail": { + "name": "thumbnail", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "external_id": { + "name": "external_id", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + }, + "available_since": { + "name": "available_since", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": false + }, + "next_refresh": { + "name": "next_refresh", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "entry_kind": { + "name": "entry_kind", + "columns": [ + { + "expression": "kind", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "hash", + "with": {} + }, + "entry_order": { + "name": "entry_order", + "columns": [ + { + "expression": "order", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "entries_show_pk_shows_pk_fk": { + "name": "entries_show_pk_shows_pk_fk", + "tableFrom": "entries", + "tableTo": "shows", + "schemaTo": "kyoo", + "columnsFrom": ["show_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "entries_id_unique": { + "name": "entries_id_unique", + "nullsNotDistinct": false, + "columns": ["id"] + }, + "entries_slug_unique": { + "name": "entries_slug_unique", + "nullsNotDistinct": false, + "columns": ["slug"] + }, + "entries_showPk_seasonNumber_episodeNumber_unique": { + "name": 
"entries_showPk_seasonNumber_episodeNumber_unique", + "nullsNotDistinct": false, + "columns": ["show_pk", "season_number", "episode_number"] + } + }, + "policies": {}, + "checkConstraints": { + "order_positive": { + "name": "order_positive", + "value": "\"kyoo\".\"entries\".\"order\" >= 0" + } + }, + "isRLSEnabled": false + }, + "kyoo.entry_translations": { + "name": "entry_translations", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "language": { + "name": "language", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "tagline": { + "name": "tagline", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "poster": { + "name": "poster", + "type": "jsonb", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "entry_name_trgm": { + "name": "entry_name_trgm", + "columns": [ + { + "expression": "\"name\" gin_trgm_ops", + "asc": true, + "isExpression": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "gin", + "with": {} + } + }, + "foreignKeys": { + "entry_translations_pk_entries_pk_fk": { + "name": "entry_translations_pk_entries_pk_fk", + "tableFrom": "entry_translations", + "tableTo": "entries", + "schemaTo": "kyoo", + "columnsFrom": ["pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "entry_translations_pk_language_pk": { + "name": "entry_translations_pk_language_pk", + "columns": ["pk", "language"] + } + }, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.history": { + "name": "history", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "history_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "profile_pk": { + "name": "profile_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "entry_pk": { + "name": "entry_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "video_pk": { + "name": "video_pk", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "percent": { + "name": "percent", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "time": { + "name": "time", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "played_date": { + "name": "played_date", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "history_play_date": { + "name": "history_play_date", + "columns": [ + { + "expression": "played_date", + "isExpression": false, + "asc": false, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "history_profile_pk_profiles_pk_fk": { + "name": "history_profile_pk_profiles_pk_fk", + "tableFrom": "history", + "tableTo": "profiles", + "schemaTo": "kyoo", + "columnsFrom": ["profile_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "history_entry_pk_entries_pk_fk": { 
+ "name": "history_entry_pk_entries_pk_fk", + "tableFrom": "history", + "tableTo": "entries", + "schemaTo": "kyoo", + "columnsFrom": ["entry_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "history_video_pk_videos_pk_fk": { + "name": "history_video_pk_videos_pk_fk", + "tableFrom": "history", + "tableTo": "videos", + "schemaTo": "kyoo", + "columnsFrom": ["video_pk"], + "columnsTo": ["pk"], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": { + "percent_valid": { + "name": "percent_valid", + "value": "\"kyoo\".\"history\".\"percent\" between 0 and 100" + } + }, + "isRLSEnabled": false + }, + "kyoo.season_translations": { + "name": "season_translations", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "language": { + "name": "language", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "poster": { + "name": "poster", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "thumbnail": { + "name": "thumbnail", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "banner": { + "name": "banner", + "type": "jsonb", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "season_name_trgm": { + "name": "season_name_trgm", + "columns": [ + { + "expression": "\"name\" gin_trgm_ops", + "asc": true, + "isExpression": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "gin", + "with": {} + } + }, + "foreignKeys": { + "season_translations_pk_seasons_pk_fk": { + "name": "season_translations_pk_seasons_pk_fk", + "tableFrom": "season_translations", + "tableTo": "seasons", + "schemaTo": "kyoo", + "columnsFrom": ["pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "season_translations_pk_language_pk": { + "name": "season_translations_pk_language_pk", + "columns": ["pk", "language"] + } + }, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.seasons": { + "name": "seasons", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "seasons_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "id": { + "name": "id", + "type": "uuid", + "primaryKey": false, + "notNull": true, + "default": "gen_random_uuid()" + }, + "slug": { + "name": "slug", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "show_pk": { + "name": "show_pk", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "season_number": { + "name": "season_number", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "start_air": { + "name": "start_air", + "type": "date", + "primaryKey": false, + "notNull": false + }, + "end_air": { + "name": "end_air", + "type": "date", + "primaryKey": false, + "notNull": false + }, + "external_id": { + "name": "external_id", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": 
"'{}'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + }, + "next_refresh": { + "name": "next_refresh", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "show_fk": { + "name": "show_fk", + "columns": [ + { + "expression": "show_pk", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "hash", + "with": {} + }, + "season_nbr": { + "name": "season_nbr", + "columns": [ + { + "expression": "season_number", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "seasons_show_pk_shows_pk_fk": { + "name": "seasons_show_pk_shows_pk_fk", + "tableFrom": "seasons", + "tableTo": "shows", + "schemaTo": "kyoo", + "columnsFrom": ["show_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "seasons_id_unique": { + "name": "seasons_id_unique", + "nullsNotDistinct": false, + "columns": ["id"] + }, + "seasons_slug_unique": { + "name": "seasons_slug_unique", + "nullsNotDistinct": false, + "columns": ["slug"] + }, + "seasons_showPk_seasonNumber_unique": { + "name": "seasons_showPk_seasonNumber_unique", + "nullsNotDistinct": false, + "columns": ["show_pk", "season_number"] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.show_translations": { + "name": "show_translations", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "language": { + "name": "language", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "tagline": { + "name": "tagline", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "aliases": { + "name": "aliases", + "type": "text[]", + "primaryKey": false, + "notNull": true + }, + "tags": { + "name": "tags", + "type": "text[]", + "primaryKey": false, + "notNull": true + }, + "poster": { + "name": "poster", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "thumbnail": { + "name": "thumbnail", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "banner": { + "name": "banner", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "logo": { + "name": "logo", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "trailer_url": { + "name": "trailer_url", + "type": "text", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "name_trgm": { + "name": "name_trgm", + "columns": [ + { + "expression": "\"name\" gin_trgm_ops", + "asc": true, + "isExpression": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "gin", + "with": {} + }, + "tags": { + "name": "tags", + "columns": [ + { + "expression": "tags", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { 
+ "show_translations_pk_shows_pk_fk": { + "name": "show_translations_pk_shows_pk_fk", + "tableFrom": "show_translations", + "tableTo": "shows", + "schemaTo": "kyoo", + "columnsFrom": ["pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "show_translations_pk_language_pk": { + "name": "show_translations_pk_language_pk", + "columns": ["pk", "language"] + } + }, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.shows": { + "name": "shows", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "shows_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "id": { + "name": "id", + "type": "uuid", + "primaryKey": false, + "notNull": true, + "default": "gen_random_uuid()" + }, + "slug": { + "name": "slug", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "kind": { + "name": "kind", + "type": "show_kind", + "typeSchema": "kyoo", + "primaryKey": false, + "notNull": true + }, + "genres": { + "name": "genres", + "type": "genres[]", + "typeSchema": "kyoo", + "primaryKey": false, + "notNull": true + }, + "rating": { + "name": "rating", + "type": "smallint", + "primaryKey": false, + "notNull": false + }, + "runtime": { + "name": "runtime", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "show_status", + "typeSchema": "kyoo", + "primaryKey": false, + "notNull": true + }, + "start_air": { + "name": "start_air", + "type": "date", + "primaryKey": false, + "notNull": false + }, + "end_air": { + "name": "end_air", + "type": "date", + "primaryKey": false, + "notNull": false + }, + "original": { + "name": "original", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "collection_pk": { + "name": "collection_pk", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "entries_count": { + "name": "entries_count", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "available_count": { + "name": "available_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "external_id": { + "name": "external_id", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + }, + "next_refresh": { + "name": "next_refresh", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "kind": { + "name": "kind", + "columns": [ + { + "expression": "kind", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "hash", + "with": {} + }, + "rating": { + "name": "rating", + "columns": [ + { + "expression": "rating", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "startAir": { + "name": "startAir", + "columns": [ + { + "expression": "start_air", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], 
+ "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "shows_collection_pk_shows_pk_fk": { + "name": "shows_collection_pk_shows_pk_fk", + "tableFrom": "shows", + "tableTo": "shows", + "schemaTo": "kyoo", + "columnsFrom": ["collection_pk"], + "columnsTo": ["pk"], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "shows_id_unique": { + "name": "shows_id_unique", + "nullsNotDistinct": false, + "columns": ["id"] + }, + "shows_slug_unique": { + "name": "shows_slug_unique", + "nullsNotDistinct": false, + "columns": ["slug"] + } + }, + "policies": {}, + "checkConstraints": { + "rating_valid": { + "name": "rating_valid", + "value": "\"kyoo\".\"shows\".\"rating\" between 0 and 100" + }, + "runtime_valid": { + "name": "runtime_valid", + "value": "\"kyoo\".\"shows\".\"runtime\" >= 0" + } + }, + "isRLSEnabled": false + }, + "kyoo.show_studio_join": { + "name": "show_studio_join", + "schema": "kyoo", + "columns": { + "show_pk": { + "name": "show_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "studio_pk": { + "name": "studio_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "show_studio_join_show_pk_shows_pk_fk": { + "name": "show_studio_join_show_pk_shows_pk_fk", + "tableFrom": "show_studio_join", + "tableTo": "shows", + "schemaTo": "kyoo", + "columnsFrom": ["show_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "show_studio_join_studio_pk_studios_pk_fk": { + "name": "show_studio_join_studio_pk_studios_pk_fk", + "tableFrom": "show_studio_join", + "tableTo": "studios", + "schemaTo": "kyoo", + "columnsFrom": ["studio_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "show_studio_join_show_pk_studio_pk_pk": { + "name": "show_studio_join_show_pk_studio_pk_pk", + "columns": ["show_pk", "studio_pk"] + } + }, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.studio_translations": { + "name": "studio_translations", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "language": { + "name": "language", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "logo": { + "name": "logo", + "type": "jsonb", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "studio_name_trgm": { + "name": "studio_name_trgm", + "columns": [ + { + "expression": "\"name\" gin_trgm_ops", + "asc": true, + "isExpression": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "gin", + "with": {} + } + }, + "foreignKeys": { + "studio_translations_pk_studios_pk_fk": { + "name": "studio_translations_pk_studios_pk_fk", + "tableFrom": "studio_translations", + "tableTo": "studios", + "schemaTo": "kyoo", + "columnsFrom": ["pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "studio_translations_pk_language_pk": { + "name": "studio_translations_pk_language_pk", + "columns": ["pk", "language"] + } + }, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.studios": { + "name": "studios", + "schema": "kyoo", + 
"columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "studios_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "id": { + "name": "id", + "type": "uuid", + "primaryKey": false, + "notNull": true, + "default": "gen_random_uuid()" + }, + "slug": { + "name": "slug", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "external_id": { + "name": "external_id", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "studios_id_unique": { + "name": "studios_id_unique", + "nullsNotDistinct": false, + "columns": ["id"] + }, + "studios_slug_unique": { + "name": "studios_slug_unique", + "nullsNotDistinct": false, + "columns": ["slug"] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.roles": { + "name": "roles", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "roles_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "show_pk": { + "name": "show_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "staff_pk": { + "name": "staff_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "kind": { + "name": "kind", + "type": "role_kind", + "typeSchema": "kyoo", + "primaryKey": false, + "notNull": true + }, + "order": { + "name": "order", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "character": { + "name": "character", + "type": "jsonb", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "role_kind": { + "name": "role_kind", + "columns": [ + { + "expression": "kind", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "hash", + "with": {} + }, + "role_order": { + "name": "role_order", + "columns": [ + { + "expression": "order", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "roles_show_pk_shows_pk_fk": { + "name": "roles_show_pk_shows_pk_fk", + "tableFrom": "roles", + "tableTo": "shows", + "schemaTo": "kyoo", + "columnsFrom": ["show_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "roles_staff_pk_staff_pk_fk": { + "name": "roles_staff_pk_staff_pk_fk", + "tableFrom": "roles", + "tableTo": "staff", + "schemaTo": "kyoo", + "columnsFrom": ["staff_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.staff": { + "name": "staff", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + 
"notNull": true, + "identity": { + "type": "always", + "name": "staff_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "id": { + "name": "id", + "type": "uuid", + "primaryKey": false, + "notNull": true, + "default": "gen_random_uuid()" + }, + "slug": { + "name": "slug", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "latin_name": { + "name": "latin_name", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "image": { + "name": "image", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "external_id": { + "name": "external_id", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "staff_id_unique": { + "name": "staff_id_unique", + "nullsNotDistinct": false, + "columns": ["id"] + }, + "staff_slug_unique": { + "name": "staff_slug_unique", + "nullsNotDistinct": false, + "columns": ["slug"] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.entry_video_join": { + "name": "entry_video_join", + "schema": "kyoo", + "columns": { + "entry_pk": { + "name": "entry_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "video_pk": { + "name": "video_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "slug": { + "name": "slug", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "entry_video_join_entry_pk_entries_pk_fk": { + "name": "entry_video_join_entry_pk_entries_pk_fk", + "tableFrom": "entry_video_join", + "tableTo": "entries", + "schemaTo": "kyoo", + "columnsFrom": ["entry_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "entry_video_join_video_pk_videos_pk_fk": { + "name": "entry_video_join_video_pk_videos_pk_fk", + "tableFrom": "entry_video_join", + "tableTo": "videos", + "schemaTo": "kyoo", + "columnsFrom": ["video_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "entry_video_join_entry_pk_video_pk_pk": { + "name": "entry_video_join_entry_pk_video_pk_pk", + "columns": ["entry_pk", "video_pk"] + } + }, + "uniqueConstraints": { + "entry_video_join_slug_unique": { + "name": "entry_video_join_slug_unique", + "nullsNotDistinct": false, + "columns": ["slug"] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.videos": { + "name": "videos", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "videos_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "id": { + "name": "id", + "type": "uuid", + "primaryKey": false, + "notNull": true, + "default": "gen_random_uuid()" + }, + "path": { + "name": "path", + "type": "text", + "primaryKey": 
false, + "notNull": true + }, + "rendering": { + "name": "rendering", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "part": { + "name": "part", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "version": { + "name": "version", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1 + }, + "guess": { + "name": "guess", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "videos_id_unique": { + "name": "videos_id_unique", + "nullsNotDistinct": false, + "columns": ["id"] + }, + "videos_path_unique": { + "name": "videos_path_unique", + "nullsNotDistinct": false, + "columns": ["path"] + }, + "rendering_unique": { + "name": "rendering_unique", + "nullsNotDistinct": true, + "columns": ["rendering", "part", "version"] + } + }, + "policies": {}, + "checkConstraints": { + "part_pos": { + "name": "part_pos", + "value": "\"kyoo\".\"videos\".\"part\" >= 0" + }, + "version_pos": { + "name": "version_pos", + "value": "\"kyoo\".\"videos\".\"version\" >= 0" + } + }, + "isRLSEnabled": false + }, + "kyoo.profiles": { + "name": "profiles", + "schema": "kyoo", + "columns": { + "pk": { + "name": "pk", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "profiles_pk_seq", + "schema": "kyoo", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "id": { + "name": "id", + "type": "uuid", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "profiles_id_unique": { + "name": "profiles_id_unique", + "nullsNotDistinct": false, + "columns": ["id"] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.mqueue": { + "name": "mqueue", + "schema": "kyoo", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "kind": { + "name": "kind", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "message": { + "name": "message", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "attempt": { + "name": "attempt", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "mqueue_created": { + "name": "mqueue_created", + "columns": [ + { + "expression": "created_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "kyoo.watchlist": { + "name": "watchlist", + "schema": "kyoo", + "columns": { + "profile_pk": { + "name": "profile_pk", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "show_pk": { + "name": "show_pk", + "type": "integer", + "primaryKey": 
false, + "notNull": true + }, + "status": { + "name": "status", + "type": "watchlist_status", + "typeSchema": "kyoo", + "primaryKey": false, + "notNull": true + }, + "seen_count": { + "name": "seen_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "next_entry": { + "name": "next_entry", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "score": { + "name": "score", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "started_at": { + "name": "started_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": false + }, + "last_played_at": { + "name": "last_played_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": false + }, + "completed_at": { + "name": "completed_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "watchlist_profile_pk_profiles_pk_fk": { + "name": "watchlist_profile_pk_profiles_pk_fk", + "tableFrom": "watchlist", + "tableTo": "profiles", + "schemaTo": "kyoo", + "columnsFrom": ["profile_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "watchlist_show_pk_shows_pk_fk": { + "name": "watchlist_show_pk_shows_pk_fk", + "tableFrom": "watchlist", + "tableTo": "shows", + "schemaTo": "kyoo", + "columnsFrom": ["show_pk"], + "columnsTo": ["pk"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "watchlist_next_entry_entries_pk_fk": { + "name": "watchlist_next_entry_entries_pk_fk", + "tableFrom": "watchlist", + "tableTo": "entries", + "schemaTo": "kyoo", + "columnsFrom": ["next_entry"], + "columnsTo": ["pk"], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "watchlist_profile_pk_show_pk_pk": { + "name": "watchlist_profile_pk_show_pk_pk", + "columns": ["profile_pk", "show_pk"] + } + }, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": { + "score_percent": { + "name": "score_percent", + "value": "\"kyoo\".\"watchlist\".\"score\" between 0 and 100" + } + }, + "isRLSEnabled": false + } + }, + "enums": { + "kyoo.entry_type": { + "name": "entry_type", + "schema": "kyoo", + "values": ["episode", "movie", "special", "extra"] + }, + "kyoo.genres": { + "name": "genres", + "schema": "kyoo", + "values": [ + "action", + "adventure", + "animation", + "comedy", + "crime", + "documentary", + "drama", + "family", + "fantasy", + "history", + "horror", + "music", + "mystery", + "romance", + "science-fiction", + "thriller", + "war", + "western", + "kids", + "reality", + "politics", + "soap", + "talk" + ] + }, + "kyoo.show_kind": { + "name": "show_kind", + "schema": "kyoo", + "values": ["serie", "movie", "collection"] + }, + "kyoo.show_status": { + "name": "show_status", + "schema": "kyoo", + "values": ["unknown", "finished", "airing", "planned"] + }, + "kyoo.role_kind": { + "name": "role_kind", + "schema": "kyoo", + "values": [ + "actor", + "director", + "writter", + "producer", + "music", + "crew", + "other" + ] + }, + "kyoo.watchlist_status": { + "name": "watchlist_status", + "schema": "kyoo", + "values": ["watching", "rewatching", "completed", "dropped", "planned"] + } + }, + "schemas": { + "kyoo": "kyoo" + }, + "sequences": 
{}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} diff --git a/api/drizzle/meta/_journal.json b/api/drizzle/meta/_journal.json index 34ec2fa1..2a936918 100644 --- a/api/drizzle/meta/_journal.json +++ b/api/drizzle/meta/_journal.json @@ -148,6 +148,13 @@ "when": 1746198322219, "tag": "0020_video_unique", "breakpoints": true + }, + { + "idx": 21, + "version": "7", + "when": 1747727831649, + "tag": "0021_crew", + "breakpoints": true } ] } diff --git a/api/package.json b/api/package.json index 1206b686..b32755aa 100644 --- a/api/package.json +++ b/api/package.json @@ -11,18 +11,18 @@ "dependencies": { "@elysiajs/swagger": "zoriya/elysia-swagger#build", "blurhash": "^2.0.5", - "drizzle-kit": "^0.31.0", + "drizzle-kit": "^0.31.1", "drizzle-orm": "0.43.1", - "elysia": "^1.2.25", - "jose": "^6.0.10", + "elysia": "^1.3.1", + "jose": "^6.0.11", "parjs": "^1.3.9", - "pg": "^8.15.6", - "sharp": "^0.34.1" + "pg": "^8.16.0", + "sharp": "^0.34.2" }, "devDependencies": { - "@types/pg": "^8.11.14", + "@types/pg": "^8.15.2", "node-addon-api": "^8.3.1", - "bun-types": "^1.2.11" + "bun-types": "^1.2.14" }, "module": "src/index.js", "patchedDependencies": { diff --git a/api/shell.nix b/api/shell.nix new file mode 100644 index 00000000..1ec82678 --- /dev/null +++ b/api/shell.nix @@ -0,0 +1,16 @@ +{pkgs ? import {}}: +pkgs.mkShell { + packages = with pkgs; [ + bun + biome + # for psql to debug from the cli + postgresql_15 + # to build libvips (for sharp) + nodejs + node-gyp + pkg-config + vips + ]; + + SHARP_FORCE_GLOBAL_LIBVIPS = 1; +} diff --git a/api/src/auth.ts b/api/src/auth.ts index 71a925be..2a38bfa4 100644 --- a/api/src/auth.ts +++ b/api/src/auth.ts @@ -37,17 +37,17 @@ export const auth = new Elysia({ name: "auth" }) .guard({ headers: t.Object( { - authorization: t.TemplateLiteral("Bearer ${string}"), + authorization: t.Optional(t.TemplateLiteral("Bearer ${string}")), }, { additionalProperties: true }, ), }) - .resolve(async ({ headers: { authorization }, error }) => { + .resolve(async ({ headers: { authorization }, status }) => { const bearer = authorization?.slice(7); if (!bearer) { - return error(500, { - status: 500, - message: "No jwt, auth server configuration error.", + return status(403, { + status: 403, + message: "No authorization header was found.", }); } @@ -63,7 +63,7 @@ export const auth = new Elysia({ name: "auth" }) return { jwt }; } catch (err) { - return error(403, { + return status(403, { status: 403, message: "Invalid jwt. Verification vailed", details: err, @@ -73,10 +73,10 @@ export const auth = new Elysia({ name: "auth" }) .macro({ permissions(perms: string[]) { return { - beforeHandle: ({ jwt, error }) => { + beforeHandle: ({ jwt, status }) => { for (const perm of perms) { if (!jwt!.permissions.includes(perm)) { - return error(403, { + return status(403, { status: 403, message: `Missing permission: '${perm}'.`, details: { current: jwt!.permissions, required: perms }, @@ -87,7 +87,7 @@ export const auth = new Elysia({ name: "auth" }) }; }, }) - .as("plugin"); + .as("scoped"); const User = t.Object({ id: t.String({ format: "uuid" }), diff --git a/api/src/base.ts b/api/src/base.ts index b4e285ef..0f15ea3d 100644 --- a/api/src/base.ts +++ b/api/src/base.ts @@ -52,10 +52,10 @@ export const base = new Elysia({ name: "base" }) detail: { description: "Check if the api is healthy." 
}, response: { 200: t.Object({ status: t.Literal("healthy") }) }, }) - .as("plugin"); + .as("scoped"); export const prefix = process.env.KYOO_PREFIX ?? ""; -export const app = new Elysia({ prefix }) +export const handlers = new Elysia({ prefix }) .use(base) .use(auth) .guard( diff --git a/api/src/controllers/entries.ts b/api/src/controllers/entries.ts index dce5edae..b994b8c2 100644 --- a/api/src/controllers/entries.ts +++ b/api/src/controllers/entries.ts @@ -72,6 +72,7 @@ export const entryFilters: FilterDef = { runtime: { column: entries.runtime, type: "float" }, airDate: { column: entries.airDate, type: "date" }, playedDate: { column: entryProgressQ.playedDate, type: "date" }, + isAvailable: { column: isNotNull(entries.availableSince), type: "bool" }, }; const extraFilters: FilterDef = { @@ -255,7 +256,7 @@ export const entriesH = new Elysia({ tags: ["series"] }) headers: { "accept-language": languages }, request: { url }, jwt: { sub }, - error, + status, }) => { const [serie] = await db .select({ pk: shows.pk }) @@ -269,7 +270,7 @@ export const entriesH = new Elysia({ tags: ["series"] }) .limit(1); if (!serie) { - return error(404, { + return status(404, { status: 404, message: `No serie with the id or slug: '${id}'.`, }); @@ -335,7 +336,7 @@ export const entriesH = new Elysia({ tags: ["series"] }) query: { limit, after, query, sort, filter }, request: { url }, jwt: { sub }, - error, + status, }) => { const [serie] = await db .select({ pk: shows.pk }) @@ -349,7 +350,7 @@ export const entriesH = new Elysia({ tags: ["series"] }) .limit(1); if (!serie) { - return error(404, { + return status(404, { status: 404, message: `No serie with the id or slug: '${id}'.`, }); diff --git a/api/src/controllers/images.ts b/api/src/controllers/images.ts index df04e794..2056592f 100644 --- a/api/src/controllers/images.ts +++ b/api/src/controllers/images.ts @@ -28,14 +28,14 @@ function getRedirectToImageHandler({ headers: { "accept-language": languages }, query: { quality }, set, - error, + status, redirect, }: { params: { id: string; image: "poster" | "thumbnail" | "banner" | "logo" }; headers: { "accept-language": string }; query: { quality: "high" | "medium" | "low" }; set: Context["set"]; - error: Context["error"]; + status: Context["status"]; redirect: Context["redirect"]; }) { id ??= "random"; @@ -76,13 +76,13 @@ function getRedirectToImageHandler({ .limit(1); if (!ret) { - return error(404, { + return status(404, { status: 404, message: `No item found with id or slug: '${id}'.`, }); } if (!ret.language) { - return error(422, { + return status(422, { status: 422, message: "Accept-Language header could not be satisfied.", }); @@ -162,7 +162,7 @@ export const imagesH = new Elysia({ tags: ["images"] }) }) .get( "/staff/:id/image", - async ({ params: { id }, query: { quality }, error, redirect }) => { + async ({ params: { id }, query: { quality }, status, redirect }) => { const [ret] = await db .select({ image: staff.image }) .from(staff) @@ -177,7 +177,7 @@ export const imagesH = new Elysia({ tags: ["images"] }) .limit(1); if (!ret) { - return error(404, { + return status(404, { status: 404, message: `No staff member found with id or slug: '${id}'.`, }); @@ -211,7 +211,7 @@ export const imagesH = new Elysia({ tags: ["images"] }) headers: { "accept-language": languages }, query: { quality }, set, - error, + status, redirect, }) => { const lang = processLanguages(languages); @@ -248,13 +248,13 @@ export const imagesH = new Elysia({ tags: ["images"] }) .limit(1); if (!ret) { - return error(404, { + return 
status(404, { status: 404, message: `No studio found with id or slug: '${id}'.`, }); } if (!ret.language) { - return error(422, { + return status(422, { status: 422, message: "Accept-Language header could not be satisfied.", }); diff --git a/api/src/controllers/profiles/history.ts b/api/src/controllers/profiles/history.ts index fdf239c2..a2d9c32b 100644 --- a/api/src/controllers/profiles/history.ts +++ b/api/src/controllers/profiles/history.ts @@ -111,10 +111,10 @@ export const historyH = new Elysia({ tags: ["profiles"] }) query: { sort, filter, query, limit, after }, headers: { "accept-language": languages, authorization }, request: { url }, - error, + status, }) => { const uInfo = await getUserInfo(id, { authorization }); - if ("status" in uInfo) return error(uInfo.status as 404, uInfo); + if ("status" in uInfo) return status(uInfo.status as 404, uInfo); const langs = processLanguages(languages); const items = (await getEntries({ @@ -163,7 +163,7 @@ export const historyH = new Elysia({ tags: ["profiles"] }) ) .post( "/profiles/me/history", - async ({ body, jwt: { sub }, error }) => { + async ({ body, jwt: { sub }, status }) => { const profilePk = await getOrCreateProfile(sub); const hist = values( @@ -321,7 +321,7 @@ export const historyH = new Elysia({ tags: ["profiles"] }) }, }); - return error(201, { status: 201, inserted: rows.length }); + return status(201, { status: 201, inserted: rows.length }); }, { detail: { description: "Bulk add entries/movies to your watch history." }, diff --git a/api/src/controllers/profiles/watchlist.ts b/api/src/controllers/profiles/watchlist.ts index 3e6df843..5adf5f16 100644 --- a/api/src/controllers/profiles/watchlist.ts +++ b/api/src/controllers/profiles/watchlist.ts @@ -197,10 +197,10 @@ export const watchlistH = new Elysia({ tags: ["profiles"] }) jwt: { settings }, headers: { "accept-language": languages, authorization }, request: { url }, - error, + status, }) => { const uInfo = await getUserInfo(id, { authorization }); - if ("status" in uInfo) return error(uInfo.status as 404, uInfo); + if ("status" in uInfo) return status(uInfo.status as 404, uInfo); const langs = processLanguages(languages); const items = await getShows({ @@ -261,7 +261,7 @@ export const watchlistH = new Elysia({ tags: ["profiles"] }) ) .post( "/series/:id/watchstatus", - async ({ params: { id }, body, jwt: { sub }, error }) => { + async ({ params: { id }, body, jwt: { sub }, status }) => { const [show] = await db .select({ pk: shows.pk, entriesCount: shows.entriesCount }) .from(shows) @@ -273,7 +273,7 @@ export const watchlistH = new Elysia({ tags: ["profiles"] }) ); if (!show) { - return error(404, { + return status(404, { status: 404, message: `No serie found for the id/slug: '${id}'.`, }); @@ -302,7 +302,7 @@ export const watchlistH = new Elysia({ tags: ["profiles"] }) ) .post( "/movies/:id/watchstatus", - async ({ params: { id }, body, jwt: { sub }, error }) => { + async ({ params: { id }, body, jwt: { sub }, status }) => { const [show] = await db .select({ pk: shows.pk }) .from(shows) @@ -314,7 +314,7 @@ export const watchlistH = new Elysia({ tags: ["profiles"] }) ); if (!show) { - return error(404, { + return status(404, { status: 404, message: `No movie found for the id/slug: '${id}'.`, }); diff --git a/api/src/controllers/seasons.ts b/api/src/controllers/seasons.ts index 1a28683b..82b61462 100644 --- a/api/src/controllers/seasons.ts +++ b/api/src/controllers/seasons.ts @@ -51,7 +51,7 @@ export const seasonsH = new Elysia({ tags: ["series"] }) query: { limit, after, 
query, sort, filter }, headers: { "accept-language": languages }, request: { url }, - error, + status, }) => { const langs = processLanguages(languages); @@ -67,7 +67,7 @@ export const seasonsH = new Elysia({ tags: ["series"] }) .limit(1); if (!serie) { - return error(404, { + return status(404, { status: 404, message: `No serie with the id or slug: '${id}'.`, }); diff --git a/api/src/controllers/seed/images.ts b/api/src/controllers/seed/images.ts index d61ee9f1..0961d44e 100644 --- a/api/src/controllers/seed/images.ts +++ b/api/src/controllers/seed/images.ts @@ -21,7 +21,7 @@ type ImageTask = { }; // this will only push a task to the image downloader service and not download it instantly. -// this is both done to prevent to many requests to be sent at once and to make sure POST +// this is both done to prevent too many requests to be sent at once and to make sure POST // requests are not blocked by image downloading or blurhash calculation export const enqueueOptImage = async ( tx: Transaction, @@ -68,7 +68,7 @@ export const enqueueOptImage = async ( kind: "image", message, }); - await tx.execute(sql`notify image`); + await tx.execute(sql`notify kyoo_image`); return { id, @@ -103,8 +103,8 @@ export const processImages = async () => { `); await tx.delete(mqueue).where(eq(mqueue.id, item.id)); - } catch (err) { - console.error("Failed to download image", img.url, err); + } catch (err: any) { + console.error("Failed to download image", img.url, err.message); await tx .update(mqueue) .set({ attempt: sql`${mqueue.attempt}+1` }) @@ -128,10 +128,10 @@ export const processImages = async () => { const client = (await db.$client.connect()) as PoolClient; client.on("notification", (evt) => { - if (evt.channel !== "image") return; + if (evt.channel !== "kyoo_image") return; processAll(); }); - await client.query("listen image"); + await client.query("listen kyoo_image"); // start processing old tasks await processAll(); @@ -139,7 +139,13 @@ export const processImages = async () => { }; async function downloadImage(id: string, url: string): Promise { - // TODO: check if file exists before downloading + const low = await getFile(path.join(imageDir, `${id}.low.jpg`)) + .arrayBuffer() + .catch(() => false as const); + if (low) { + return await getBlurhash(sharp(low)); + } + const resp = await fetch(url, { headers: { "User-Agent": `Kyoo v${version}` }, }); @@ -167,20 +173,15 @@ async function downloadImage(id: string, url: string): Promise { await Bun.write(file, buffer, { mode: 0o660 }); }), ); + return await getBlurhash(image); +} +async function getBlurhash(image: sharp.Sharp): Promise { const { data, info } = await image .resize(32, 32, { fit: "inside" }) .ensureAlpha() .raw() .toBuffer({ resolveWithObject: true }); - const blurHash = encode( - new Uint8ClampedArray(data), - info.width, - info.height, - 4, - 3, - ); - - return blurHash; + return encode(new Uint8ClampedArray(data), info.width, info.height, 4, 3); } diff --git a/api/src/controllers/seed/index.ts b/api/src/controllers/seed/index.ts index 341b1302..a68f8d5e 100644 --- a/api/src/controllers/seed/index.ts +++ b/api/src/controllers/seed/index.ts @@ -16,10 +16,10 @@ export const seed = new Elysia() }) .post( "/movies", - async ({ body, error }) => { + async ({ body, status }) => { const ret = await seedMovie(body); - if ("status" in ret) return error(ret.status, ret as any); - return error(ret.updated ? 200 : 201, ret); + if ("status" in ret) return status(ret.status, ret as any); + return status(ret.updated ? 
200 : 201, ret); }, { detail: { @@ -47,10 +47,10 @@ export const seed = new Elysia() ) .post( "/series", - async ({ body, error }) => { + async ({ body, status }) => { const ret = await seedSerie(body); - if ("status" in ret) return error(ret.status, ret as any); - return error(ret.updated ? 200 : 201, ret); + if ("status" in ret) return status(ret.status, ret as any); + return status(ret.updated ? 200 : 201, ret); }, { detail: { diff --git a/api/src/controllers/seed/insert/entries.ts b/api/src/controllers/seed/insert/entries.ts index e91c882a..592ff4dd 100644 --- a/api/src/controllers/seed/insert/entries.ts +++ b/api/src/controllers/seed/insert/entries.ts @@ -47,7 +47,7 @@ export const insertEntries = async ( items: (SeedEntry | SeedExtra)[], onlyExtras = false, ) => { - if (!items) return []; + if (!items.length) return []; const retEntries = await db.transaction(async (tx) => { const vals: EntryI[] = await Promise.all( diff --git a/api/src/controllers/seed/insert/seasons.ts b/api/src/controllers/seed/insert/seasons.ts index 5f43b8a0..f3075380 100644 --- a/api/src/controllers/seed/insert/seasons.ts +++ b/api/src/controllers/seed/insert/seasons.ts @@ -12,13 +12,18 @@ export const insertSeasons = async ( show: { pk: number; slug: string }, items: SeedSeason[], ) => { + if (!items.length) return []; + return db.transaction(async (tx) => { const vals: SeasonI[] = items.map((x) => { const { translations, ...season } = x; return { ...season, showPk: show.pk, - slug: `${show.slug}-s${season.seasonNumber}`, + slug: + season.seasonNumber === 0 + ? `${show.slug}-specials` + : `${show.slug}-s${season.seasonNumber}`, nextRefresh: guessNextRefresh(season.startAir ?? new Date()), }; }); diff --git a/api/src/controllers/shows/collections.ts b/api/src/controllers/shows/collections.ts index 75bc5dcb..780cd9fb 100644 --- a/api/src/controllers/shows/collections.ts +++ b/api/src/controllers/shows/collections.ts @@ -41,7 +41,7 @@ export const collections = new Elysia({ headers: { "accept-language": languages }, query: { preferOriginal, with: relations }, jwt: { sub, settings }, - error, + status, set, }) => { const langs = processLanguages(languages); @@ -58,13 +58,13 @@ export const collections = new Elysia({ userId: sub, }); if (!ret) { - return error(404, { + return status(404, { status: 404, message: "Collection not found", }); } if (!ret.language) { - return error(422, { + return status(422, { status: 422, message: "Accept-Language header could not be satisfied.", }); @@ -109,7 +109,7 @@ export const collections = new Elysia({ ) .get( "random", - async ({ error, redirect }) => { + async ({ status, redirect }) => { const [serie] = await db .select({ slug: shows.slug }) .from(shows) @@ -117,7 +117,7 @@ export const collections = new Elysia({ .orderBy(sql`random()`) .limit(1); if (!serie) - return error(404, { + return status(404, { status: 404, message: "No collection in the database.", }); @@ -230,7 +230,7 @@ export const collections = new Elysia({ headers: { "accept-language": languages }, jwt: { sub, settings }, request: { url }, - error, + status, }) => { const [collection] = await db .select({ pk: shows.pk }) @@ -244,7 +244,7 @@ export const collections = new Elysia({ .limit(1); if (!collection) { - return error(404, { + return status(404, { status: 404, message: `No collection with the id or slug: '${id}'.`, }); @@ -287,7 +287,7 @@ export const collections = new Elysia({ headers: { "accept-language": languages }, jwt: { sub, settings }, request: { url }, - error, + status, }) => { const [collection] 
= await db .select({ pk: shows.pk }) @@ -301,7 +301,7 @@ export const collections = new Elysia({ .limit(1); if (!collection) { - return error(404, { + return status(404, { status: 404, message: `No collection with the id or slug: '${id}'.`, }); @@ -344,7 +344,7 @@ export const collections = new Elysia({ headers: { "accept-language": languages }, jwt: { sub, settings }, request: { url }, - error, + status, }) => { const [collection] = await db .select({ pk: shows.pk }) @@ -358,7 +358,7 @@ export const collections = new Elysia({ .limit(1); if (!collection) { - return error(404, { + return status(404, { status: 404, message: `No collection with the id or slug: '${id}'.`, }); diff --git a/api/src/controllers/shows/logic.ts b/api/src/controllers/shows/logic.ts index 26c2af66..e961127d 100644 --- a/api/src/controllers/shows/logic.ts +++ b/api/src/controllers/shows/logic.ts @@ -1,4 +1,4 @@ -import { type SQL, and, eq, exists, ne, sql } from "drizzle-orm"; +import { type SQL, and, eq, exists, gt, ne, sql } from "drizzle-orm"; import { db } from "~/db"; import { entries, @@ -60,7 +60,7 @@ export const showFilters: FilterDef = { runtime: { column: shows.runtime, type: "float" }, airDate: { column: shows.startAir, type: "date" }, startAir: { column: shows.startAir, type: "date" }, - endAir: { column: shows.startAir, type: "date" }, + endAir: { column: shows.endAir, type: "date" }, originalLanguage: { column: sql`${shows.original}->'language'`, type: "string", @@ -76,6 +76,7 @@ export const showFilters: FilterDef = { values: WatchlistStatus.enum, }, score: { column: watchStatusQ.score, type: "int" }, + isAvailable: { column: sql`(${shows.availableCount} > 0)`, type: "bool" }, }; export const showSort = Sort( { diff --git a/api/src/controllers/shows/movies.ts b/api/src/controllers/shows/movies.ts index 3fadc317..059ffad4 100644 --- a/api/src/controllers/shows/movies.ts +++ b/api/src/controllers/shows/movies.ts @@ -31,7 +31,7 @@ export const movies = new Elysia({ prefix: "/movies", tags: ["movies"] }) headers: { "accept-language": languages }, query: { preferOriginal, with: relations }, jwt: { sub, settings }, - error, + status, set, }) => { const langs = processLanguages(languages); @@ -48,13 +48,13 @@ export const movies = new Elysia({ prefix: "/movies", tags: ["movies"] }) userId: sub, }); if (!ret) { - return error(404, { + return status(404, { status: 404, message: `No movie found with id or slug: '${id}'.`, }); } if (!ret.language) { - return error(422, { + return status(422, { status: 422, message: "Accept-Language header could not be satisfied.", }); @@ -99,7 +99,7 @@ export const movies = new Elysia({ prefix: "/movies", tags: ["movies"] }) ) .get( "random", - async ({ error, redirect }) => { + async ({ status, redirect }) => { const [movie] = await db .select({ slug: shows.slug }) .from(shows) @@ -107,7 +107,7 @@ export const movies = new Elysia({ prefix: "/movies", tags: ["movies"] }) .orderBy(sql`random()`) .limit(1); if (!movie) - return error(404, { + return status(404, { status: 404, message: "No movies in the database.", }); diff --git a/api/src/controllers/shows/series.ts b/api/src/controllers/shows/series.ts index e3d1dbb6..e1a63c82 100644 --- a/api/src/controllers/shows/series.ts +++ b/api/src/controllers/shows/series.ts @@ -31,7 +31,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] }) headers: { "accept-language": languages }, query: { preferOriginal, with: relations }, jwt: { sub, settings }, - error, + status, set, }) => { const langs = 
processLanguages(languages); @@ -48,13 +48,13 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] }) userId: sub, }); if (!ret) { - return error(404, { + return status(404, { status: 404, message: `No serie found with the id or slug: '${id}'.`, }); } if (!ret.language) { - return error(422, { + return status(422, { status: 422, message: "Accept-Language header could not be satisfied.", }); @@ -102,7 +102,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] }) ) .get( "random", - async ({ error, redirect }) => { + async ({ status, redirect }) => { const [serie] = await db .select({ slug: shows.slug }) .from(shows) @@ -110,7 +110,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] }) .orderBy(sql`random()`) .limit(1); if (!serie) - return error(404, { + return status(404, { status: 404, message: "No series in the database.", }); diff --git a/api/src/controllers/shows/shows.ts b/api/src/controllers/shows/shows.ts index 85002465..c100bc6a 100644 --- a/api/src/controllers/shows/shows.ts +++ b/api/src/controllers/shows/shows.ts @@ -23,14 +23,14 @@ export const showsH = new Elysia({ prefix: "/shows", tags: ["shows"] }) .use(auth) .get( "random", - async ({ error, redirect }) => { + async ({ status, redirect }) => { const [show] = await db .select({ kind: shows.kind, slug: shows.slug }) .from(shows) .orderBy(sql`random()`) .limit(1); if (!show) - return error(404, { + return status(404, { status: 404, message: "No shows in the database.", }); diff --git a/api/src/controllers/staff.ts b/api/src/controllers/staff.ts index 3ee9321a..ee8266aa 100644 --- a/api/src/controllers/staff.ts +++ b/api/src/controllers/staff.ts @@ -120,14 +120,14 @@ export const staffH = new Elysia({ tags: ["staff"] }) .use(auth) .get( "/staff/:id", - async ({ params: { id }, error }) => { + async ({ params: { id }, status }) => { const [ret] = await db .select() .from(staff) .where(isUuid(id) ? 
eq(staff.id, id) : eq(staff.slug, id)) .limit(1); if (!ret) { - return error(404, { + return status(404, { status: 404, message: `No staff found with the id or slug: '${id}'`, }); @@ -155,14 +155,14 @@ export const staffH = new Elysia({ tags: ["staff"] }) ) .get( "/staff/random", - async ({ error, redirect }) => { + async ({ status, redirect }) => { const [member] = await db .select({ slug: staff.slug }) .from(staff) .orderBy(sql`random()`) .limit(1); if (!member) - return error(404, { + return status(404, { status: 404, message: "No staff in the database.", }); @@ -192,7 +192,7 @@ export const staffH = new Elysia({ tags: ["staff"] }) headers: { "accept-language": languages }, request: { url }, jwt: { sub, settings }, - error, + status, }) => { const [member] = await db .select({ pk: staff.pk }) @@ -201,7 +201,7 @@ export const staffH = new Elysia({ tags: ["staff"] }) .limit(1); if (!member) { - return error(404, { + return status(404, { status: 404, message: `No staff member with the id or slug: '${id}'.`, }); @@ -363,7 +363,7 @@ export const staffH = new Elysia({ tags: ["staff"] }) params: { id }, query: { limit, after, query, sort, filter }, request: { url }, - error, + status, }) => { const [movie] = await db .select({ pk: shows.pk }) @@ -377,7 +377,7 @@ export const staffH = new Elysia({ tags: ["staff"] }) .limit(1); if (!movie) { - return error(404, { + return status(404, { status: 404, message: `No movie with the id or slug: '${id}'.`, }); @@ -430,7 +430,7 @@ export const staffH = new Elysia({ tags: ["staff"] }) params: { id }, query: { limit, after, query, sort, filter }, request: { url }, - error, + status, }) => { const [serie] = await db .select({ pk: shows.pk }) @@ -444,7 +444,7 @@ export const staffH = new Elysia({ tags: ["staff"] }) .limit(1); if (!serie) { - return error(404, { + return status(404, { status: 404, message: `No serie with the id or slug: '${id}'.`, }); diff --git a/api/src/controllers/studios.ts b/api/src/controllers/studios.ts index 373df884..09fd4f84 100644 --- a/api/src/controllers/studios.ts +++ b/api/src/controllers/studios.ts @@ -135,7 +135,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) params: { id }, headers: { "accept-language": languages }, query: { with: relations }, - error, + status, set, }) => { const langs = processLanguages(languages); @@ -147,13 +147,13 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) relations, }); if (!ret) { - return error(404, { + return status(404, { status: 404, message: `No studio found with the id or slug: '${id}'`, }); } if (!ret.language) { - return error(422, { + return status(422, { status: 422, message: "Accept-Language header could not be satisfied.", }); @@ -195,14 +195,14 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) ) .get( "random", - async ({ error, redirect }) => { + async ({ status, redirect }) => { const [studio] = await db .select({ slug: studios.slug }) .from(studios) .orderBy(sql`random()`) .limit(1); if (!studio) - return error(404, { + return status(404, { status: 404, message: "No studios in the database.", }); @@ -305,7 +305,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) headers: { "accept-language": languages }, jwt: { sub, settings }, request: { url }, - error, + status, }) => { const [studio] = await db .select({ pk: studios.pk }) @@ -314,7 +314,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) .limit(1); if (!studio) { - 
return error(404, { + return status(404, { status: 404, message: `No studios with the id or slug: '${id}'.`, }); @@ -366,7 +366,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) headers: { "accept-language": languages }, jwt: { sub, settings }, request: { url }, - error, + status, }) => { const [studio] = await db .select({ pk: studios.pk }) @@ -375,7 +375,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) .limit(1); if (!studio) { - return error(404, { + return status(404, { status: 404, message: `No studios with the id or slug: '${id}'.`, }); @@ -428,7 +428,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) headers: { "accept-language": languages }, jwt: { sub, settings }, request: { url }, - error, + status, }) => { const [studio] = await db .select({ pk: studios.pk }) @@ -437,7 +437,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] }) .limit(1); if (!studio) { - return error(404, { + return status(404, { status: 404, message: `No studios with the id or slug: '${id}'.`, }); diff --git a/api/src/controllers/videos.ts b/api/src/controllers/videos.ts index 0fe22fc5..2f9f414d 100644 --- a/api/src/controllers/videos.ts +++ b/api/src/controllers/videos.ts @@ -1,7 +1,6 @@ -import { and, eq, exists, inArray, not, notExists, or, sql } from "drizzle-orm"; -import { alias } from "drizzle-orm/pg-core"; +import { and, eq, notExists, or, sql } from "drizzle-orm"; import { Elysia, t } from "elysia"; -import { db } from "~/db"; +import { type Transaction, db } from "~/db"; import { entries, entryVideoJoin, shows, videos } from "~/db/schema"; import { conflictUpdateAllExcept, @@ -23,7 +22,7 @@ import { sortToSql, } from "~/models/utils"; import { desc as description } from "~/models/utils/descriptions"; -import { Guesses, SeedVideo, Video } from "~/models/video"; +import { Guess, Guesses, SeedVideo, Video } from "~/models/video"; import { comment } from "~/utils"; import { computeVideoSlug } from "./seed/insert/entries"; import { @@ -31,9 +30,144 @@ import { updateAvailableSince, } from "./seed/insert/shows"; +async function linkVideos( + tx: Transaction, + links: { + video: number; + entry: Omit & { + movie?: { id?: string; slug?: string }; + serie?: { id?: string; slug?: string }; + }; + }[], +) { + if (!links.length) return {}; + + const entriesQ = tx + .select({ + pk: entries.pk, + id: entries.id, + slug: entries.slug, + kind: entries.kind, + seasonNumber: entries.seasonNumber, + episodeNumber: entries.episodeNumber, + order: entries.order, + showId: sql`${shows.id}`.as("showId"), + showSlug: sql`${shows.slug}`.as("showSlug"), + externalId: entries.externalId, + }) + .from(entries) + .innerJoin(shows, eq(entries.showPk, shows.pk)) + .as("entriesQ"); + + const hasRenderingQ = tx + .select() + .from(entryVideoJoin) + .where(eq(entryVideoJoin.entryPk, entriesQ.pk)); + + const ret = await tx + .insert(entryVideoJoin) + .select( + tx + .selectDistinctOn([entriesQ.pk, videos.pk], { + entryPk: entriesQ.pk, + videoPk: videos.pk, + slug: computeVideoSlug(entriesQ.slug, sql`exists(${hasRenderingQ})`), + }) + .from( + values(links, { + video: "integer", + entry: "jsonb", + }).as("j"), + ) + .innerJoin(videos, eq(videos.pk, sql`j.video`)) + .innerJoin( + entriesQ, + or( + and( + sql`j.entry ? 'slug'`, + eq(entriesQ.slug, sql`j.entry->>'slug'`), + ), + and( + sql`j.entry ? 
'movie'`, + or( + eq(entriesQ.showId, sql`(j.entry #>> '{movie, id}')::uuid`), + eq(entriesQ.showSlug, sql`j.entry #>> '{movie, slug}'`), + ), + eq(entriesQ.kind, "movie"), + ), + and( + sql`j.entry ? 'serie'`, + or( + eq(entriesQ.showId, sql`(j.entry #>> '{serie, id}')::uuid`), + eq(entriesQ.showSlug, sql`j.entry #>> '{serie, slug}'`), + ), + or( + and( + sql`j.entry ?& array['season', 'episode']`, + eq(entriesQ.seasonNumber, sql`(j.entry->>'season')::integer`), + eq( + entriesQ.episodeNumber, + sql`(j.entry->>'episode')::integer`, + ), + ), + and( + sql`j.entry ? 'order'`, + eq(entriesQ.order, sql`(j.entry->>'order')::float`), + ), + and( + sql`j.entry ? 'special'`, + eq( + entriesQ.episodeNumber, + sql`(j.entry->>'special')::integer`, + ), + eq(entriesQ.kind, "special"), + ), + ), + ), + and( + sql`j.entry ? 'externalId'`, + sql`j.entry->'externalId' <@ ${entriesQ.externalId}`, + ), + ), + ), + ) + .onConflictDoUpdate({ + target: [entryVideoJoin.entryPk, entryVideoJoin.videoPk], + // this is basically a `.onConflictDoNothing()` but we want `returning` to give us the existing data + set: { entryPk: sql`excluded.entry_pk` }, + }) + .returning({ + slug: entryVideoJoin.slug, + entryPk: entryVideoJoin.entryPk, + videoPk: entryVideoJoin.videoPk, + }); + + const entr = ret.reduce( + (acc, x) => { + acc[x.videoPk] ??= []; + acc[x.videoPk].push({ slug: x.slug }); + return acc; + }, + {} as Record, + ); + + const entriesPk = [...new Set(ret.map((x) => x.entryPk))]; + await updateAvailableCount( + tx, + tx + .selectDistinct({ pk: entries.showPk }) + .from(entries) + .where(eq(entries.pk, sql`any(${sqlarr(entriesPk)})`)), + ); + await updateAvailableSince(tx, entriesPk); + + return entr; +} + const CreatedVideo = t.Object({ id: t.String({ format: "uuid" }), path: t.String({ examples: [bubbleVideo.path] }), + guess: t.Omit(Guess, ["history"]), entries: t.Array( t.Object({ slug: t.String({ format: "slug", examples: ["bubble-v2"] }), @@ -60,7 +194,7 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] }) }) .from(videos) .leftJoin( - sql`jsonb_array_elements_text(${videos.guess}->'year') as year`, + sql`jsonb_array_elements_text(${videos.guess}->'years') as year`, sql`true`, ) .innerJoin(entryVideoJoin, eq(entryVideoJoin.videoPk, videos.pk)) @@ -119,7 +253,7 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] }) }, ) .get( - "unknowns", + "unmatched", async ({ query: { sort, query, limit, after }, request: { url } }) => { const ret = await db .select() @@ -146,7 +280,7 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] }) return createPage(ret, { url, sort, limit }); }, { - detail: { description: "Get unknown/unmatch videos." }, + detail: { description: "Get unknown/unmatched videos." 
}, query: t.Object({ sort: Sort( { createdAt: videos.createdAt, path: videos.path }, @@ -169,9 +303,9 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] }) ) .post( "", - async ({ body, error }) => { + async ({ body, status }) => { return await db.transaction(async (tx) => { - let vids: { pk: number; id: string; path: string }[] = []; + let vids: { pk: number; id: string; path: string; guess: Guess }[] = []; try { vids = await tx .insert(videos) @@ -184,10 +318,11 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] }) pk: videos.pk, id: videos.id, path: videos.path, + guess: videos.guess, }); } catch (e) { if (!isUniqueConstraint(e)) throw e; - return error(409, { + return status(409, { status: 409, message: comment` Invalid rendering. A video with the same (rendering, part, version) combo @@ -202,7 +337,6 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] }) if (!x.for) return []; return x.for.map((e) => ({ video: vids.find((v) => v.path === x.path)!.pk, - path: x.path, entry: { ...e, movie: @@ -222,148 +356,26 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] }) }); if (!vidEntries.length) { - return error( + return status( 201, - vids.map((x) => ({ id: x.id, path: x.path, entries: [] })), + vids.map((x) => ({ + id: x.id, + path: x.path, + guess: x.guess, + entries: [], + })), ); } - const entriesQ = tx - .select({ - pk: entries.pk, - id: entries.id, - slug: entries.slug, - kind: entries.kind, - seasonNumber: entries.seasonNumber, - episodeNumber: entries.episodeNumber, - order: entries.order, - showId: sql`${shows.id}`.as("showId"), - showSlug: sql`${shows.slug}`.as("showSlug"), - externalId: entries.externalId, - }) - .from(entries) - .innerJoin(shows, eq(entries.showPk, shows.pk)) - .as("entriesQ"); + const links = await linkVideos(tx, vidEntries); - const hasRenderingQ = tx - .select() - .from(entryVideoJoin) - .where(eq(entryVideoJoin.entryPk, entriesQ.pk)); - - const ret = await tx - .insert(entryVideoJoin) - .select( - tx - .selectDistinctOn([entriesQ.pk, videos.pk], { - entryPk: entriesQ.pk, - videoPk: videos.pk, - slug: computeVideoSlug( - entriesQ.slug, - sql`exists(${hasRenderingQ})`, - ), - }) - .from( - values(vidEntries, { - video: "integer", - entry: "jsonb", - }).as("j"), - ) - .innerJoin(videos, eq(videos.pk, sql`j.video`)) - .innerJoin( - entriesQ, - or( - and( - sql`j.entry ? 'slug'`, - eq(entriesQ.slug, sql`j.entry->>'slug'`), - ), - and( - sql`j.entry ? 'movie'`, - or( - eq( - entriesQ.showId, - sql`(j.entry #>> '{movie, id}')::uuid`, - ), - eq(entriesQ.showSlug, sql`j.entry #>> '{movie, slug}'`), - ), - eq(entriesQ.kind, "movie"), - ), - and( - sql`j.entry ? 'serie'`, - or( - eq( - entriesQ.showId, - sql`(j.entry #>> '{serie, id}')::uuid`, - ), - eq(entriesQ.showSlug, sql`j.entry #>> '{serie, slug}'`), - ), - or( - and( - sql`j.entry ?& array['season', 'episode']`, - eq( - entriesQ.seasonNumber, - sql`(j.entry->>'season')::integer`, - ), - eq( - entriesQ.episodeNumber, - sql`(j.entry->>'episode')::integer`, - ), - ), - and( - sql`j.entry ? 'order'`, - eq(entriesQ.order, sql`(j.entry->>'order')::float`), - ), - and( - sql`j.entry ? 'special'`, - eq( - entriesQ.episodeNumber, - sql`(j.entry->>'special')::integer`, - ), - eq(entriesQ.kind, "special"), - ), - ), - ), - and( - sql`j.entry ? 
'externalId'`, - sql`j.entry->'externalId' <@ ${entriesQ.externalId}`, - ), - ), - ), - ) - .onConflictDoUpdate({ - target: [entryVideoJoin.entryPk, entryVideoJoin.videoPk], - // this is basically a `.onConflictDoNothing()` but we want `returning` to give us the existing data - set: { entryPk: sql`excluded.entry_pk` }, - }) - .returning({ - slug: entryVideoJoin.slug, - entryPk: entryVideoJoin.entryPk, - videoPk: entryVideoJoin.videoPk, - }); - const entr = ret.reduce( - (acc, x) => { - acc[x.videoPk] ??= []; - acc[x.videoPk].push({ slug: x.slug }); - return acc; - }, - {} as Record, - ); - - const entriesPk = [...new Set(ret.map((x) => x.entryPk))]; - await updateAvailableCount( - tx, - tx - .selectDistinct({ pk: entries.showPk }) - .from(entries) - .where(eq(entries.pk, sql`any(${sqlarr(entriesPk)})`)), - ); - await updateAvailableSince(tx, entriesPk); - - return error( + return status( 201, vids.map((x) => ({ id: x.id, path: x.path, - entries: entr[x.pk] ?? [], + guess: x.guess, + entries: links[x.pk] ?? [], })), ); }); @@ -446,4 +458,75 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] }) ), response: { 200: t.Array(t.String()) }, }, + ) + .post( + "/link", + async ({ body, status }) => { + return await db.transaction(async (tx) => { + const vids = await tx + .select({ pk: videos.pk, id: videos.id, path: videos.path }) + .from(videos) + .where(eq(videos.id, sql`any(${sqlarr(body.map((x) => x.id))})`)); + const lVids = body.flatMap((x) => { + return x.for.map((e) => ({ + video: vids.find((v) => v.id === x.id)!.pk, + entry: { + ...e, + movie: + "movie" in e + ? isUuid(e.movie) + ? { id: e.movie } + : { slug: e.movie } + : undefined, + serie: + "serie" in e + ? isUuid(e.serie) + ? { id: e.serie } + : { slug: e.serie } + : undefined, + }, + })); + }); + const links = await linkVideos(tx, lVids); + return status( + 201, + vids.map((x) => ({ + id: x.id, + path: x.path, + entries: links[x.pk] ?? 
[], + })), + ); + }); + }, + { + detail: { + description: "Link existing videos to existing entries", + }, + body: t.Array( + t.Object({ + id: t.String({ + description: "Id of the video", + format: "uuid", + }), + for: t.Array(SeedVideo.properties.for.items), + }), + ), + response: { + 201: t.Array( + t.Object({ + id: t.String({ format: "uuid" }), + path: t.String({ examples: ["/video/made in abyss s1e13.mkv"] }), + entries: t.Array( + t.Object({ + slug: t.String({ + format: "slug", + examples: ["made-in-abyss-s1e13"], + }), + }), + ), + }), + ), + 422: KError, + }, + }, ); diff --git a/api/src/db/schema/staff.ts b/api/src/db/schema/staff.ts index a23839cb..e261262a 100644 --- a/api/src/db/schema/staff.ts +++ b/api/src/db/schema/staff.ts @@ -19,6 +19,7 @@ export const roleKind = schema.enum("role_kind", [ "writter", "producer", "music", + "crew", "other", ]); diff --git a/api/src/index.ts b/api/src/index.ts index 1769ddaf..2ba8c418 100644 --- a/api/src/index.ts +++ b/api/src/index.ts @@ -1,6 +1,6 @@ import { swagger } from "@elysiajs/swagger"; import Elysia from "elysia"; -import { app } from "./base"; +import { handlers } from "./base"; import { processImages } from "./controllers/seed/images"; import { migrate } from "./db"; import { comment } from "./utils"; @@ -10,13 +10,14 @@ await migrate(); // run image processor task in background processImages(); -new Elysia() +const app = new Elysia() .use( swagger({ scalarConfig: { sources: [ { slug: "kyoo", url: "/swagger/json" }, - { slug: "keibi", url: "http://localhost:4568/auth/swagger/doc.json" }, + { slug: "keibi", url: "/auth/swagger/doc.json" }, + { slug: "scanner", url: "/scanner/openapi.json" }, ], }, documentation: { @@ -83,7 +84,7 @@ new Elysia() }, }), ) - .use(app) + .use(handlers) .listen(3567); console.log(`Api running at ${app.server?.hostname}:${app.server?.port}`); diff --git a/api/src/models/collections.ts b/api/src/models/collections.ts index f3221ae3..7da75645 100644 --- a/api/src/models/collections.ts +++ b/api/src/models/collections.ts @@ -65,7 +65,7 @@ export const FullCollection = t.Intersect([ export type FullCollection = Prettify; export const SeedCollection = t.Composite([ - t.Omit(BaseCollection, ["kind", "startAir", "endAir", "nextRefresh"]), + t.Omit(BaseCollection, ["startAir", "endAir", "nextRefresh"]), t.Object({ slug: t.String({ format: "slug" }), originalLanguage: Language({ diff --git a/api/src/models/entry/movie-entry.ts b/api/src/models/entry/movie-entry.ts index 1a8df313..3aa65ee9 100644 --- a/api/src/models/entry/movie-entry.ts +++ b/api/src/models/entry/movie-entry.ts @@ -56,7 +56,7 @@ export type MovieEntry = Prettify; export const SeedMovieEntry = t.Composite([ t.Omit(BaseMovieEntry, ["thumbnail", "nextRefresh"]), t.Object({ - slug: t.Optional(t.String({ format: "slug" })), + slug: t.Optional(t.Nullable(t.String({ format: "slug" }))), thumbnail: t.Nullable(SeedImage), translations: TranslationRecord( t.Intersect([ diff --git a/api/src/models/examples/bubble.ts b/api/src/models/examples/bubble.ts index 573c0cf4..498c6e5c 100644 --- a/api/src/models/examples/bubble.ts +++ b/api/src/models/examples/bubble.ts @@ -10,8 +10,9 @@ export const bubbleVideo: Video = { guess: { kind: "movie", title: "bubble", - year: [2022], + years: [2022], from: "guessit", + history: [], }, createdAt: "2024-11-23T15:01:24.968Z", updatedAt: "2024-11-23T15:01:24.968Z", @@ -33,7 +34,7 @@ export const bubble: SeedMovie = { "https://image.tmdb.org/t/p/original/a8Q2g0g7XzAF6gcB8qgn37ccb9Y.jpg", banner: null, logo: 
"https://image.tmdb.org/t/p/original/ihIs7fayAmZieMlMQbs6TWM77uf.png", - trailerUrl: "https://www.youtube.com/watch?v=vs7zsyIZkMM", + trailer: "https://www.youtube.com/watch?v=vs7zsyIZkMM", }, ja: { name: "バブル:2022", @@ -47,7 +48,7 @@ export const bubble: SeedMovie = { thumbnail: "https://image.tmdb.org/t/p/original/jp.jpg", banner: null, logo: null, - trailerUrl: "https://www.youtube.com/watch?v=vs7zsyIZkMM", + trailer: "https://www.youtube.com/watch?v=vs7zsyIZkMM", }, }, genres: ["animation", "adventure", "science-fiction", "fantasy"], diff --git a/api/src/models/examples/dune-1984.ts b/api/src/models/examples/dune-1984.ts index b417ed7e..d64dc06e 100644 --- a/api/src/models/examples/dune-1984.ts +++ b/api/src/models/examples/dune-1984.ts @@ -10,8 +10,9 @@ export const dune1984Video: Video = { guess: { kind: "movie", title: "dune", - year: [1984], + years: [1984], from: "guessit", + history: [], }, createdAt: "2024-12-02T11:45:12.968Z", updatedAt: "2024-12-02T11:45:12.968Z", @@ -33,7 +34,7 @@ export const dune1984: SeedMovie = { "https://image.tmdb.org/t/p/original/pCHV6BntWLO2H6wQOj4LwzAWqpa.jpg", banner: null, logo: "https://image.tmdb.org/t/p/original/olbKnk2VvFcM2STl0dJAf6kfydo.png", - trailerUrl: "https://www.youtube.com/watch?v=vczYTLQ6oiE", + trailer: "https://www.youtube.com/watch?v=vczYTLQ6oiE", }, }, genres: ["adventure", "drama", "science-fiction"], diff --git a/api/src/models/examples/dune-2021.ts b/api/src/models/examples/dune-2021.ts index 1c6b9427..f8e32e72 100644 --- a/api/src/models/examples/dune-2021.ts +++ b/api/src/models/examples/dune-2021.ts @@ -10,8 +10,9 @@ export const duneVideo: Video = { guess: { kind: "movie", title: "dune", - year: [2021], + years: [2021], from: "guessit", + history: [], }, createdAt: "2024-12-02T10:10:24.968Z", updatedAt: "2024-12-02T10:10:24.968Z", @@ -33,7 +34,7 @@ export const dune: SeedMovie = { "https://image.tmdb.org/t/p/original/k2ocXnNkmvE6rJomRkExIStFq3v.jpg", banner: null, logo: "https://image.tmdb.org/t/p/original/5nDsd3u1c6kDphbtIqkHseLg7HL.png", - trailerUrl: "https://www.youtube.com/watch?v=n9xhJrPXop4", + trailer: "https://www.youtube.com/watch?v=n9xhJrPXop4", }, }, genres: ["adventure", "drama", "science-fiction", "action"], diff --git a/api/src/models/examples/made-in-abyss.ts b/api/src/models/examples/made-in-abyss.ts index 58d1dfcf..fae77b19 100644 --- a/api/src/models/examples/made-in-abyss.ts +++ b/api/src/models/examples/made-in-abyss.ts @@ -9,10 +9,10 @@ export const madeInAbyssVideo: Video = { version: 1, guess: { title: "Made in abyss", - season: [1], - episode: [13], + episodes: [{ season: 1, episode: 13 }], kind: "episode", from: "guessit", + history: [], }, createdAt: "2024-11-23T15:01:24.968Z", updatedAt: "2024-11-23T15:01:24.968Z", @@ -57,7 +57,7 @@ export const madeInAbyss = { "https://image.tmdb.org/t/p/original/Df9XrvZFIeQfLKfu8evRmzvRsd.jpg", logo: "https://image.tmdb.org/t/p/original/7hY3Q4GhkiYPBfn4UoVg0AO4Zgk.png", banner: null, - trailerUrl: "https://www.youtube.com/watch?v=ePOyy6Wlk4s", + trailer: "https://www.youtube.com/watch?v=ePOyy6Wlk4s", }, ja: { name: "メイドインアビス", @@ -91,7 +91,7 @@ export const madeInAbyss = { "https://image.tmdb.org/t/p/original/Df9XrvZFIeQfLKfu8evRmzvRsd.jpg", logo: "https://image.tmdb.org/t/p/original/7hY3Q4GhkiYPBfn4UoVg0AO4Zgk.png", banner: null, - trailerUrl: "https://www.youtube.com/watch?v=ePOyy6Wlk4s", + trailer: "https://www.youtube.com/watch?v=ePOyy6Wlk4s", }, }, genres: [ diff --git a/api/src/models/movie.ts b/api/src/models/movie.ts index d107474a..3d14dc4f 100644 --- 
a/api/src/models/movie.ts +++ b/api/src/models/movie.ts @@ -72,7 +72,7 @@ export const FullMovie = t.Intersect([ export type FullMovie = Prettify; export const SeedMovie = t.Composite([ - t.Omit(BaseMovie, ["kind", "nextRefresh"]), + t.Omit(BaseMovie, ["nextRefresh"]), t.Object({ slug: t.String({ format: "slug", examples: ["bubble"] }), originalLanguage: Language({ @@ -80,12 +80,19 @@ export const SeedMovie = t.Composite([ }), translations: TranslationRecord( t.Composite([ - t.Omit(MovieTranslation, ["poster", "thumbnail", "banner", "logo"]), + t.Omit(MovieTranslation, [ + "poster", + "thumbnail", + "banner", + "logo", + "trailerUrl", + ]), t.Object({ poster: t.Nullable(SeedImage), thumbnail: t.Nullable(SeedImage), banner: t.Nullable(SeedImage), logo: t.Nullable(SeedImage), + trailer: t.Nullable(SeedImage), latinName: t.Optional(Original.properties.latinName), }), ]), diff --git a/api/src/models/season.ts b/api/src/models/season.ts index 79c11976..d37619b5 100644 --- a/api/src/models/season.ts +++ b/api/src/models/season.ts @@ -8,7 +8,7 @@ import { TranslationRecord } from "./utils/language"; import { Resource } from "./utils/resource"; export const BaseSeason = t.Object({ - seasonNumber: t.Integer({ minimum: 1 }), + seasonNumber: t.Integer({ minimum: 0 }), startAir: t.Nullable(t.String({ format: "date" })), endAir: t.Nullable(t.String({ format: "date" })), diff --git a/api/src/models/serie.ts b/api/src/models/serie.ts index e16a0ea6..61902a46 100644 --- a/api/src/models/serie.ts +++ b/api/src/models/serie.ts @@ -88,7 +88,7 @@ export const FullSerie = t.Intersect([ export type FullSerie = Prettify; export const SeedSerie = t.Composite([ - t.Omit(BaseSerie, ["kind", "nextRefresh"]), + t.Omit(BaseSerie, ["nextRefresh"]), t.Object({ slug: t.String({ format: "slug" }), originalLanguage: Language({ @@ -96,12 +96,19 @@ export const SeedSerie = t.Composite([ }), translations: TranslationRecord( t.Composite([ - t.Omit(SerieTranslation, ["poster", "thumbnail", "banner", "logo"]), + t.Omit(SerieTranslation, [ + "poster", + "thumbnail", + "banner", + "logo", + "trailerUrl", + ]), t.Object({ poster: t.Nullable(SeedImage), thumbnail: t.Nullable(SeedImage), banner: t.Nullable(SeedImage), logo: t.Nullable(SeedImage), + trailer: t.Nullable(SeedImage), latinName: t.Optional(Original.properties.latinName), }), ]), diff --git a/api/src/models/staff.ts b/api/src/models/staff.ts index 9d6fe403..c938c810 100644 --- a/api/src/models/staff.ts +++ b/api/src/models/staff.ts @@ -16,6 +16,7 @@ export const Role = t.Object({ "writter", "producer", "music", + "crew", "other", ]), character: t.Nullable(Character), diff --git a/api/src/models/utils/filters/index.ts b/api/src/models/utils/filters/index.ts index 18fcdd77..1de7a1ce 100644 --- a/api/src/models/utils/filters/index.ts +++ b/api/src/models/utils/filters/index.ts @@ -9,7 +9,7 @@ export type FilterDef = { [key: string]: | { column: Column | SQLWrapper; - type: "int" | "float" | "date" | "string"; + type: "int" | "float" | "date" | "string" | "bool"; isArray?: boolean; } | { diff --git a/api/src/models/utils/filters/parser.ts b/api/src/models/utils/filters/parser.ts index 26db94c8..ab82ee40 100644 --- a/api/src/models/utils/filters/parser.ts +++ b/api/src/models/utils/filters/parser.ts @@ -29,7 +29,8 @@ export type Value = | { type: "float"; value: number } | { type: "date"; value: string } | { type: "string"; value: string } - | { type: "enum"; value: string }; + | { type: "enum"; value: string } + | { type: "bool"; value: boolean }; const operators = ["eq", 
"ne", "gt", "ge", "lt", "le", "has"] as const; export type Operator = (typeof operators)[number]; export type Expression = diff --git a/api/src/models/utils/filters/to-sql.ts b/api/src/models/utils/filters/to-sql.ts index dbcb686a..a35d37c3 100644 --- a/api/src/models/utils/filters/to-sql.ts +++ b/api/src/models/utils/filters/to-sql.ts @@ -48,6 +48,18 @@ export const toDrizzle = (expr: Expression, config: FilterDef): SQL => { // but parser doesn't know if an enum should be a string expr.value = { type: "string", value: expr.value.value }; } + if (prop.type === "bool" && expr.value.type === "enum") { + if (expr.value.value !== "false" && expr.value.value !== "true") { + throw new KErrorT( + comment` + Invalid value for property ${expr.property}. + Get ${expr.value.value} but expected true or false. + `, + { in: where }, + ); + } + expr.value = { type: "bool", value: expr.value.value === "true" }; + } if (prop.type !== expr.value.type) { throw new KErrorT( comment` diff --git a/api/src/models/video.ts b/api/src/models/video.ts index f9cefaf2..c0c1298e 100644 --- a/api/src/models/video.ts +++ b/api/src/models/video.ts @@ -1,45 +1,37 @@ -import { PatternStringExact } from "@sinclair/typebox"; +import { PatternStringExact, type TSchema } from "@sinclair/typebox"; import { t } from "elysia"; import { type Prettify, comment } from "~/utils"; import { ExtraType } from "./entry/extra"; import { bubble, bubbleVideo, registerExamples } from "./examples"; import { DbMetadata, EpisodeId, ExternalId, Resource } from "./utils"; -const ExternalIds = t.Record( - t.String(), - t.Omit( - t.Union([ - EpisodeId.patternProperties[PatternStringExact], - ExternalId().patternProperties[PatternStringExact], - ]), - ["link"], - ), -); -type ExternalIds = typeof ExternalIds.static; +const Opt = (schema: TSchema) => t.Optional(t.Nullable(schema)); export const Guess = t.Recursive((Self) => t.Object( { title: t.String(), - year: t.Optional(t.Array(t.Integer(), { default: [] })), - season: t.Optional(t.Array(t.Integer(), { default: [] })), - episode: t.Optional(t.Array(t.Integer(), { default: [] })), - kind: t.Optional(t.UnionEnum(["episode", "movie", "extra"])), - extraKind: t.Optional(ExtraType), - externalId: t.Optional(ExternalIds), + kind: Opt(t.UnionEnum(["episode", "movie", "extra"])), + extraKind: Opt(ExtraType), + years: Opt(t.Array(t.Integer(), { default: [] })), + episodes: Opt( + t.Array( + t.Object({ season: t.Nullable(t.Integer()), episode: t.Integer() }), + { default: [] }, + ), + ), + externalId: Opt(t.Record(t.String(), t.String())), from: t.String({ description: "Name of the tool that made the guess", }), - history: t.Optional( - t.Array(t.Omit(Self, ["history"]), { - default: [], - description: comment` - When another tool refines the guess or a user manually edit it, the history of the guesses - are kept in this \`history\` value. - `, - }), - ), + history: t.Array(t.Omit(Self, ["history"]), { + default: [], + description: comment` + When another tool refines the guess or a user manually edit it, the history of the guesses + are kept in this \`history\` value. 
+ `, + }), }, { additionalProperties: true, @@ -92,7 +84,16 @@ export const SeedVideo = t.Object({ }), }), t.Object({ - externalId: ExternalIds, + externalId: t.Record( + t.String(), + t.Omit( + t.Union([ + EpisodeId.patternProperties[PatternStringExact], + ExternalId().patternProperties[PatternStringExact], + ]), + ["link"], + ), + ), }), t.Object({ movie: t.Union([ diff --git a/api/tests/helpers/movies-helper.ts b/api/tests/helpers/movies-helper.ts index f0b1711f..e812486c 100644 --- a/api/tests/helpers/movies-helper.ts +++ b/api/tests/helpers/movies-helper.ts @@ -1,5 +1,5 @@ import { buildUrl } from "tests/utils"; -import { app } from "~/base"; +import { handlers } from "~/base"; import type { SeedMovie } from "~/models/movie"; import type { MovieWatchStatus } from "~/models/watchlist"; import { getJwtHeaders } from "./jwt"; @@ -11,7 +11,7 @@ export const getMovie = async ( ...query }: { langs?: string; preferOriginal?: boolean; with?: string[] }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`movies/${id}`, query), { method: "GET", headers: langs @@ -38,7 +38,7 @@ export const getMovies = async ({ langs?: string; preferOriginal?: boolean; }) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("movies", query), { method: "GET", headers: langs @@ -54,7 +54,7 @@ export const getMovies = async ({ }; export const createMovie = async (movie: SeedMovie) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("movies"), { method: "POST", body: JSON.stringify(movie), @@ -72,7 +72,7 @@ export const setMovieStatus = async ( id: string, status: Omit, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`movies/${id}/watchstatus`), { method: "POST", body: JSON.stringify(status), diff --git a/api/tests/helpers/series-helper.ts b/api/tests/helpers/series-helper.ts index 836fce43..090d2a85 100644 --- a/api/tests/helpers/series-helper.ts +++ b/api/tests/helpers/series-helper.ts @@ -1,12 +1,12 @@ import { buildUrl } from "tests/utils"; -import { app } from "~/base"; +import { handlers } from "~/base"; import type { SeedHistory } from "~/models/history"; import type { SeedSerie } from "~/models/serie"; import type { SerieWatchStatus } from "~/models/watchlist"; import { getJwtHeaders } from "./jwt"; export const createSerie = async (serie: SeedSerie) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("series"), { method: "POST", body: JSON.stringify(serie), @@ -27,7 +27,7 @@ export const getSerie = async ( ...query }: { langs?: string; preferOriginal?: boolean; with?: string[] }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`series/${id}`, query), { method: "GET", headers: langs @@ -46,7 +46,7 @@ export const getSeries = async ({ langs, ...query }: { langs?: string; preferOriginal?: boolean; with?: string[] }) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("series", query), { method: "GET", headers: langs @@ -76,7 +76,7 @@ export const getSeasons = async ( preferOriginal?: boolean; }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`series/${serie}/seasons`, opts), { method: "GET", headers: langs @@ -106,7 +106,7 @@ export const getEntries = async ( preferOriginal?: boolean; }, ) => { - const resp = await app.handle( + const 
resp = await handlers.handle( new Request(buildUrl(`series/${serie}/entries`, opts), { method: "GET", headers: langs @@ -131,7 +131,7 @@ export const getExtras = async ( query?: string; }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`series/${serie}/extras`, opts), { method: "GET", headers: await getJwtHeaders(), @@ -148,7 +148,7 @@ export const getUnknowns = async (opts: { sort?: string | string[]; query?: string; }) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("unknowns", opts), { method: "GET", headers: await getJwtHeaders(), @@ -169,7 +169,7 @@ export const getNews = async ({ langs?: string; preferOriginal?: boolean; }) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("news", opts), { method: "GET", headers: langs @@ -188,7 +188,7 @@ export const setSerieStatus = async ( id: string, status: Omit, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`series/${id}/watchstatus`), { method: "POST", body: JSON.stringify(status), @@ -216,7 +216,7 @@ export const getHistory = async ( preferOriginal?: boolean; }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`profiles/${profile}/history`, opts), { method: "GET", headers: langs @@ -232,7 +232,7 @@ export const getHistory = async ( }; export const addToHistory = async (profile: string, seed: SeedHistory[]) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`profiles/${profile}/history`), { method: "POST", body: JSON.stringify(seed), diff --git a/api/tests/helpers/shows-helper.ts b/api/tests/helpers/shows-helper.ts index 80073a54..9507a92d 100644 --- a/api/tests/helpers/shows-helper.ts +++ b/api/tests/helpers/shows-helper.ts @@ -1,5 +1,5 @@ import { buildUrl } from "tests/utils"; -import { app } from "~/base"; +import { handlers } from "~/base"; import { getJwtHeaders } from "./jwt"; export const getShows = async ({ @@ -14,7 +14,7 @@ export const getShows = async ({ langs?: string; preferOriginal?: boolean; }) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("shows", query), { method: "GET", headers: langs @@ -44,7 +44,7 @@ export const getWatchlist = async ( preferOriginal?: boolean; }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`profiles/${id}/watchlist`, query), { method: "GET", headers: langs @@ -74,7 +74,7 @@ export const getNextup = async ( preferOriginal?: boolean; }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`profiles/${id}/nextup`, query), { method: "GET", headers: langs diff --git a/api/tests/helpers/staff-helper.ts b/api/tests/helpers/staff-helper.ts index dbb2a613..98378cf1 100644 --- a/api/tests/helpers/staff-helper.ts +++ b/api/tests/helpers/staff-helper.ts @@ -1,9 +1,9 @@ import { buildUrl } from "tests/utils"; -import { app } from "~/base"; +import { handlers } from "~/base"; import { getJwtHeaders } from "./jwt"; export const getStaff = async (id: string, query: {}) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`staff/${id}`, query), { method: "GET", headers: await getJwtHeaders(), @@ -28,7 +28,7 @@ export const getStaffRoles = async ( preferOriginal?: boolean; }, ) => { - const resp = await app.handle( + const resp = await 
handlers.handle( new Request(buildUrl(`staff/${staff}/roles`, opts), { method: "GET", headers: langs @@ -52,7 +52,7 @@ export const getSerieStaff = async ( sort?: string | string[]; }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`series/${serie}/staff`, opts), { method: "GET", headers: await getJwtHeaders(), @@ -71,7 +71,7 @@ export const getMovieStaff = async ( sort?: string | string[]; }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`movies/${movie}/staff`, opts), { method: "GET", headers: await getJwtHeaders(), diff --git a/api/tests/helpers/studio-helper.ts b/api/tests/helpers/studio-helper.ts index 0ea8ea21..dfcc64bb 100644 --- a/api/tests/helpers/studio-helper.ts +++ b/api/tests/helpers/studio-helper.ts @@ -1,12 +1,12 @@ import { buildUrl } from "tests/utils"; -import { app } from "~/base"; +import { handlers } from "~/base"; import { getJwtHeaders } from "./jwt"; export const getStudio = async ( id: string, { langs, ...query }: { langs?: string; preferOriginal?: boolean }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`studios/${id}`, query), { method: "GET", headers: langs @@ -36,7 +36,7 @@ export const getShowsByStudio = async ( preferOriginal?: boolean; }, ) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl(`studios/${studio}/shows`, opts), { method: "GET", headers: langs diff --git a/api/tests/helpers/videos-helper.ts b/api/tests/helpers/videos-helper.ts index cc750a1a..d21a94dd 100644 --- a/api/tests/helpers/videos-helper.ts +++ b/api/tests/helpers/videos-helper.ts @@ -1,10 +1,10 @@ import { buildUrl } from "tests/utils"; -import { app } from "~/base"; +import { handlers } from "~/base"; import type { SeedVideo } from "~/models/video"; import { getJwtHeaders } from "./jwt"; export const createVideo = async (video: SeedVideo | SeedVideo[]) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("videos"), { method: "POST", body: JSON.stringify(Array.isArray(video) ? 
video : [video]), @@ -19,7 +19,7 @@ export const createVideo = async (video: SeedVideo | SeedVideo[]) => { }; export const getVideos = async () => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("videos"), { method: "GET", headers: await getJwtHeaders(), @@ -30,7 +30,7 @@ export const getVideos = async () => { }; export const deleteVideo = async (paths: string[]) => { - const resp = await app.handle( + const resp = await handlers.handle( new Request(buildUrl("videos"), { method: "DELETE", body: JSON.stringify(paths), @@ -43,3 +43,20 @@ export const deleteVideo = async (paths: string[]) => { const body = await resp.json(); return [resp, body] as const; }; + +export const linkVideos = async ( + links: { id: string; for: SeedVideo["for"] }[], +) => { + const resp = await handlers.handle( + new Request(buildUrl("videos/link"), { + method: "POST", + body: JSON.stringify(links), + headers: { + "Content-Type": "application/json", + ...(await getJwtHeaders()), + }, + }), + ); + const body = await resp.json(); + return [resp, body] as const; +}; diff --git a/api/tests/manual.ts b/api/tests/manual.ts index c038486b..98353b76 100644 --- a/api/tests/manual.ts +++ b/api/tests/manual.ts @@ -16,7 +16,11 @@ const [_, ser] = await createSerie(madeInAbyss); const [__, mov] = await createMovie(bubble); const [resp, body] = await createVideo([ { - guess: { title: "mia", season: [1], episode: [13], from: "test" }, + guess: { + title: "mia", + episodes: [{ season: 1, episode: 13 }], + from: "test", + }, part: null, path: "/video/mia s1e13.mkv", rendering: "sha2", @@ -26,9 +30,8 @@ const [resp, body] = await createVideo([ { guess: { title: "mia", - season: [2], - episode: [1], - year: [2017], + episodes: [{ season: 2, episode: 1 }], + years: [2017], from: "test", }, part: null, diff --git a/api/tests/movies/get-all-movies-with-null.test.ts b/api/tests/movies/get-all-movies-with-null.test.ts index 2cba7fb6..3a7d53d9 100644 --- a/api/tests/movies/get-all-movies-with-null.test.ts +++ b/api/tests/movies/get-all-movies-with-null.test.ts @@ -6,7 +6,7 @@ import { shows } from "~/db/schema"; import { bubble } from "~/models/examples"; import { dune1984 } from "~/models/examples/dune-1984"; import { dune } from "~/models/examples/dune-2021"; -import { app, createMovie, getMovies } from "../helpers"; +import { createMovie, getMovies, handlers } from "../helpers"; beforeAll(async () => { await db.delete(shows); @@ -21,7 +21,7 @@ describe("with a null value", () => { // instead we just make a new file for those /shrug // see: https://github.com/oven-sh/bun/issues/5738 beforeAll(async () => { - await createMovie({ + const [ret, body] = await createMovie({ slug: "no-air-date", translations: { en: { @@ -34,7 +34,7 @@ describe("with a null value", () => { tagline: null, tags: [], thumbnail: null, - trailerUrl: null, + trailer: null, }, }, genres: [], @@ -46,6 +46,7 @@ describe("with a null value", () => { externalId: {}, studios: [], }); + expectStatus(ret, body).toBe(201); }); it("sort by dates desc with a null value", async () => { @@ -74,7 +75,7 @@ describe("with a null value", () => { ), }); - resp = await app.handle( + resp = await handlers.handle( new Request(next, { headers: await getJwtHeaders() }), ); body = await resp.json(); @@ -123,7 +124,7 @@ describe("with a null value", () => { ), }); - resp = await app.handle( + resp = await handlers.handle( new Request(next, { headers: await getJwtHeaders() }), ); body = await resp.json(); diff --git 
a/api/tests/movies/get-all-movies.test.ts b/api/tests/movies/get-all-movies.test.ts index 94fef089..06f400d4 100644 --- a/api/tests/movies/get-all-movies.test.ts +++ b/api/tests/movies/get-all-movies.test.ts @@ -7,7 +7,7 @@ import { bubble } from "~/models/examples"; import { dune1984 } from "~/models/examples/dune-1984"; import { dune } from "~/models/examples/dune-2021"; import type { Movie } from "~/models/movie"; -import { app, createMovie, getMovies } from "../helpers"; +import { createMovie, getMovies, handlers } from "../helpers"; beforeAll(async () => { await db.delete(shows); @@ -72,7 +72,7 @@ describe("Get all movies", () => { }); expectStatus(resp, body).toBe(200); - resp = await app.handle( + resp = await handlers.handle( new Request(body.next, { headers: await getJwtHeaders() }), ); body = await resp.json(); @@ -107,7 +107,7 @@ describe("Get all movies", () => { ), }); - resp = await app.handle( + resp = await handlers.handle( new Request(next, { headers: await getJwtHeaders() }), ); body = await resp.json(); @@ -165,7 +165,7 @@ describe("Get all movies", () => { expect(items.length).toBe(1); expect(items[0].id).toBe(expectedIds[0]); // Get Second Page - resp = await app.handle( + resp = await handlers.handle( new Request(body.next, { headers: await getJwtHeaders() }), ); body = await resp.json(); @@ -182,7 +182,7 @@ describe("Get all movies", () => { }); expectStatus(resp, body).toBe(200); - const resp2 = await app.handle( + const resp2 = await handlers.handle( new Request(body.next, { headers: await getJwtHeaders() }), ); const body2 = await resp2.json(); @@ -195,7 +195,7 @@ describe("Get all movies", () => { }); it("Get /random", async () => { - const resp = await app.handle( + const resp = await handlers.handle( new Request("http://localhost/movies/random", { headers: await getJwtHeaders(), }), diff --git a/api/tests/movies/get-movie.test.ts b/api/tests/movies/get-movie.test.ts index 5e7b9ee9..96786c57 100644 --- a/api/tests/movies/get-movie.test.ts +++ b/api/tests/movies/get-movie.test.ts @@ -9,6 +9,7 @@ let bubbleId = ""; beforeAll(async () => { await db.delete(shows); + await db.delete(videos); await db.insert(videos).values(bubbleVideo); const [ret, body] = await createMovie(bubble); expect(ret.status).toBe(201); @@ -66,21 +67,29 @@ describe("Get movie", () => { const [resp, body] = await getMovie(bubble.slug, { langs: "fr,pr,*" }); expectStatus(resp, body).toBe(200); - expect(body).toMatchObject({ - slug: bubble.slug, - name: bubble.translations.en.name, - }); - expect(resp.headers.get("Content-Language")).toBe("en"); + expect(body.slug).toBe(bubble.slug); + const lang = resp.headers.get("Content-Language"); + if (lang === "en") { + expect(body.name).toBe(bubble.translations.en.name); + } else if (lang === "ja") { + expect(body.name).toBe(bubble.translations.ja.name); + } else { + expect(lang).toBe("en"); + } }); it("Works without accept-language header", async () => { const [resp, body] = await getMovie(bubble.slug, { langs: undefined }); expectStatus(resp, body).toBe(200); - expect(body).toMatchObject({ - slug: bubble.slug, - name: bubble.translations.en.name, - }); - expect(resp.headers.get("Content-Language")).toBe("en"); + expect(body.slug).toBe(bubble.slug); + const lang = resp.headers.get("Content-Language"); + if (lang === "en") { + expect(body.name).toBe(bubble.translations.en.name); + } else if (lang === "ja") { + expect(body.name).toBe(bubble.translations.ja.name); + } else { + expect(lang).toBe("en"); + } }); it("Fallback if translations does not exist", 
async () => { const [resp, body] = await getMovie(bubble.slug, { langs: "en-au" }); diff --git a/api/tests/movies/seed-movies.test.ts b/api/tests/movies/seed-movies.test.ts index 242a1161..6da873b8 100644 --- a/api/tests/movies/seed-movies.test.ts +++ b/api/tests/movies/seed-movies.test.ts @@ -49,7 +49,7 @@ describe("Movie seeding", () => { thumbnail: null, banner: null, logo: null, - trailerUrl: null, + trailer: null, }, }, }); @@ -154,7 +154,7 @@ describe("Movie seeding", () => { poster: null, thumbnail: null, logo: null, - trailerUrl: null, + trailer: null, }, }, }); @@ -180,7 +180,7 @@ describe("Movie seeding", () => { poster: null, thumbnail: null, logo: null, - trailerUrl: null, + trailer: null, }, }, }); @@ -308,7 +308,7 @@ describe("Movie seeding", () => { part: null, version: 1, rendering: "oeunhtoeuth", - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, }); expectStatus(vresp, video).toBe(201); @@ -334,7 +334,7 @@ describe("Movie seeding", () => { part: null, version: 2, rendering: "oeunhtoeuth", - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, }); expectStatus(vresp, video).toBe(201); @@ -359,7 +359,7 @@ describe("Movie seeding", () => { part: 1, version: 2, rendering: "oaoeueunhtoeuth", - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, }); expectStatus(vresp, video).toBe(201); @@ -385,14 +385,14 @@ describe("Movie seeding", () => { part: null, version: 1, rendering: "oeunhtoeuthoeu", - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, }, { path: "/video/bubble4.mkv", part: null, version: 1, rendering: "aoeuaoeu", - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, }, ]); expectStatus(vresp, video).toBe(201); diff --git a/api/tests/videos/getdel.test.ts b/api/tests/videos/getdel.test.ts index 9f5b8aea..aa346f4f 100644 --- a/api/tests/videos/getdel.test.ts +++ b/api/tests/videos/getdel.test.ts @@ -6,10 +6,11 @@ import { createVideo, deleteVideo, getVideos, + linkVideos, } from "tests/helpers"; import { expectStatus } from "tests/utils"; import { db } from "~/db"; -import { entries, shows, videos } from "~/db/schema"; +import { entries, entryVideoJoin, shows, videos } from "~/db/schema"; import { bubble, madeInAbyss } from "~/models/examples"; beforeAll(async () => { @@ -23,7 +24,12 @@ beforeAll(async () => { [ret, body] = await createVideo([ { - guess: { title: "mia", season: [1], episode: [13], from: "test" }, + guess: { + title: "mia", + episodes: [{ season: 1, episode: 13 }], + from: "test", + history: [], + }, part: null, path: "/video/mia s1e13.mkv", rendering: "sha2", @@ -33,10 +39,10 @@ beforeAll(async () => { { guess: { title: "mia", - season: [2], - episode: [1], - year: [2017], + episodes: [{ season: 2, episode: 1 }], + years: [2017], from: "test", + history: [], }, part: null, path: "/video/mia 2017 s2e1.mkv", @@ -45,19 +51,42 @@ beforeAll(async () => { for: [{ slug: `${madeInAbyss.slug}-s2e1` }], }, { - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, part: null, path: "/video/bubble.mkv", rendering: "sha5", version: 1, for: [{ movie: bubble.slug }], }, + { + guess: { + title: "mia", + episodes: [{ season: 1, episode: 1 }], // Different episode for unlinked + from: "test", + history: [], + }, + part: null, + path: "/video/mia-unlinked.mkv", + rendering: "sha-unlinked-1", + 
version: 1, + // No 'for' initially + }, + { + guess: { title: "bubble", from: "test", history: [] }, + part: null, + path: "/video/bubble-unlinked.mkv", + rendering: "sha-unlinked-2", + version: 1, + // No 'for' initially + }, ]); expectStatus(ret, body).toBe(201); - expect(body).toBeArrayOfSize(3); + expect(body).toBeArrayOfSize(5); expect(body[0].entries).toBeArrayOfSize(1); expect(body[1].entries).toBeArrayOfSize(1); expect(body[2].entries).toBeArrayOfSize(1); + expect(body[3].entries).toBeArrayOfSize(0); // Unlinked + expect(body[4].entries).toBeArrayOfSize(0); // Unlinked const items = await db.query.shows.findMany(); expect(items.find((x) => x.slug === "bubble")!.availableCount).toBe(1); @@ -103,7 +132,12 @@ describe("Video get/deletion", () => { it("With unknown", async () => { let [resp, body] = await createVideo({ - guess: { title: "mia", season: [1], episode: [13], from: "test" }, + guess: { + title: "mia", + episodes: [{ season: 1, episode: 13 }], + from: "test", + history: [], + }, part: null, path: "/video/mia s1e13 unknown test.mkv", rendering: "shanthnth", @@ -131,13 +165,20 @@ describe("Video get/deletion", () => { }, }, }); - expect(body.unmatched).toBeArrayOfSize(1); - expect(body.unmatched[0]).toBe("/video/mia s1e13 unknown test.mkv"); + expect(body.unmatched).toBeArrayOfSize(3); + expect(body.unmatched).toContain("/video/mia s1e13 unknown test.mkv"); + expect(body.unmatched).toContain("/video/mia-unlinked.mkv"); + expect(body.unmatched).toContain("/video/bubble-unlinked.mkv"); }); it("Mismatch title guess", async () => { let [resp, body] = await createVideo({ - guess: { title: "mia", season: [1], episode: [13], from: "test" }, + guess: { + title: "mia", + episodes: [{ season: 1, episode: 13 }], + from: "test", + history: [], + }, part: null, path: "/video/mia s1e13 mismatch.mkv", rendering: "mismatch", @@ -234,3 +275,69 @@ describe("Video get/deletion", () => { expect(body[0]).toBe("/video/mia s1e13 unknown test.mkv"); }); }); + +describe("Video linking", () => { + it("Should link videos to entries", async () => { + const allVideos = await db + .select({ + id: videos.id, + path: videos.path, + rendering: videos.rendering, + }) + .from(videos); + + const miaUnlinkedVideo = allVideos.find( + (v) => v.rendering === "sha-unlinked-1", + ); + const bubbleUnlinkedVideo = allVideos.find( + (v) => v.rendering === "sha-unlinked-2", + ); + + expect(miaUnlinkedVideo).toBeDefined(); + expect(bubbleUnlinkedVideo).toBeDefined(); + + const [resp, body] = await linkVideos([ + { + id: miaUnlinkedVideo!.id, + for: [{ slug: `${madeInAbyss.slug}-s1e13` }], + }, + { + id: bubbleUnlinkedVideo!.id, + for: [{ movie: bubble.slug }], + }, + ]); + + expectStatus(resp, body).toBe(201); + expect(body).toBeArrayOfSize(2); + + expect(body[0]).toMatchObject({ + id: miaUnlinkedVideo!.id, + path: "/video/mia-unlinked.mkv", + entries: [ + { + slug: expect.stringContaining(`${madeInAbyss.slug}-s1e13`), + }, + ], + }); + + expect(body[1]).toMatchObject({ + id: bubbleUnlinkedVideo!.id, + path: "/video/bubble-unlinked.mkv", + entries: [ + { + slug: expect.stringContaining(bubble.slug), + }, + ], + }); + + const miaShow = await db.query.shows.findFirst({ + where: eq(shows.slug, madeInAbyss.slug), + }); + expect(miaShow!.availableCount).toBe(1); + + const bubbleShow = await db.query.shows.findFirst({ + where: eq(shows.slug, bubble.slug), + }); + expect(bubbleShow!.availableCount).toBe(1); + }); +}); diff --git a/api/tests/videos/scanner.test.ts b/api/tests/videos/scanner.test.ts index 011c2094..9e0760eb 100644 
--- a/api/tests/videos/scanner.test.ts +++ b/api/tests/videos/scanner.test.ts @@ -18,7 +18,7 @@ beforeAll(async () => { describe("Video seeding", () => { it("Can create a video without entry", async () => { const [resp, body] = await createVideo({ - guess: { title: "unknown", from: "test" }, + guess: { title: "unknown", from: "test", history: [] }, part: null, path: "/video/unknown s1e13.mkv", rendering: "sha", @@ -46,7 +46,12 @@ describe("Video seeding", () => { it("With slug", async () => { const [resp, body] = await createVideo({ - guess: { title: "mia", season: [1], episode: [13], from: "test" }, + guess: { + title: "mia", + episodes: [{ season: 1, episode: 13 }], + from: "test", + history: [], + }, part: null, path: "/video/mia s1e13.mkv", rendering: "sha2", @@ -78,7 +83,7 @@ describe("Video seeding", () => { it("With movie", async () => { const [resp, body] = await createVideo({ - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, part: null, path: "/video/bubble.mkv", rendering: "sha3", @@ -110,7 +115,7 @@ describe("Video seeding", () => { it("Conflicting path", async () => { const [resp, body] = await createVideo({ - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, part: null, path: "/video/bubble.mkv", rendering: "sha4", @@ -142,7 +147,12 @@ describe("Video seeding", () => { it("With season/episode", async () => { const [resp, body] = await createVideo({ - guess: { title: "mia", season: [2], episode: [1], from: "test" }, + guess: { + title: "mia", + episodes: [{ season: 2, episode: 1 }], + from: "test", + history: [], + }, part: null, path: "/video/mia s2e1.mkv", rendering: "renderingsha", @@ -180,7 +190,12 @@ describe("Video seeding", () => { it("With special", async () => { const [resp, body] = await createVideo({ - guess: { title: "mia", season: [0], episode: [3], from: "test" }, + guess: { + title: "mia", + episodes: [{ season: 0, episode: 3 }], + from: "test", + history: [], + }, part: null, path: "/video/mia sp3.mkv", rendering: "notehu", @@ -217,7 +232,12 @@ describe("Video seeding", () => { it("With order", async () => { const [resp, body] = await createVideo({ - guess: { title: "mia", season: [0], episode: [3], from: "test" }, + guess: { + title: "mia", + episodes: [{ season: 0, episode: 3 }], + from: "test", + history: [], + }, part: null, path: "/video/mia 13.5.mkv", rendering: "notehu2", @@ -256,11 +276,11 @@ describe("Video seeding", () => { const [resp, body] = await createVideo({ guess: { title: "mia", - season: [0], - episode: [3], + episodes: [{ season: 1, episode: 13 }], from: "test", + history: [], externalId: { - themoviedatabase: { serieId: "72636", season: 1, episode: 13 }, + themoviedatabase: "72636", }, }, part: null, @@ -303,8 +323,9 @@ describe("Video seeding", () => { guess: { title: "bubble", from: "test", + history: [], externalId: { - themoviedatabase: { dataId: "912598" }, + themoviedatabase: "912598", }, }, part: null, @@ -344,7 +365,7 @@ describe("Video seeding", () => { it("Different path, same sha", async () => { const [resp, body] = await createVideo({ - guess: { title: "bubble", from: "test" }, + guess: { title: "bubble", from: "test", history: [] }, part: null, path: "/video/bubble invalid-sha.mkv", rendering: "sha", @@ -362,8 +383,9 @@ describe("Video seeding", () => { guess: { title: "bubble", from: "test", + history: [], externalId: { - themoviedatabase: { dataId: "912598" }, + themoviedatabase: "912598", }, }, part: null, @@ -408,8 +430,9 @@ 
describe("Video seeding", () => { guess: { title: "bubble", from: "test", + history: [], externalId: { - themoviedatabase: { dataId: "912598" }, + themoviedatabase: "912598", }, }, part: null, @@ -455,8 +478,9 @@ describe("Video seeding", () => { guess: { title: "bubble", from: "test", + history: [], externalId: { - themoviedatabase: { dataId: "912598" }, + themoviedatabase: "912598", }, }, part: 1, @@ -476,8 +500,9 @@ describe("Video seeding", () => { guess: { title: "bubble", from: "test", + history: [], externalId: { - themoviedatabase: { dataId: "912598" }, + themoviedatabase: "912598", }, }, part: 2, @@ -521,9 +546,12 @@ describe("Video seeding", () => { const [resp, body] = await createVideo({ guess: { title: "mia", - season: [1, 2], - episode: [13, 1], + episodes: [ + { season: 1, episode: 13 }, + { season: 2, episode: 1 }, + ], from: "test", + history: [], }, part: null, path: "/video/mia s1e13 & s2e1 [tmdb=72636].mkv", diff --git a/auth/.dockerignore b/auth/.dockerignore index 72546c82..1513d634 100644 --- a/auth/.dockerignore +++ b/auth/.dockerignore @@ -1,12 +1,9 @@ -Dockerfile* -*.md -.dockerignore -.gitignore -.env* - +** +!/go.mod +!/go.sum +!/**.go # generated via sqlc -dbc/ +!/sql +!/dbc # genereated via swag -docs/ - -# vim: ft=gitignore +!/docs diff --git a/auth/.env.example b/auth/.env.example index 5f5195d6..39220724 100644 --- a/auth/.env.example +++ b/auth/.env.example @@ -1,9 +1,6 @@ # vi: ft=sh # shellcheck disable=SC2034 -# http route prefix (will listen to $KEIBI_PREFIX/users for example) -KEIBI_PREFIX="" - # path of the private key used to sign jwts. If this is empty, a new one will be generated on startup RSA_PRIVATE_KEY_PATH="" diff --git a/auth/Dockerfile b/auth/Dockerfile index cc700b89..f094f210 100644 --- a/auth/Dockerfile +++ b/auth/Dockerfile @@ -1,18 +1,10 @@ FROM golang:1.24 AS build WORKDIR /app -RUN go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest -RUN go install github.com/swaggo/swag/cmd/swag@latest - COPY go.mod go.sum ./ RUN go mod download -COPY sqlc.yaml ./ -COPY sql ./sql -RUN sqlc generate - COPY . . -RUN swag init --parseDependency --outputTypes json,go RUN CGO_ENABLED=0 GOOS=linux go build -o /keibi FROM gcr.io/distroless/base-debian11 @@ -23,5 +15,4 @@ USER nonroot:nonroot COPY --from=build /keibi /app/keibi COPY sql ./sql -HEALTHCHECK --interval=30s --retries=15 CMD curl --fail http://localhost:4568$KEIBI_PREFIX/health || exit CMD ["/app/keibi"] diff --git a/auth/Dockerfile.dev b/auth/Dockerfile.dev index fbbb7edf..932ab12a 100644 --- a/auth/Dockerfile.dev +++ b/auth/Dockerfile.dev @@ -1,11 +1,10 @@ FROM golang:1.24 AS build WORKDIR /app -RUN go install github.com/bokwoon95/wgo@latest - COPY go.mod go.sum ./ RUN go mod download +COPY . . 
+ EXPOSE 4568 -HEALTHCHECK --interval=30s --retries=15 CMD curl --fail http://localhost:4568$KEIBI_PREFIX/health || exit -CMD ["wgo", "run", "-race", "."] +CMD ["go", "run", "-race", "."] diff --git a/auth/apikey.go b/auth/apikey.go index 2f7736d4..a09b4019 100644 --- a/auth/apikey.go +++ b/auth/apikey.go @@ -19,11 +19,11 @@ import ( ) type ApiKey struct { - Id uuid.UUID `json:"id" example:"e05089d6-9179-4b5b-a63e-94dd5fc2a397"` - Name string `json:"name" example:"myapp"` - CreatedAt time.Time `json:"createAt" example:"2025-03-29T18:20:05.267Z"` - LastUsed time.Time `json:"lastUsed" example:"2025-03-29T18:20:05.267Z"` - Claims jwt.MapClaims `json:"claims" example:"isAdmin: true"` + Id uuid.UUID `json:"id" example:"e05089d6-9179-4b5b-a63e-94dd5fc2a397"` + Name string `json:"name" example:"myapp"` + CreatedAt time.Time `json:"createAt" example:"2025-03-29T18:20:05.267Z"` + LastUsed time.Time `json:"lastUsed" example:"2025-03-29T18:20:05.267Z"` + Claims jwt.MapClaims `json:"claims" example:"isAdmin: true"` } type ApiKeyWToken struct { @@ -32,18 +32,18 @@ type ApiKeyWToken struct { } type ApiKeyDto struct { - Name string `json:"name" example:"myapp" validate:"alpha"` + Name string `json:"name" example:"myapp" validate:"alpha"` Claims jwt.MapClaims `json:"claims" example:"isAdmin: true"` } func MapDbKey(key *dbc.Apikey) ApiKeyWToken { return ApiKeyWToken{ ApiKey: ApiKey{ - Id: key.Id, - Name: key.Name, - Claims: key.Claims, + Id: key.Id, + Name: key.Name, + Claims: key.Claims, CreatedAt: key.CreatedAt, - LastUsed: key.LastUsed, + LastUsed: key.LastUsed, }, Token: fmt.Sprintf("%s-%s", key.Name, key.Token), } @@ -91,15 +91,15 @@ func (h *Handler) CreateApiKey(c echo.Context) error { if err != nil { u, _ := h.db.GetUser(context.Background(), dbc.GetUserParams{ UseId: true, - Id: uid, + Id: uid, }) user = &u[0].User.Pk } dbkey, err := h.db.CreateApiKey(context.Background(), dbc.CreateApiKeyParams{ - Name: req.Name, - Token: base64.RawURLEncoding.EncodeToString(id), - Claims: req.Claims, + Name: req.Name, + Token: base64.RawURLEncoding.EncodeToString(id), + Claims: req.Claims, CreatedBy: user, }) if ErrIs(err, pgerrcode.UniqueViolation) { @@ -169,7 +169,7 @@ func (h *Handler) ListApiKey(c echo.Context) error { return c.JSON(200, Page[ApiKey]{ Items: ret, - This: c.Request().URL.String(), + This: c.Request().URL.String(), }) } @@ -182,7 +182,7 @@ func (h *Handler) createApiJwt(apikey string) (string, error) { key, fromEnv := h.config.EnvApiKeys[info[0]] if !fromEnv { dbKey, err := h.db.GetApiKey(context.Background(), dbc.GetApiKeyParams{ - Name: info[0], + Name: info[0], Token: info[1], }) if err == pgx.ErrNoRows { diff --git a/auth/config.go b/auth/config.go index 86ad7190..37f9a65a 100644 --- a/auth/config.go +++ b/auth/config.go @@ -2,9 +2,11 @@ package main import ( "context" + "crypto" "crypto/rand" "crypto/rsa" "crypto/x509" + "encoding/base64" "encoding/json" "encoding/pem" "fmt" @@ -15,20 +17,21 @@ import ( "github.com/golang-jwt/jwt/v5" "github.com/google/uuid" + "github.com/lestrrat-go/jwx/v3/jwk" "github.com/zoriya/kyoo/keibi/dbc" ) type Configuration struct { - Prefix string JwtPrivateKey *rsa.PrivateKey JwtPublicKey *rsa.PublicKey + JwtKid string PublicUrl string DefaultClaims jwt.MapClaims FirstUserClaims jwt.MapClaims GuestClaims jwt.MapClaims ProtectedClaims []string ExpirationDelay time.Duration - EnvApiKeys map[string]ApiKeyWToken + EnvApiKeys map[string]ApiKeyWToken } var DefaultConfig = Configuration{ @@ -36,14 +39,13 @@ var DefaultConfig = Configuration{ FirstUserClaims: 
make(jwt.MapClaims), ProtectedClaims: []string{"permissions"}, ExpirationDelay: 30 * 24 * time.Hour, - EnvApiKeys: make(map[string]ApiKeyWToken), + EnvApiKeys: make(map[string]ApiKeyWToken), } func LoadConfiguration(db *dbc.Queries) (*Configuration, error) { ret := DefaultConfig ret.PublicUrl = os.Getenv("PUBLIC_URL") - ret.Prefix = os.Getenv("KEIBI_PREFIX") claims := os.Getenv("EXTRA_CLAIMS") if claims != "" { @@ -100,11 +102,20 @@ func LoadConfiguration(db *dbc.Queries) (*Configuration, error) { if err != nil { return nil, err } - ret.JwtPublicKey = &ret.JwtPrivateKey.PublicKey } + ret.JwtPublicKey = &ret.JwtPrivateKey.PublicKey + key, err := jwk.Import(ret.JwtPublicKey) + if err != nil { + return nil, err + } + thumbprint, err := key.Thumbprint(crypto.SHA256) + if err != nil { + return nil, err + } + ret.JwtKid = base64.RawStdEncoding.EncodeToString(thumbprint) for _, env := range os.Environ() { - if !strings.HasPrefix(env, "KEIBI_APIKEY_"){ + if !strings.HasPrefix(env, "KEIBI_APIKEY_") { continue } v := strings.Split(env, "=") @@ -128,8 +139,8 @@ func LoadConfiguration(db *dbc.Queries) (*Configuration, error) { name = strings.ToLower(name) ret.EnvApiKeys[name] = ApiKeyWToken{ ApiKey: ApiKey{ - Id: uuid.New(), - Name: name, + Id: uuid.New(), + Name: name, Claims: claims, }, Token: v[1], diff --git a/auth/jwt.go b/auth/jwt.go index d1dc7d30..a0977593 100644 --- a/auth/jwt.go +++ b/auth/jwt.go @@ -79,6 +79,7 @@ func (h *Handler) createGuestJwt() *string { Time: time.Now().UTC().Add(time.Hour), } jwt := jwt.NewWithClaims(jwt.SigningMethodRS256, claims) + jwt.Header["kid"] = h.config.JwtKid t, err := jwt.SignedString(h.config.JwtPrivateKey) if err != nil { return nil @@ -112,6 +113,7 @@ func (h *Handler) createJwt(token string) (string, error) { Time: time.Now().UTC().Add(time.Hour), } jwt := jwt.NewWithClaims(jwt.SigningMethodRS256, claims) + jwt.Header["kid"] = h.config.JwtKid t, err := jwt.SignedString(h.config.JwtPrivateKey) if err != nil { return "", err @@ -144,6 +146,7 @@ func (h *Handler) GetJwks(c echo.Context) error { key.Set("use", "sig") key.Set("key_ops", "verify") + key.Set("kid", h.config.JwtKid) set := jwk.NewSet() set.AddKey(key) return c.JSON(200, set) diff --git a/auth/main.go b/auth/main.go index 96f106b2..4360df5c 100644 --- a/auth/main.go +++ b/auth/main.go @@ -228,8 +228,8 @@ func main() { } h.config = conf - g := e.Group(conf.Prefix) - r := e.Group(conf.Prefix) + g := e.Group("/auth") + r := e.Group("/auth") r.Use(h.TokenToJwt) r.Use(echojwt.WithConfig(echojwt.Config{ SigningMethod: "RS256", diff --git a/auth/shell.nix b/auth/shell.nix new file mode 100644 index 00000000..0cf2b1f4 --- /dev/null +++ b/auth/shell.nix @@ -0,0 +1,15 @@ +{pkgs ? import {}}: +pkgs.mkShell { + packages = with pkgs; [ + go + wgo + go-migrate + sqlc + go-swag + # for psql in cli (+ pgformatter for sql files) + postgresql_15 + pgformatter + # to run tests + hurl + ]; +} diff --git a/auth/utils.go b/auth/utils.go index d336a7bd..dea74483 100644 --- a/auth/utils.go +++ b/auth/utils.go @@ -56,7 +56,7 @@ func GetCurrentSessionId(c echo.Context) (uuid.UUID, error) { func CheckPermissions(c echo.Context, perms []string) error { token, ok := c.Get("user").(*jwt.Token) - if !ok{ + if !ok { return echo.NewHTTPError(401, "Not logged in") } sub, err := token.Claims.GetSubject() diff --git a/back/shell.nix b/back/shell.nix new file mode 100644 index 00000000..b4fd8ad0 --- /dev/null +++ b/back/shell.nix @@ -0,0 +1,15 @@ +{pkgs ? 
import {}}: let + dotnet = with pkgs.dotnetCorePackages; + combinePackages [ + sdk_8_0 + aspnetcore_8_0 + ]; +in + pkgs.mkShell { + packages = with pkgs; [ + dotnet + csharpier + ]; + + DOTNET_ROOT = "${dotnet}"; + } diff --git a/chart/shell.nix b/chart/shell.nix new file mode 100644 index 00000000..b6333513 --- /dev/null +++ b/chart/shell.nix @@ -0,0 +1,6 @@ +{pkgs ? import {}}: +pkgs.mkShell { + packages = with pkgs; [ + kubernetes-helm + ]; +} diff --git a/docker-compose.build.yml b/docker-compose.build.yml index b3ad1082..87fceeb6 100644 --- a/docker-compose.build.yml +++ b/docker-compose.build.yml @@ -66,8 +66,6 @@ services: condition: service_healthy env_file: - ./.env - environment: - - KEIBI_PREFIX=/auth labels: - "traefik.enable=true" - "traefik.http.routers.auth.rule=PathPrefix(`/auth/`)" diff --git a/docker-compose.dev-v5.yml b/docker-compose.dev-v5.yml deleted file mode 100644 index bf29b057..00000000 --- a/docker-compose.dev-v5.yml +++ /dev/null @@ -1,188 +0,0 @@ -x-transcoder: &transcoder-base - build: - context: ./transcoder - dockerfile: Dockerfile.dev - networks: - default: - aliases: - - transcoder - ports: - - "7666:7666" - restart: on-failure - cpus: 1 - env_file: - - ./.env - environment: - - GOCODER_PREFIX=/video - volumes: - - ./transcoder:/app - - ${LIBRARY_ROOT}:/video:ro - - ${CACHE_ROOT}:/cache - - transcoder_metadata:/metadata - -services: - front: - build: - context: ./front - dockerfile: Dockerfile.dev - volumes: - - ./front:/app - - /app/.yarn - - /app/node_modules - - /app/apps/mobile/node_modules - - /app/apps/web/.next/ - - /app/apps/mobile/.expo/ - ports: - - "3000:3000" - - "8081:8081" - restart: on-failure - environment: - - KYOO_URL=${KYOO_URL:-http://api:5000/api} - labels: - - "traefik.enable=true" - - "traefik.http.routers.front.rule=PathPrefix(`/`)" - - auth: - build: - context: ./auth - dockerfile: Dockerfile.dev - restart: on-failure - depends_on: - postgres: - condition: service_healthy - ports: - - "4568:4568" - env_file: - - ./.env - environment: - - KEIBI_PREFIX=/auth - volumes: - - ./auth:/app - labels: - - "traefik.enable=true" - - "traefik.http.routers.auth.rule=PathPrefix(`/auth/`) || PathPrefix(`/.well-known/`)" - - api: - build: - context: ./api - dockerfile: Dockerfile.dev - restart: on-failure - depends_on: - postgres: - condition: service_healthy - volumes: - - ./api:/app - - /app/node_modules - ports: - - "3567:3567" - environment: - - KYOO_PREFIX=/api - - JWT_ISSUER=${PUBLIC_URL} - env_file: - - ./.env - labels: - - "traefik.enable=true" - - "traefik.http.routers.api.rule=PathPrefix(`/api/`) || PathPrefix(`/swagger`)" - - "traefik.http.routers.api.middlewares=phantom-token" - - "traefik.http.middlewares.phantom-token.forwardauth.address=http://auth:4568/auth/jwt" - - "traefik.http.middlewares.phantom-token.forwardauth.authRequestHeaders=Authorization,X-Api-Key" - - "traefik.http.middlewares.phantom-token.forwardauth.authResponseHeaders=Authorization" - - # scanner: - # build: ./scanner - # restart: on-failure - # depends_on: - # back: - # condition: service_healthy - # env_file: - # - ./.env - # environment: - # - KYOO_URL=${KYOO_URL:-http://back:5000/api} - # volumes: - # - ${LIBRARY_ROOT}:/video:ro - # - # matcher: - # build: ./scanner - # command: matcher - # restart: on-failure - # depends_on: - # back: - # condition: service_healthy - # env_file: - # - ./.env - # environment: - # - KYOO_URL=${KYOO_URL:-http://back:5000/api} - - transcoder: - <<: *transcoder-base - profiles: ['', 'cpu'] - - transcoder-nvidia: - <<: 
*transcoder-base - deploy: - resources: - reservations: - devices: - - capabilities: [gpu] - driver: cdi - device_ids: - - nvidia.com/gpu=all - environment: - - GOCODER_PREFIX=/video - - GOCODER_HWACCEL=nvidia - profiles: ['nvidia'] - - transcoder-vaapi: - <<: *transcoder-base - devices: - - /dev/dri:/dev/dri - environment: - - GOCODER_PREFIX=/video - - GOCODER_HWACCEL=vaapi - - GOCODER_VAAPI_RENDERER=${GOCODER_VAAPI_RENDERER:-/dev/dri/renderD128} - profiles: ['vaapi'] - # qsv is the same setup as vaapi but with the hwaccel env var different - transcoder-qsv: - <<: *transcoder-base - devices: - - /dev/dri:/dev/dri - environment: - - GOCODER_PREFIX=/video - - GOCODER_HWACCEL=qsv - - GOCODER_VAAPI_RENDERER=${GOCODER_VAAPI_RENDERER:-/dev/dri/renderD128} - profiles: ['qsv'] - - traefik: - image: traefik:v3.4 - restart: on-failure - command: - - "--providers.docker=true" - - "--providers.docker.exposedbydefault=false" - - "--entryPoints.web.address=:8901" - - "--accesslog=true" - ports: - - "8901:8901" - volumes: - - "/var/run/docker.sock:/var/run/docker.sock:ro" - - postgres: - image: postgres:15 - restart: on-failure - env_file: - - ./.env - volumes: - - db:/var/lib/postgresql/data - ports: - - "5432:5432" - environment: - - POSTGRES_HOST_AUTH_METHOD=trust - command: ["postgres", "-c", "log_statement=all"] - healthcheck: - test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"] - interval: 5s - timeout: 5s - retries: 5 - -volumes: - db: - transcoder_metadata: diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index b2335816..deb0c9da 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -8,85 +8,49 @@ x-transcoder: &transcoder-base - transcoder ports: - "7666:7666" - restart: on-failure + restart: unless-stopped cpus: 1 env_file: - ./.env environment: - GOCODER_PREFIX=/video volumes: - - ./transcoder:/app - ${LIBRARY_ROOT}:/video:ro - ${CACHE_ROOT}:/cache - - metadata:/metadata + - transcoder_metadata:/metadata + develop: + watch: + - action: sync+restart + path: ./transcoder + target: /app services: - back: - build: - context: ./back - dockerfile: Dockerfile.dev - ports: - - "5000:5000" - restart: on-failure - environment: - - TRANSCODER_URL=${TRANSCODER_URL:-http://transcoder:7666/video} - - KYOO_PREFIX=/api - env_file: - - ./.env - depends_on: - postgres: - condition: service_healthy - meilisearch: - condition: service_healthy - rabbitmq: - condition: service_healthy - migrations: - condition: service_completed_successfully - volumes: - - ./back:/app - - /app/out/ - - kyoo:/metadata - labels: - - "traefik.enable=true" - - "traefik.http.routers.api.rule=PathPrefix(`/api/`)" - - migrations: - build: - context: ./back - dockerfile: Dockerfile.migrations - restart: "no" - depends_on: - postgres: - condition: service_healthy - env_file: - - ./.env - - front: - build: - context: ./front - dockerfile: Dockerfile.dev - volumes: - - ./front:/app - - /app/.yarn - - /app/node_modules - - /app/apps/mobile/node_modules - - /app/apps/web/.next/ - - /app/apps/mobile/.expo/ - ports: - - "3000:3000" - - "8081:8081" - restart: on-failure - environment: - - KYOO_URL=${KYOO_URL:-http://back:5000/api} - labels: - - "traefik.enable=true" - - "traefik.http.routers.front.rule=PathPrefix(`/`)" + # front: + # build: + # context: ./front + # dockerfile: Dockerfile.dev + # volumes: + # - ./front:/app + # - /app/.yarn + # - /app/node_modules + # - /app/apps/mobile/node_modules + # - /app/apps/web/.next/ + # - /app/apps/mobile/.expo/ + # ports: + # - "3000:3000" + # - "8081:8081" 
+ # restart: unless-stopped + # environment: + # - KYOO_URL=${KYOO_URL:-http://api:5000/api} + # labels: + # - "traefik.enable=true" + # - "traefik.http.routers.front.rule=PathPrefix(`/`)" auth: build: context: ./auth dockerfile: Dockerfile.dev - restart: on-failure + restart: unless-stopped depends_on: postgres: condition: service_healthy @@ -94,52 +58,84 @@ services: - "4568:4568" env_file: - ./.env - environment: - - KEIBI_PREFIX=/auth - volumes: - - ./auth:/app labels: - "traefik.enable=true" - - "traefik.http.routers.auth.rule=PathPrefix(`/auth/`)" + - "traefik.http.routers.auth.rule=PathPrefix(`/auth/`) || PathPrefix(`/.well-known/`)" + develop: + watch: + - action: sync+restart + path: ./auth + target: /app + + api: + build: + context: ./api + dockerfile: Dockerfile.dev + restart: unless-stopped + depends_on: + postgres: + condition: service_healthy + ports: + - "3567:3567" + environment: + - KYOO_PREFIX=/api + - JWT_ISSUER=${PUBLIC_URL} + env_file: + - ./.env + labels: + - "traefik.enable=true" + - "traefik.http.routers.api.rule=PathPrefix(`/api/`) || PathPrefix(`/swagger`)" + - "traefik.http.routers.api.middlewares=phantom-token" + - "traefik.http.middlewares.phantom-token.forwardauth.address=http://auth:4568/auth/jwt" + - "traefik.http.middlewares.phantom-token.forwardauth.authRequestHeaders=Authorization,X-Api-Key" + - "traefik.http.middlewares.phantom-token.forwardauth.authResponseHeaders=Authorization" + develop: + watch: + - action: sync + path: ./api + target: /app + - action: rebuild + path: ./api/packages.json scanner: build: ./scanner - restart: on-failure + restart: unless-stopped depends_on: - back: + api: + condition: service_started + postgres: condition: service_healthy env_file: - ./.env + ports: + - "4389:4389" environment: - - KYOO_URL=${KYOO_URL:-http://back:5000/api} + # Use this env var once we use mTLS for auth + # - KYOO_URL=${KYOO_URL:-http://api:3567/api} + - KYOO_URL=${KYOO_URL:-http://traefik:8901/api} + - JWKS_URL=http://auth:4568/.well-known/jwks.json + - JWT_ISSUER=${PUBLIC_URL} volumes: - ${LIBRARY_ROOT}:/video:ro - - matcher: - build: ./scanner - command: matcher - restart: on-failure - depends_on: - back: - condition: service_healthy - env_file: - - ./.env - environment: - - KYOO_URL=${KYOO_URL:-http://back:5000/api} - - autosync: - build: ./autosync - restart: on-failure - depends_on: - rabbitmq: - condition: service_healthy - env_file: - - ./.env + labels: + - "traefik.enable=true" + - "traefik.http.routers.scanner.rule=PathPrefix(`/scanner/`)" + - "traefik.http.routers.scanner.middlewares=phantom-token" + - "traefik.http.middlewares.phantom-token.forwardauth.address=http://auth:4568/auth/jwt" + - "traefik.http.middlewares.phantom-token.forwardauth.authRequestHeaders=Authorization,X-Api-Key" + - "traefik.http.middlewares.phantom-token.forwardauth.authResponseHeaders=Authorization" + command: fastapi dev scanner --host 0.0.0.0 --port 4389 + develop: + watch: + - action: sync + path: ./scanner + target: /app + - action: rebuild + path: ./scanner/pyproject.toml transcoder: <<: *transcoder-base profiles: ['', 'cpu'] - transcoder-nvidia: <<: *transcoder-base deploy: @@ -154,7 +150,6 @@ services: - GOCODER_PREFIX=/video - GOCODER_HWACCEL=nvidia profiles: ['nvidia'] - transcoder-vaapi: <<: *transcoder-base devices: @@ -177,7 +172,7 @@ services: traefik: image: traefik:v3.4 - restart: on-failure + restart: unless-stopped command: - "--providers.docker=true" - "--providers.docker.exposedbydefault=false" @@ -190,7 +185,7 @@ services: postgres: image: 
postgres:15 - restart: on-failure + restart: unless-stopped env_file: - ./.env volumes: @@ -198,49 +193,17 @@ services: ports: - "5432:5432" environment: + - POSTGRES_USER=$PGUSER + - POSTGRES_PASSWORD=$PGPASSWORD + - POSTGRES_DB=$PGDATABASE - POSTGRES_HOST_AUTH_METHOD=trust command: ["postgres", "-c", "log_statement=all"] healthcheck: - test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"] + test: ["CMD-SHELL", "pg_isready -U ${PGUSER} -d ${PGDATABASE}"] interval: 5s timeout: 5s retries: 5 - meilisearch: - image: getmeili/meilisearch:v1.4 - restart: on-failure - ports: - - "7700:7700" - volumes: - - search:/meili_data - environment: - - MEILI_ENV=development - env_file: - - .env - healthcheck: - test: ["CMD", "wget", "--no-verbose", "--spider", "http://meilisearch:7700/health"] - interval: 30s - timeout: 5s - retries: 5 - - rabbitmq: - image: rabbitmq:4-management-alpine - restart: on-failure - environment: - - RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER} - - RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS} - ports: - - 5672:5672 - - 15672:15672 - healthcheck: - test: rabbitmq-diagnostics -q ping - interval: 30s - timeout: 10s - retries: 5 - start_period: 10s - volumes: - kyoo: db: - metadata: - search: + transcoder_metadata: diff --git a/scanner/.dockerignore b/scanner/.dockerignore index e136516d..9dcf9e85 100644 --- a/scanner/.dockerignore +++ b/scanner/.dockerignore @@ -1,2 +1,5 @@ -Dockerfile* - +** +!/pyproject.toml +!/uv.lock +!/scanner +!/migrations diff --git a/scanner/.env.example b/scanner/.env.example index 90b614e7..15b1a847 100644 --- a/scanner/.env.example +++ b/scanner/.env.example @@ -1,12 +1,25 @@ # vi: ft=sh # shellcheck disable=SC2034 -# RabbitMQ settings -# URL examples: https://docs.aio-pika.com/#url-examples -# This uses AIORMQ (https://github.com/mosquito/aiormq/) under the hood, and supports whatever the library supports. -# RABBITMQ_URL=ampqs://user:password@rabbitmq-server:1234/vhost?capath=/path/to/cacert.pem&certfile=/path/to/cert.pem&keyfile=/path/to/key.pem -# These values are ignored when the RABBITMQ_URL is set -RABBITMQ_HOST=rabbitmq -RABBITMQ_PORT=5672 -RABBITMQ_USER=guest -RABBITMQ_PASSWORD=guest +# Root directory that will be traversed to find video files (inside the container) +SCANNER_LIBRARY_ROOT="/video" +# A pattern (regex) to ignore video files. +LIBRARY_IGNORE_PATTERN=".*/[dD]ownloads?/.*" + +# Keep those empty to use kyoo's default api key. You can also specify a custom API key if you want. +# go to https://www.themoviedb.org/settings/api and copy the read access token (not the api key) +THEMOVIEDB_API_ACCESS_TOKEN="" + +KYOO_URL="http://api:3567/api" +KYOO_APIKEY="" + +JWKS_URL="http://auth:4568/.well-known/jwks.json" +JWT_ISSUER=$PUBLIC_URL + +# The behavior of the below variables match what is documented here: +# https://www.postgresql.org/docs/current/libpq-envars.html +PGUSER=kyoo +PGPASSWORD=password +PGDATABASE=kyoo +PGHOST=postgres +PGPORT=5432 diff --git a/scanner/.gitignore b/scanner/.gitignore index bee8a64b..91953647 100644 --- a/scanner/.gitignore +++ b/scanner/.gitignore @@ -1 +1,2 @@ __pycache__ +/.venv diff --git a/scanner/Dockerfile b/scanner/Dockerfile index 71311684..72160d50 100644 --- a/scanner/Dockerfile +++ b/scanner/Dockerfile @@ -1,9 +1,14 @@ FROM python:3.13 +ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy UV_PYTHON_DOWNLOADS=0 WORKDIR /app -COPY ./requirements.txt . 
-RUN pip3 install -r ./requirements.txt +RUN --mount=from=ghcr.io/astral-sh/uv,source=/uv,target=/bin/uv \ + --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --locked +ENV PATH="/app/.venv/bin:$PATH" COPY . . -ENTRYPOINT ["python3", "-m"] -CMD ["scanner"] + +CMD ["fastapi", "run", "scanner", "--port", "4389"] diff --git a/scanner/README.md b/scanner/README.md index 6247aed6..5532b1c1 100644 --- a/scanner/README.md +++ b/scanner/README.md @@ -1,6 +1,6 @@ # Scanner -## Workflow (for v5, not current) +## Workflow In order of action: @@ -17,9 +17,8 @@ In order of action: from: "guessit" kind: movie | episode | extra title: string, - year?: number[], - season?: number[], - episode?: number[], + years?: number[], + episodes?: {season?: number, episode: number}[], ... }, } @@ -36,41 +35,42 @@ In order of action: from: "anilist", kind: movie | episode | extra name: string, - year: number | null, - season?: number[], - episode?: number[], - absolute?: number[], + years: number[], + episodes?: {season?: number, episode: number}[], externalId: Record, history: { from: "guessit" kind: movie | episode | extra title: string, - year?: number, - season?: number[], - episode?: number[], - ... + years?: number[], + episodes?: {season?: number, episode: number}[], }, ... }, } ``` - - If kind is episode, try to find the serie's id on kyoo (using the previously fetched data from `/videos`): + - Try to find the series id on kyoo (using the previously fetched data from `/videos`): - if another video in the list of already registered videos has the same `kind`, `name` & `year`, assume it's the same - if a match is found, add to the video's json: ```json5 { - entries: (uuid | slug | { - show: uuid | slug, - season: number, - episode: number, - externalId?: Record // takes priority over season/episode for matching if we have one + entries: ( + | { slug: string } + | { movie: uuid | string } + | { serie: uuid | slug, season: number, episode: number } + | { serie: uuid | slug, order: number } + | { serie: uuid | slug, special: number } + | { externalId?: Record } + | { externalId?: Record } })[], } ``` - Scanner pushes everything to the api in a single post `/videos` call - - Api registers every video in the database - - For each video without an associated entry, the guess data + the video's id is sent to the Matcher via a queue. - - Matcher retrieves metadata from the movie/serie + ALL episodes/seasons (from an external provider) - - Matcher pushes every metadata to the api (if there are 1000 episodes but only 1 video, still push the 1000 episodes) + - Api registers every video in the database & return the list of videos not matched to an existing serie/movie. 
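For a concrete picture of the `POST /videos` call described in the bullet above, here is a minimal sketch of a single scanner-produced item, assuming the `SeedVideo` shape used elsewhere in this diff (`path`, `rendering`, `part`, `version`, `guess`, optional `for`); the path, rendering hash and slug are made-up examples, not values from the patch.

```typescript
// Minimal sketch (illustrative only) of one item the scanner would send
// in its single POST /videos call, using the SeedVideo model from this diff.
import type { SeedVideo } from "~/models/video";

const item: SeedVideo = {
	path: "/video/mia s1e13.mkv", // hypothetical file path
	rendering: "sha2", // hypothetical rendering hash
	part: null,
	version: 1,
	guess: {
		title: "mia",
		episodes: [{ season: 1, episode: 13 }],
		years: [2017],
		from: "guessit",
		history: [],
	},
	// only present when the guess already resolves to a known entry;
	// otherwise the API returns the path in its unmatched list.
	for: [{ slug: "made-in-abyss-s1e13" }], // hypothetical slug
};
```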
+ - Scanner adds every non-matched video to a queue - +For each item in the queue, the scanner will: + - retrieves metadata from the movie/serie + ALL episodes/seasons (from an external provider) + - pushes every metadata to the api (if there are 1000 episodes but only 1 video, still push the 1000 episodes) + + diff --git a/scanner/matcher/__init__.py b/scanner/matcher/__init__.py deleted file mode 100644 index 4445f9aa..00000000 --- a/scanner/matcher/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -async def main(): - import logging - import sys - from providers.provider import Provider - from providers.kyoo_client import KyooClient - from .matcher import Matcher - from .subscriber import Subscriber - - logging.basicConfig(level=logging.INFO) - if len(sys.argv) > 1 and sys.argv[1] == "-v": - logging.basicConfig(level=logging.DEBUG) - logging.getLogger("watchfiles").setLevel(logging.WARNING) - logging.getLogger("rebulk").setLevel(logging.WARNING) - - async with KyooClient() as kyoo, Subscriber() as sub: - provider = Provider.get_default(kyoo.client) - matcher = Matcher(kyoo, provider) - await sub.listen(matcher) diff --git a/scanner/matcher/__main__.py b/scanner/matcher/__main__.py deleted file mode 100644 index 670779da..00000000 --- a/scanner/matcher/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env python - -import asyncio -import matcher - -asyncio.run(matcher.main()) diff --git a/scanner/matcher/matcher.py b/scanner/matcher/matcher.py deleted file mode 100644 index ecc06363..00000000 --- a/scanner/matcher/matcher.py +++ /dev/null @@ -1,242 +0,0 @@ -from datetime import timedelta -from typing import Literal, Optional -import asyncio -from logging import getLogger -from providers.provider import Provider, ProviderError -from providers.types.collection import Collection -from providers.types.show import Show -from providers.types.episode import Episode, PartialShow -from providers.types.season import Season -from providers.kyoo_client import KyooClient -from .parser.guess import guessit -from .cache import cache, exec_as_cache, make_key - -logger = getLogger(__name__) - - -class Matcher: - def __init__(self, client: KyooClient, provider: Provider) -> None: - self._client = client - self._provider = provider - - self._collection_cache = {} - self._show_cache = {} - self._season_cache = {} - - async def delete(self, path: str): - try: - await self._client.delete(path) - return True - except Exception as e: - logger.exception("Unhandled error", exc_info=e) - return False - - async def identify(self, path: str): - try: - await self._identify(path) - await self._client.delete_issue(path) - except ProviderError as e: - logger.error(e) - await self._client.create_issue(path, str(e)) - except Exception as e: - logger.exception("Unhandled error", exc_info=e) - await self._client.create_issue( - path, "Unknown error", {"type": type(e).__name__, "message": str(e)} - ) - return False - return True - - async def _identify(self, path: str): - raw = guessit(path, xem_titles=await self._provider.get_expected_titles()) - - if "mimetype" not in raw or not raw["mimetype"].startswith("video"): - return - - logger.info("Identified %s: %s", path, raw) - - title = raw.get("title") - if not isinstance(title, str): - raise ProviderError(f"Could not guess title, found: {title}") - - year = raw.get("year") - if year is not None and not isinstance(year, int): - year = None - logger.warn(f"Invalid year value. Found {year}. 
Ignoring") - - if raw["type"] == "movie": - await self.search_movie(title, year, path) - elif raw["type"] == "episode": - season = raw.get("season") - if isinstance(season, list): - raise ProviderError( - f"An episode can't have multiple seasons (found {raw.get('season')} for {path})" - ) - if season is not None and not isinstance(season, int): - raise ProviderError(f"Could not guess season, found: {season}") - episode = raw.get("episode") - if isinstance(episode, list): - raise ProviderError( - f"Multi-episodes files are not yet supported (for {path})" - ) - if not isinstance(episode, int): - raise ProviderError(f"Could not guess episode, found: {episode}") - - await self.search_episode(title, year, season, episode, path) - else: - logger.warn("Unknown video file type: %s", raw["type"]) - - async def search_movie(self, title: str, year: Optional[int], path: str): - movie = await self._provider.search_movie(title, year) - movie.file_title = title - movie.path = path - logger.debug("Got movie: %s", movie) - movie_id = await self._client.post("movies", data=movie.to_kyoo()) - - if any(movie.collections): - ids = await asyncio.gather( - *(self.create_or_get_collection(x) for x in movie.collections) - ) - await asyncio.gather( - *(self._client.link_collection(x, "movie", movie_id) for x in ids) - ) - - async def search_episode( - self, - title: str, - year: Optional[int], - season: Optional[int], - episode_nbr: int, - path: str, - ): - episode = await self._provider.search_episode( - title, - season=season, - episode_nbr=episode_nbr if season is not None else None, - absolute=episode_nbr if season is None else None, - year=year, - ) - episode.path = path - logger.debug("Got episode: %s", episode) - episode.show_id = await self.create_or_get_show(episode, title) - - if episode.season_number is not None: - episode.season_id = await self.register_seasons( - episode.show, episode.show_id, episode.season_number - ) - await self._client.post("episodes", data=episode.to_kyoo()) - - async def create_or_get_collection(self, collection: Collection) -> str: - @cache(ttl=timedelta(days=1), cache=self._collection_cache) - async def create_collection(provider_id: str): - # TODO: Check if a collection with the same metadata id exists already on kyoo. - new_collection = ( - await self._provider.identify_collection(provider_id) - if not any(collection.translations.keys()) - else collection - ) - logger.debug("Got collection: %s", new_collection) - return await self._client.post("collection", data=new_collection.to_kyoo()) - - # The parameter is only used as a key for the cache. - provider_id = collection.external_id[self._provider.name].data_id - return await create_collection(provider_id) - - async def create_or_get_show(self, episode: Episode, fallback_name: str) -> str: - @cache(ttl=timedelta(days=1), cache=self._show_cache) - async def create_show(_: str): - # TODO: Check if a show with the same metadata id exists already on kyoo. 
- show = ( - await self._provider.identify_show( - episode.show.external_id[self._provider.name].data_id, - ) - if isinstance(episode.show, PartialShow) - else episode.show - ) - show.file_title = fallback_name - # TODO: collections - logger.debug("Got show: %s", episode) - ret = await self._client.post("show", data=show.to_kyoo()) - - async def create_season(season: Season, id: str): - try: - season.show_id = id - return await self._client.post("seasons", data=season.to_kyoo()) - except Exception as e: - logger.exception("Unhandled error create a season", exc_info=e) - - season_tasks = map( - lambda s: exec_as_cache( - self._season_cache, - make_key((ret, s.season_number)), - lambda: create_season(s, ret), - ), - show.seasons, - ) - await asyncio.gather(*season_tasks) - - return ret - - # The parameter is only used as a key for the cache. - provider_id = episode.show.external_id[self._provider.name].data_id - return await create_show(provider_id) - - async def register_seasons( - self, show: Show | PartialShow, show_id: str, season_number: int - ) -> str: - # We use an external season cache because we want to edit this cache programatically - @cache(ttl=timedelta(days=1), cache=self._season_cache) - async def create_season(_: str, __: int): - season = await self._provider.identify_season( - show.external_id[self._provider.name].data_id, season_number - ) - season.show_id = show_id - return await self._client.post("seasons", data=season.to_kyoo()) - - return await create_season(show_id, season_number) - - async def refresh( - self, - kind: Literal["collection", "movie", "episode", "show", "season"], - kyoo_id: str, - ): - async def id_movie(movie: dict, id: dict): - ret = await self._provider.identify_movie(id["dataId"]) - ret.path = movie["path"] - return ret - - async def id_season(season: dict, id: dict): - ret = await self._provider.identify_season( - id["dataId"], season["seasonNumber"] - ) - ret.show_id = season["showId"] - return ret - - async def id_episode(episode: dict, id: dict): - ret = await self._provider.identify_episode( - id["showId"], id["season"], id["episode"], episode["absoluteNumber"] - ) - ret.show_id = episode["showId"] - ret.season_id = episode["seasonId"] - ret.path = episode["path"] - return ret - - identify_table = { - "collection": lambda _, id: self._provider.identify_collection( - id["dataId"] - ), - "movie": id_movie, - "show": lambda _, id: self._provider.identify_show(id["dataId"]), - "season": id_season, - "episode": id_episode, - } - - current = await self._client.get(f"{kind}/{kyoo_id}") - if self._provider.name not in current["externalId"]: - logger.error( - f"Could not refresh metadata of {kind}/{kyoo_id}. Missing provider id." - ) - return False - provider_id = current["externalId"][self._provider.name] - new_value = await identify_table[kind](current, provider_id) - await self._client.put(f"{kind}/{kyoo_id}", data=new_value.to_kyoo()) - return True diff --git a/scanner/matcher/parser/guess.py b/scanner/matcher/parser/guess.py deleted file mode 100644 index a13347c4..00000000 --- a/scanner/matcher/parser/guess.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python3 - -if __name__ == "__main__": - import sys - from pathlib import Path - - sys.path.append(str(Path(f"{__file__}/../../..").resolve())) - -from guessit.api import default_api -from typing import cast, List, Any -from rebulk import Rebulk - -try: - from . 
import rules -except: - import rules - -default_api.configure({}) -rblk = cast(Rebulk, default_api.rebulk) -rblk.rules(rules) - - -def guessit(name: str, *, xem_titles: List[str] = [], extra_flags: dict[str, Any] = {}): - return default_api.guessit( - name, - { - "episode_prefer_number": True, - "excludes": "language", - "expected_title": xem_titles, - } - | extra_flags, - ) - - -# Only used to test localy -if __name__ == "__main__": - import sys - import json - from providers.implementations.thexem import TheXemClient - from guessit.jsonutils import GuessitEncoder - from aiohttp import ClientSession - import asyncio - - async def main(): - async with ClientSession() as client: - xem = TheXemClient(client) - - advanced = any(x == "-a" for x in sys.argv) - ret = guessit( - sys.argv[1], - xem_titles=await xem.get_expected_titles(), - extra_flags={"advanced": advanced}, - ) - print(json.dumps(ret, cls=GuessitEncoder, indent=4)) - - asyncio.run(main()) diff --git a/scanner/matcher/subscriber.py b/scanner/matcher/subscriber.py deleted file mode 100644 index 77fe78d2..00000000 --- a/scanner/matcher/subscriber.py +++ /dev/null @@ -1,62 +0,0 @@ -import asyncio -from typing import Union, Literal -from msgspec import Struct, json -from logging import getLogger -from aio_pika.abc import AbstractIncomingMessage - -from providers.rabbit_base import RabbitBase -from matcher.matcher import Matcher - -logger = getLogger(__name__) - - -class Message(Struct, tag_field="action", tag=str.lower): - pass - - -class Scan(Message): - path: str - - -class Delete(Message): - path: str - - -class Refresh(Message): - kind: Literal["collection", "show", "movie", "season", "episode"] - id: str - - -decoder = json.Decoder(Union[Scan, Delete, Refresh]) - - -class Subscriber(RabbitBase): - async def listen(self, matcher: Matcher): - async def on_message(message: AbstractIncomingMessage): - try: - msg = decoder.decode(message.body) - ack = False - match msg: - case Scan(path): - ack = await matcher.identify(path) - case Delete(path): - ack = await matcher.delete(path) - case Refresh(kind, id): - ack = await matcher.refresh(kind, id) - case _: - logger.error(f"Invalid action: {msg.action}") - if ack: - logger.info("finished processing %s", msg) - await message.ack() - else: - logger.warn("failed to process %s", msg) - await message.reject() - except Exception as e: - logger.exception("Unhandled error", exc_info=e) - await message.reject() - - # Allow up to 20 scan requests to run in parallel on the same listener. - # Since most work is calling API not doing that is a waste. 
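# For reference, the wire format this listener expects: msgspec derives the tag from
# the lowercased class name, so the tagged union above maps plain JSON bodies like
#
#     {"action": "scan", "path": "/video/Some Serie/S01E01.mkv"}
#     {"action": "refresh", "kind": "episode", "id": "<kyoo uuid>"}
#
# and decoder.decode() returns the matching Scan/Delete/Refresh struct for the
# match statement above (these payloads are illustrative, not taken from a real queue).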
- await self._channel.set_qos(prefetch_count=20) - await self._queue.consume(on_message) - await asyncio.Future() diff --git a/scanner/migrations/000001_request.down.sql b/scanner/migrations/000001_request.down.sql new file mode 100644 index 00000000..06421546 --- /dev/null +++ b/scanner/migrations/000001_request.down.sql @@ -0,0 +1,5 @@ +drop table scanner.requests; + +drop type scanner.request_kind; +drop type scanner.request_status; + diff --git a/scanner/migrations/000001_request.up.sql b/scanner/migrations/000001_request.up.sql new file mode 100644 index 00000000..fbbbe71d --- /dev/null +++ b/scanner/migrations/000001_request.up.sql @@ -0,0 +1,24 @@ +create type scanner.request_kind as enum( + 'episode', + 'movie' +); + +create type scanner.request_status as enum( + 'pending', + 'running', + 'failed' +); + +create table scanner.requests( + pk serial primary key, + id uuid not null default gen_random_uuid() unique, + kind scanner.request_kind not null, + title text not null, + year integer, + external_id jsonb not null default '{}'::jsonb, + videos jsonb not null default '[]'::jsonb, + status scanner.request_status not null default 'pending', + started_at timestamptz, + created_at timestamptz not null default now()::timestamptz, + constraint unique_kty unique(kind, title, year) +); diff --git a/scanner/old/__init__.py b/scanner/old/__init__.py new file mode 100644 index 00000000..23dda67b --- /dev/null +++ b/scanner/old/__init__.py @@ -0,0 +1,30 @@ +async def main(): + import asyncio + import os + import logging + from .monitor import monitor + from .scanner import scan + from .refresher import refresh + from .publisher import Publisher + from .subscriber import Subscriber + from old.kyoo_client import KyooClient + + logging.basicConfig(level=logging.INFO) + logging.getLogger("watchfiles").setLevel(logging.WARNING) + + async with ( + Publisher() as publisher, + Subscriber() as subscriber, + KyooClient() as client, + ): + path = os.environ.get("SCANNER_LIBRARY_ROOT", "/video") + + async def scan_all(): + await scan(path, publisher, client, remove_deleted=True) + + await asyncio.gather( + monitor(path, publisher, client), + scan_all(), + refresh(publisher, client), + subscriber.listen(scan_all), + ) diff --git a/scanner/matcher/cache.py b/scanner/old/cache.py similarity index 100% rename from scanner/matcher/cache.py rename to scanner/old/cache.py diff --git a/scanner/providers/implementations/thetvdb.py b/scanner/old/implementations/thetvdb.py similarity index 96% rename from scanner/providers/implementations/thetvdb.py rename to scanner/old/implementations/thetvdb.py index 73147a0b..e587a9e3 100644 --- a/scanner/providers/implementations/thetvdb.py +++ b/scanner/old/implementations/thetvdb.py @@ -7,6 +7,7 @@ from typing import Optional, Any, Callable, OrderedDict from langcodes import Language from matcher.cache import cache +from scanner.models.staff import Role from ..provider import Provider, ProviderError from ..utils import normalize_lang @@ -75,6 +76,19 @@ class TVDB(Provider): "martial-arts": None, "awards-show": None, } + self._roles_map = { + "Actor": Role.ACTOR, + "Creator": Role.OTHER, + "Crew": Role.CREW, + "Director": Role.DIRECTOR, + "Executive Producer": Role.OTHER, + "Guest Star": Role.OTHER, + "Host": Role.OTHER, + "Musical Guest": Role.MUSIC, + "Producer": Role.PRODUCER, + "Showrunner": Role.OTHER, + "Writer": Role.WRITTER, + } @cache(ttl=timedelta(days=30)) async def login(self) -> str: diff --git a/scanner/providers/implementations/thexem.py 
b/scanner/old/implementations/thexem.py similarity index 100% rename from scanner/providers/implementations/thexem.py rename to scanner/old/implementations/thexem.py diff --git a/scanner/old/matcher.py b/scanner/old/matcher.py new file mode 100644 index 00000000..a8507bf2 --- /dev/null +++ b/scanner/old/matcher.py @@ -0,0 +1,67 @@ +from datetime import timedelta +from typing import Literal, Optional +import asyncio +from logging import getLogger +from old.provider import Provider, ProviderError +from old.types.collection import Collection +from old.types.show import Show +from old.types.episode import Episode, PartialShow +from old.types.season import Season +from old.kyoo_client import KyooClient +from .parser.guess import guessit +from .cache import cache, exec_as_cache, make_key + +logger = getLogger(__name__) + + +class Matcher: + def __init__(self, client: KyooClient, provider: Provider) -> None: + self._client = client + self._provider = provider + + async def refresh( + self, + kind: Literal["collection", "movie", "episode", "show", "season"], + kyoo_id: str, + ): + async def id_movie(movie: dict, id: dict): + ret = await self._provider.identify_movie(id["dataId"]) + ret.path = movie["path"] + return ret + + async def id_season(season: dict, id: dict): + ret = await self._provider.identify_season( + id["dataId"], season["seasonNumber"] + ) + ret.show_id = season["showId"] + return ret + + async def id_episode(episode: dict, id: dict): + ret = await self._provider.identify_episode( + id["showId"], id["season"], id["episode"], episode["absoluteNumber"] + ) + ret.show_id = episode["showId"] + ret.season_id = episode["seasonId"] + ret.path = episode["path"] + return ret + + identify_table = { + "collection": lambda _, id: self._provider.identify_collection( + id["dataId"] + ), + "movie": id_movie, + "show": lambda _, id: self._provider.identify_show(id["dataId"]), + "season": id_season, + "episode": id_episode, + } + + current = await self._client.get(f"{kind}/{kyoo_id}") + if self._provider.name not in current["externalId"]: + logger.error( + f"Could not refresh metadata of {kind}/{kyoo_id}. Missing provider id." 
+ ) + return False + provider_id = current["externalId"][self._provider.name] + new_value = await identify_table[kind](current, provider_id) + await self._client.put(f"{kind}/{kyoo_id}", data=new_value.to_kyoo()) + return True diff --git a/scanner/scanner/refresher.py b/scanner/old/refresher.py similarity index 91% rename from scanner/scanner/refresher.py rename to scanner/old/refresher.py index 48eeb61a..a5a572c9 100644 --- a/scanner/scanner/refresher.py +++ b/scanner/old/refresher.py @@ -1,7 +1,7 @@ import asyncio from logging import getLogger -from providers.kyoo_client import KyooClient +from old.kyoo_client import KyooClient from scanner.publisher import Publisher diff --git a/scanner/providers/implementations/themoviedatabase.py b/scanner/providers/implementations/themoviedatabase.py deleted file mode 100644 index 85a8fb4b..00000000 --- a/scanner/providers/implementations/themoviedatabase.py +++ /dev/null @@ -1,769 +0,0 @@ -import asyncio -from aiohttp import ClientSession -from datetime import datetime, timedelta -from logging import getLogger -from typing import Awaitable, Callable, Dict, List, Optional, Any, TypeVar -from itertools import accumulate, zip_longest -from langcodes import Language - -from providers.utils import ProviderError -from matcher.cache import cache - -from ..provider import Provider -from ..types.movie import Movie, MovieTranslation, Status as MovieStatus -from ..types.season import Season, SeasonTranslation -from ..types.episode import Episode, EpisodeTranslation, PartialShow, EpisodeID -from ..types.studio import Studio -from ..types.genre import Genre -from ..types.metadataid import MetadataID -from ..types.show import Show, ShowTranslation, Status as ShowStatus -from ..types.collection import Collection, CollectionTranslation - -logger = getLogger(__name__) - - -class TheMovieDatabase(Provider): - DEFAULT_API_KEY = "c9f328a01011b28f22483717395fc3fa" - - def __init__( - self, - languages: list[str], - client: ClientSession, - api_key: str, - ) -> None: - super().__init__() - self._languages = [Language.get(l) for l in languages] - self._client = client - self.base = "https://api.themoviedb.org/3" - self.api_key = api_key - self.genre_map = { - 28: Genre.ACTION, - 12: Genre.ADVENTURE, - 16: Genre.ANIMATION, - 35: Genre.COMEDY, - 80: Genre.CRIME, - 99: Genre.DOCUMENTARY, - 18: Genre.DRAMA, - 10751: Genre.FAMILY, - 14: Genre.FANTASY, - 36: Genre.HISTORY, - 27: Genre.HORROR, - 10402: Genre.MUSIC, - 9648: Genre.MYSTERY, - 10749: Genre.ROMANCE, - 878: Genre.SCIENCE_FICTION, - 53: Genre.THRILLER, - 10752: Genre.WAR, - 37: Genre.WESTERN, - 10759: [Genre.ACTION, Genre.ADVENTURE], - 10762: Genre.KIDS, - 10763: Genre.NEWS, - 10764: Genre.REALITY, - 10765: [Genre.SCIENCE_FICTION, Genre.FANTASY], - 10766: Genre.SOAP, - 10767: Genre.TALK, - 10768: [Genre.WAR, Genre.POLITICS], - } - - @property - def name(self) -> str: - return "themoviedatabase" - - def process_genres(self, genres) -> list[Genre]: - def flatten(x: Genre | list[Genre]) -> list[Genre]: - if isinstance(x, list): - return [j for i in x for j in flatten(i)] - return [x] - - return flatten( - [self.genre_map[x["id"]] for x in genres if x["id"] in self.genre_map] - ) - - def get_languages(self, *args) -> list[Language]: - return self._languages + list(args) - - async def get( - self, - path: str, - *, - params: dict[str, Any] = {}, - not_found_fail: Optional[str] = None, - ): - params = {k: v for k, v in params.items() if v is not None} - async with self._client.get( - f"{self.base}/{path}", params={"api_key": 
self.api_key, **params} - ) as r: - if not_found_fail and r.status == 404: - raise ProviderError(not_found_fail) - r.raise_for_status() - return await r.json() - - T = TypeVar("T") - - def merge_translations(self, host, translations, *, languages: list[Language]): - host.translations = { - k.to_tag(): v.translations[k.to_tag()] - for k, v in zip(languages, translations) - } - return host - - async def process_translations( - self, - for_language: Callable[[str], Awaitable[T]], - languages: list[Language], - post_merge: Callable[[T, list[T]], T] | None = None, - ) -> T: - tasks = map(lambda lng: for_language(lng), languages) - items: list[Any] = await asyncio.gather(*tasks) - item = self.merge_translations(items[0], items, languages=languages) - if post_merge: - item = post_merge(item, items) - return item - - def get_image(self, images: list[Dict[str, Any]]) -> list[str]: - return [ - f"https://image.tmdb.org/t/p/original{x['file_path']}" - for x in images - if x["file_path"] - ] - - def to_studio(self, company: dict[str, Any]) -> Studio: - return Studio( - name=company["name"], - logos=[f"https://image.tmdb.org/t/p/original{company['logo_path']}"] - if "logo_path" in company - else [], - external_id={ - self.name: MetadataID( - company["id"], f"https://www.themoviedb.org/company/{company['id']}" - ) - }, - ) - - def get_best_image( - self, item: dict[str, Any], lng: Language, key: str - ) -> list[dict]: - """ - Retrieves the best available images for a item based on localization. - - Args: - item (dict): A dictionary containing item information, including images and language details. - lng (Language): The preferred language for the images. - key (str): The key to access the images in the item dictionary. (e.g. "posters", "backdrops", "logos") - Returns: - list: A list of images, prioritized by localization, original language, and any available image. - """ - # Order images by size and vote average - item["images"][key] = sorted( - item["images"][key], - key=lambda x: (x.get("vote_average", 0), x.get("width", 0)), - reverse=True, - ) - - # Step 1: Try to get localized images - localized_images = [ - image - for image in item["images"][key] - if image.get("iso_639_1") == lng.language - ] - - # Step 2: If no localized images, try images in the original language - if not localized_images: - localized_images = [ - image - for image in item["images"][key] - if image.get("iso_639_1") == item.get("original_language") - ] - - # Step 3: If still no images, use any available images - if not localized_images: - localized_images = item["images"][key] - - # Step 4: If there are no images at all, fallback to _path attribute. - if not localized_images: - localized_images = self._get_image_fallback(item, key) - - return self.get_image(localized_images) - - def _get_image_fallback(self, item: dict[str, Any], key: str) -> list[dict]: - """ - Fallback to _path attribute if there are no images available in the images list. 
- """ - if key == "posters": - return [{"file_path": item.get("poster_path")}] - elif key == "backdrops": - return [{"file_path": item.get("backdrop_path")}] - - return [] - - async def search_movie(self, name: str, year: Optional[int]) -> Movie: - search_results = ( - await self.get("search/movie", params={"query": name, "year": year}) - )["results"] - if len(search_results) == 0: - raise ProviderError(f"No result for a movie named: {name}") - search = self.get_best_result(search_results, name, year) - original_language = Language.get(search["original_language"]) - return await self.identify_movie( - search["id"], original_language=original_language - ) - - async def identify_movie( - self, movie_id: str, original_language: Optional[Language] = None - ) -> Movie: - languages = self.get_languages() - - async def for_language(lng: Language) -> Movie: - movie = await self.get( - f"movie/{movie_id}", - params={ - "language": lng.to_tag(), - "append_to_response": "alternative_titles,videos,credits,keywords,images", - "include_image_language": f"{lng.language},null,{original_language.language if original_language else ''}", - }, - ) - logger.debug("TMDb responded: %s", movie) - - ret = Movie( - original_language=movie["original_language"], - aliases=[x["title"] for x in movie["alternative_titles"]["titles"]], - air_date=datetime.strptime(movie["release_date"], "%Y-%m-%d").date() - if movie["release_date"] - else None, - status=MovieStatus.FINISHED - if movie["status"] == "Released" - else MovieStatus.PLANNED, - rating=round(float(movie["vote_average"]) * 10), - runtime=int(movie["runtime"]) if movie["runtime"] is not None else None, - studios=[self.to_studio(x) for x in movie["production_companies"]], - genres=self.process_genres(movie["genres"]), - external_id=( - { - self.name: MetadataID( - movie["id"], - f"https://www.themoviedb.org/movie/{movie['id']}", - ) - } - | ( - { - "imdb": MetadataID( - movie["imdb_id"], - f"https://www.imdb.com/title/{movie['imdb_id']}", - ) - } - if movie["imdb_id"] - else {} - ) - ), - collections=[ - Collection( - external_id={ - self.name: MetadataID( - movie["belongs_to_collection"]["id"], - f"https://www.themoviedb.org/collection/{movie['belongs_to_collection']['id']}", - ) - }, - ) - ] - if movie["belongs_to_collection"] is not None - else [], - # TODO: Add cast information - ) - translation = MovieTranslation( - name=movie["title"], - tagline=movie["tagline"] if movie["tagline"] else None, - tags=list(map(lambda x: x["name"], movie["keywords"]["keywords"])), - overview=movie["overview"], - posters=self.get_best_image(movie, lng, "posters"), - logos=self.get_best_image(movie, lng, "logos"), - thumbnails=self.get_best_image(movie, lng, "backdrops"), - trailers=[ - f"https://www.youtube.com/watch?v={x['key']}" - for x in movie["videos"]["results"] - if x["type"] == "Trailer" and x["site"] == "YouTube" - ], - ) - ret.translations = {lng.to_tag(): translation} - return ret - - ret = await self.process_translations(for_language, languages) - if ( - ret.original_language is not None - and ret.original_language not in ret.translations - ): - orig_language = Language.get(ret.original_language) - ret.translations[orig_language.to_tag()] = ( - await for_language(orig_language) - ).translations[orig_language.to_tag()] - return ret - - @cache(ttl=timedelta(days=1)) - async def identify_show( - self, - show_id: str, - ) -> Show: - languages = self.get_languages() - - async def for_language(lng: Language) -> Show: - show = await self.get( - f"tv/{show_id}", - params={ - 
"language": lng.to_tag(), - "append_to_response": "alternative_titles,videos,credits,keywords,images,external_ids", - "include_image_language": f"{lng.language},null,en", - }, - ) - logger.debug("TMDb responded: %s", show) - - ret = Show( - original_language=show["original_language"], - aliases=[x["title"] for x in show["alternative_titles"]["results"]], - start_air=datetime.strptime(show["first_air_date"], "%Y-%m-%d").date() - if show["first_air_date"] - else None, - end_air=datetime.strptime(show["last_air_date"], "%Y-%m-%d").date() - if show["last_air_date"] - else None, - status=ShowStatus.FINISHED - if show["status"] == "Released" - else ShowStatus.AIRING - if show["in_production"] - else ShowStatus.FINISHED, - rating=round(float(show["vote_average"]) * 10), - studios=[self.to_studio(x) for x in show["production_companies"]], - genres=self.process_genres(show["genres"]), - external_id={ - self.name: MetadataID( - show["id"], f"https://www.themoviedb.org/tv/{show['id']}" - ), - } - | ( - { - "imdb": MetadataID( - show["external_ids"]["imdb_id"], - f"https://www.imdb.com/title/{show['external_ids']['imdb_id']}", - ) - } - if show["external_ids"]["imdb_id"] - else {} - ) - | ( - {"tvdb": MetadataID(show["external_ids"]["tvdb_id"], link=None)} - if show["external_ids"]["tvdb_id"] - else {} - ), - seasons=[ - self.to_season(x, language=lng, show_id=show["id"]) - for x in show["seasons"] - ], - # TODO: Add cast information - ) - translation = ShowTranslation( - name=show["name"], - tagline=show["tagline"] if show["tagline"] else None, - tags=list(map(lambda x: x["name"], show["keywords"]["results"])), - overview=show["overview"], - posters=self.get_best_image(show, lng, "posters"), - logos=self.get_best_image(show, lng, "logos"), - thumbnails=self.get_best_image(show, lng, "backdrops"), - trailers=[ - f"https://www.youtube.com/watch?v={x['key']}" - for x in show["videos"]["results"] - if x["type"] == "Trailer" and x["site"] == "YouTube" - ], - ) - ret.translations = {lng.to_tag(): translation} - return ret - - def merge_seasons_translations(item: Show, items: list[Show]) -> Show: - item.seasons = [ - self.merge_translations( - season, - [ - next( - y - for y in x.seasons - if y.season_number == season.season_number - ) - for x in items - ], - languages=languages, - ) - for season in item.seasons - ] - return item - - ret = await self.process_translations( - for_language, languages, merge_seasons_translations - ) - if ( - ret.original_language is not None - and ret.original_language not in ret.translations - ): - orig_language = Language.get(ret.original_language) - ret.translations[orig_language.to_tag()] = ( - await for_language(orig_language) - ).translations[orig_language.to_tag()] - return ret - - def to_season( - self, season: dict[str, Any], *, language: Language, show_id: str - ) -> Season: - return Season( - season_number=season["season_number"], - episodes_count=season["episode_count"], - start_air=datetime.strptime(season["air_date"], "%Y-%m-%d").date() - if season["air_date"] - else None, - end_air=None, - external_id={ - self.name: MetadataID( - show_id, - f"https://www.themoviedb.org/tv/{show_id}/season/{season['season_number']}", - ) - }, - translations={ - language.to_tag(): SeasonTranslation( - name=season["name"], - overview=season["overview"], - posters=[ - f"https://image.tmdb.org/t/p/original{season['poster_path']}" - ] - if season["poster_path"] is not None - else [], - thumbnails=[], - ) - }, - ) - - async def identify_season(self, show_id: str, season: int) -> Season: 
- # We already get seasons info in the identify_show and chances are this gets cached already - show = await self.identify_show(show_id) - ret = next((x for x in show.seasons if x.season_number == season), None) - if ret is None: - raise ProviderError( - f"Could not find season {season} for show {show.to_kyoo()['name']}" - ) - return ret - - @cache(ttl=timedelta(days=1)) - async def search_show(self, name: str, year: Optional[int]) -> PartialShow: - search_results = ( - await self.get("search/tv", params={"query": name, "year": year}) - )["results"] - - if len(search_results) == 0: - raise ProviderError(f"No result for a tv show named: {name}") - - search = self.get_best_result(search_results, name, year) - show_id = search["id"] - return PartialShow( - name=search["name"], - original_language=search["original_language"], - external_id={ - self.name: MetadataID( - show_id, f"https://www.themoviedb.org/tv/{show_id}" - ) - }, - ) - - async def search_episode( - self, - name: str, - season: Optional[int], - episode_nbr: Optional[int], - absolute: Optional[int], - year: Optional[int], - ) -> Episode: - show = await self.search_show(name, year) - show_id = show.external_id[self.name].data_id - - if absolute is not None and (season is None or episode_nbr is None): - (season, episode_nbr) = await self.get_episode_from_absolute( - show_id, absolute - ) - - if season is None or episode_nbr is None: - raise ProviderError( - f"Could not guess season or episode number of the episode {show.name} {season}-{episode_nbr} ({absolute})", - ) - - if absolute is None: - absolute = await self.get_absolute_number(show_id, season, episode_nbr) - return await self.identify_episode(show_id, season, episode_nbr, absolute) - - async def identify_episode( - self, show_id: str, season: Optional[int], episode_nbr: int, absolute: int - ) -> Episode: - async def for_language(lng: Language) -> Episode: - try: - episode = await self.get( - f"tv/{show_id}/season/{season}/episode/{episode_nbr}", - params={ - "language": lng.to_tag(), - }, - ) - except: - episode = await self.get( - f"tv/{show_id}/season/{season}/episode/{absolute}", - params={ - "language": lng.to_tag(), - }, - not_found_fail=f"Could not find episode {episode_nbr} of season {season} of serie {show_id} (absolute: {absolute})", - ) - logger.debug("TMDb responded: %s", episode) - - ret = Episode( - show=PartialShow( - name=show_id, - original_language=None, - external_id={ - self.name: MetadataID( - show_id, f"https://www.themoviedb.org/tv/{show_id}" - ) - }, - ), - season_number=episode["season_number"], - episode_number=episode["episode_number"], - absolute_number=absolute, - runtime=int(episode["runtime"]) - if episode["runtime"] is not None - else None, - release_date=datetime.strptime(episode["air_date"], "%Y-%m-%d").date() - if episode["air_date"] - else None, - thumbnail=f"https://image.tmdb.org/t/p/original{episode['still_path']}" - if "still_path" in episode and episode["still_path"] is not None - else None, - external_id={ - self.name: EpisodeID( - show_id, - episode["season_number"], - episode["episode_number"], - f"https://www.themoviedb.org/tv/{show_id}/season/{episode['season_number']}/episode/{episode['episode_number']}", - ), - }, - ) - translation = EpisodeTranslation( - name=episode["name"], - overview=episode["overview"], - ) - ret.translations = {lng.to_tag(): translation} - return ret - - return await self.process_translations(for_language, self.get_languages()) - - def get_best_result( - self, search_results: List[Any], name: str, year: 
Optional[int] - ) -> Any: - results = search_results - - # Find perfect match by year since sometime tmdb decides to discard the year parameter. - if year: - results = list( - x - for x in search_results - if ("first_air_date" in x and x["first_air_date"].startswith(str(year))) - or ("release_date" in x and x["release_date"].startswith(str(year))) - ) - if not results: - results = search_results - - # If there is a perfect match use it (and if there are multiple, use the most popular one) - res = sorted( - ( - x - for x in results - if ("name" in x and x["name"].casefold() == name.casefold()) - or ("title" in x and x["title"].casefold() == name.casefold()) - ), - key=lambda x: (x["vote_count"], x["popularity"]), - reverse=True, - ) - if res: - results = res - else: - # Ignore totally unpopular shows or unknown ones. - # sorted is stable and False= ep_count // 1.5 - ), - None, - ) - - if group_id is None: - return None - group = await self.get(f"tv/episode_group/{group_id}") - absgrp = [ - ep - for grp in sorted(group["groups"], key=lambda x: x["order"]) - # Some shows include specials as the first absolute group (like TenSura) - if grp["name"] != "Specials" - for ep in sorted(grp["episodes"], key=lambda x: x["order"]) - ] - season_starts = [ - next( - ( - x["episode_number"] - for x in absgrp - if x["season_number"] == s.season_number - ), - 1, - ) - for s in show.seasons - ] - complete_abs = absgrp + [ - {"season_number": s.season_number, "episode_number": e} - for s in show.seasons - # ignore specials not specified in the absgrp - if s.season_number > 0 - for e in range(1, s.episodes_count + 1) - if not any( - x["season_number"] == s.season_number - and ( - x["episode_number"] == e - # take into account weird absolute (for example one piece, episodes are not reset to 1 when the season starts) - or x["episode_number"] == season_starts[s.season_number - 1] + e - ) - for x in absgrp - ) - ] - if len(complete_abs) != len(absgrp): - logger.warn( - f"Incomplete absolute group for show {show_id}. Filling missing values by assuming season/episode order is ascending" - ) - return complete_abs - except Exception as e: - logger.exception( - "Could not retrieve absolute ordering information", exc_info=e - ) - return None - - async def get_episode_from_absolute(self, show_id: str, absolute: int): - absgrp = await self.get_absolute_order(show_id) - - if absgrp is not None and len(absgrp) >= absolute: - # Using absolute - 1 since the array is 0based (absolute episode 1 is at index 0) - season = absgrp[absolute - 1]["season_number"] - episode_nbr = absgrp[absolute - 1]["episode_number"] - return (season, episode_nbr) - # We assume that each season should be played in order with no special episodes. - show = await self.identify_show(show_id) - # Dont forget to ingore the special season (season_number 0) - seasons_nbrs = [x.season_number for x in show.seasons if x.season_number != 0] - seasons_eps = [x.episodes_count for x in show.seasons if x.season_number != 0] - - if not any(seasons_nbrs): - return (None, None) - - # zip_longest(seasons_nbrs[1:], accumulate(seasons_eps)) return [(2, 12), (None, 24)] if the show has two seasons with 12 eps - # we take the last group that has less total episodes than the absolute number. - return next( - ( - (snbr, absolute - ep_cnt) - for snbr, ep_cnt in reversed( - list(zip_longest(seasons_nbrs[1:], accumulate(seasons_eps))) - ) - if ep_cnt < absolute - ), - # If the absolute episode number is lower than the 1st season number of episode, it is part of it. 
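# Worked example of this fallback, assuming a show with two 12-episode seasons:
#   seasons_nbrs = [1, 2], seasons_eps = [12, 12]
#   zip_longest(seasons_nbrs[1:], accumulate(seasons_eps)) -> [(2, 12), (None, 24)]
#   absolute = 20 -> the last group with fewer total episodes than 20 is (2, 12),
#                    so the guess is season 2, episode 20 - 12 = 8.
#   absolute = 5  -> no group qualifies, so the default below is used: (1, 5).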
- (seasons_nbrs[0], absolute), - ) - - async def get_absolute_number( - self, show_id: str, season: int, episode_nbr: int - ) -> int: - absgrp = await self.get_absolute_order(show_id) - if absgrp is None: - # We assume that each season should be played in order with no special episodes. - show = await self.identify_show(show_id) - return ( - sum( - x.episodes_count - for x in show.seasons - if 0 < x.season_number < season - ) - + episode_nbr - ) - absolute = next( - ( - # The + 1 is to go from 0based index to 1based absolute number - i + 1 - for i, x in enumerate(absgrp) - if x["episode_number"] == episode_nbr and x["season_number"] == season - ), - None, - ) - if absolute is not None: - return absolute - # assume we use tmdb weird absolute by default (for example, One Piece S21E800, the first - # episode of S21 is not reset to 0 but keep increasing so it can be 800 - start = next( - (x["episode_number"] for x in absgrp if x["season_number"] == season), None - ) - if start is None or start <= episode_nbr: - raise ProviderError( - f"Could not guess absolute number of episode {show_id} s{season} e{episode_nbr}" - ) - # add back the continuous number (imagine the user has one piece S21e31 - # but tmdb registered it as S21E831 since S21's first ep is 800 - return await self.get_absolute_number(show_id, season, episode_nbr + start) - - async def identify_collection(self, provider_id: str) -> Collection: - languages = self.get_languages() - - async def for_language(lng: Language) -> Collection: - collection = await self.get( - f"collection/{provider_id}", - params={ - "language": lng.to_tag(), - "append_to_response": "images", - "include_image_language": f"{lng.language},null,en", - }, - ) - logger.debug("TMDb responded: %s", collection) - - ret = Collection( - external_id={ - self.name: MetadataID( - collection["id"], - f"https://www.themoviedb.org/collection/{collection['id']}", - ) - }, - ) - translation = CollectionTranslation( - name=collection["name"], - overview=collection["overview"], - posters=self.get_best_image(collection, lng, "posters"), - logos=[], - thumbnails=self.get_best_image(collection, lng, "backdrops"), - ) - ret.translations = {lng.to_tag(): translation} - return ret - - return await self.process_translations(for_language, languages) diff --git a/scanner/providers/kyoo_client.py b/scanner/providers/kyoo_client.py deleted file mode 100644 index 50d220aa..00000000 --- a/scanner/providers/kyoo_client.py +++ /dev/null @@ -1,159 +0,0 @@ -import os -import jsons -from aiohttp import ClientSession -from datetime import date -from logging import getLogger -from typing import List, Literal, Any, Optional -from urllib.parse import quote - -from .utils import format_date - -logger = getLogger(__name__) - - -class KyooClient: - def __init__(self) -> None: - self._api_key = os.environ.get("KYOO_APIKEY") - if not self._api_key: - self._api_key = os.environ.get("KYOO_APIKEYS") - if not self._api_key: - print("Missing environment variable 'KYOO_APIKEY'.") - exit(2) - self._api_key = self._api_key.split(",")[0] - - self._url = os.environ.get("KYOO_URL", "http://back:5000") - - async def __aenter__(self): - jsons.set_serializer(lambda x, **_: format_date(x), type[Optional[date | int]]) - self.client = ClientSession( - headers={ - "User-Agent": "kyoo", - }, - json_serialize=lambda *args, **kwargs: jsons.dumps( - *args, key_transformer=jsons.KEY_TRANSFORMER_CAMELCASE, **kwargs - ), - ) - return self - - async def __aexit__(self, exc_type, exc_value, exc_tb): - await self.client.close() - - 
async def get_registered_paths(self) -> List[str]: - async with self.client.get( - f"{self._url}/paths", - headers={"X-API-Key": self._api_key}, - ) as r: - r.raise_for_status() - return await r.json() - - async def create_issue(self, path: str, issue: str, extra: dict | None = None): - async with self.client.post( - f"{self._url}/issues", - json={ - "domain": "scanner", - "cause": path, - "reason": issue, - "extra": extra if extra is not None else {}, - }, - headers={"X-API-Key": self._api_key}, - ) as r: - if not r.ok: - logger.error(f"Request error: {await r.text()}") - r.raise_for_status() - - async def get_issues(self) -> List[str]: - async with self.client.get( - f"{self._url}/issues", - params={"limit": 0}, - headers={"X-API-Key": self._api_key}, - ) as r: - r.raise_for_status() - ret = await r.json() - return [x["cause"] for x in ret if x["domain"] == "scanner"] - - async def delete_issue(self, path: str): - async with self.client.delete( - f'{self._url}/issues?filter=domain eq scanner and cause eq "{quote(path)}"', - headers={"X-API-Key": self._api_key}, - ) as r: - if not r.ok: - logger.error(f"Request error: {await r.text()}") - r.raise_for_status() - - async def link_collection( - self, collection: str, type: Literal["movie"] | Literal["show"], id: str - ): - async with self.client.put( - f"{self._url}/collections/{collection}/{type}/{id}", - headers={"X-API-Key": self._api_key}, - ) as r: - # Allow 409 and continue as if it worked. - if not r.ok and r.status != 409: - logger.error(f"Request error: {await r.text()}") - r.raise_for_status() - - async def post(self, path: str, *, data: dict[str, Any]) -> str: - logger.debug( - "Sending %s: %s", - path, - jsons.dumps( - data, - key_transformer=jsons.KEY_TRANSFORMER_CAMELCASE, - jdkwargs={"indent": 4}, - ), - ) - async with self.client.post( - f"{self._url}/{path}", - json=data, - headers={"X-API-Key": self._api_key}, - ) as r: - # Allow 409 and continue as if it worked. - if not r.ok and r.status != 409: - logger.error(f"Request error: {await r.text()}") - r.raise_for_status() - ret = await r.json() - return ret["id"] - - async def delete( - self, - path: str, - ): - logger.info("Deleting %s", path) - - async with self.client.delete( - f"{self._url}/paths?recursive=true&path={quote(path)}", - headers={"X-API-Key": self._api_key}, - ) as r: - if not r.ok: - logger.error(f"Request error: {await r.text()}") - r.raise_for_status() - - async def get(self, path: str): - async with self.client.get( - f"{self._url}/{path}", - headers={"X-API-Key": self._api_key}, - ) as r: - if not r.ok: - logger.error(f"Request error: {await r.text()}") - r.raise_for_status() - return await r.json() - - async def put(self, path: str, *, data: dict[str, Any]): - logger.debug( - "Sending %s: %s", - path, - jsons.dumps( - data, - key_transformer=jsons.KEY_TRANSFORMER_CAMELCASE, - jdkwargs={"indent": 4}, - ), - ) - async with self.client.put( - f"{self._url}/{path}", - json=data, - headers={"X-API-Key": self._api_key}, - ) as r: - # Allow 409 and continue as if it worked. 
- if not r.ok and r.status != 409: - logger.error(f"Request error: {await r.text()}") - r.raise_for_status() diff --git a/scanner/providers/provider.py b/scanner/providers/provider.py deleted file mode 100644 index 535de39a..00000000 --- a/scanner/providers/provider.py +++ /dev/null @@ -1,97 +0,0 @@ -from logging import getLogger -import os -from aiohttp import ClientSession -from abc import abstractmethod, abstractproperty -from typing import Optional - -from providers.utils import ProviderError - -from .types.show import Show -from .types.season import Season -from .types.episode import Episode -from .types.movie import Movie -from .types.collection import Collection - -logger = getLogger(__name__) - - -class Provider: - @classmethod - def get_default(cls, client: ClientSession): - languages = os.environ.get("LIBRARY_LANGUAGES") - if not languages: - print("Missing environment variable 'LIBRARY_LANGUAGES'.") - exit(2) - languages = languages.split(",") - providers = [] - - from providers.implementations.themoviedatabase import TheMovieDatabase - - tmdb = os.environ.get("THEMOVIEDB_APIKEY") or TheMovieDatabase.DEFAULT_API_KEY - if tmdb != "disabled": - tmdb = TheMovieDatabase(languages, client, tmdb) - providers.append(tmdb) - - from providers.implementations.thetvdb import TVDB - - tvdb = os.environ.get("TVDB_APIKEY") or TVDB.DEFAULT_API_KEY - if tvdb != "disabled": - pin = os.environ.get("TVDB_PIN") or None - tvdb = TVDB(client, tvdb, pin, languages) - providers.append(tvdb) - - if not any(providers): - raise ProviderError( - "No provider configured. You probably forgot to specify an API Key" - ) - - from providers.implementations.thexem import TheXem - - provider = next(iter(providers)) - logger.info(f"Starting with provider: {provider.name}") - return TheXem(client, provider) - - @abstractproperty - def name(self) -> str: - raise NotImplementedError - - @abstractmethod - async def search_movie(self, name: str, year: Optional[int]) -> Movie: - raise NotImplementedError - - @abstractmethod - async def search_episode( - self, - name: str, - season: Optional[int], - episode_nbr: Optional[int], - absolute: Optional[int], - year: Optional[int], - ) -> Episode: - raise NotImplementedError - - @abstractmethod - async def identify_movie(self, movie_id: str) -> Movie: - raise NotImplementedError - - @abstractmethod - async def identify_show(self, show_id: str) -> Show: - raise NotImplementedError - - @abstractmethod - async def identify_season(self, show_id: str, season: int) -> Season: - raise NotImplementedError - - @abstractmethod - async def identify_episode( - self, show_id: str, season: Optional[int], episode_nbr: int, absolute: int - ) -> Episode: - raise NotImplementedError - - @abstractmethod - async def identify_collection(self, provider_id: str) -> Collection: - raise NotImplementedError - - @abstractmethod - async def get_expected_titles(self) -> list[str]: - return [] diff --git a/scanner/providers/rabbit_base.py b/scanner/providers/rabbit_base.py deleted file mode 100644 index 064fa3d1..00000000 --- a/scanner/providers/rabbit_base.py +++ /dev/null @@ -1,34 +0,0 @@ -import os -from aio_pika import connect_robust - - -class RabbitBase: - QUEUE = "scanner" - - async def __aenter__(self): - self._con = await connect_robust( - os.environ.get("RABBITMQ_URL"), - host=os.environ.get("RABBITMQ_HOST", "rabbitmq"), - port=int(os.environ.get("RABBITMQ_PORT", "5672")), - login=os.environ.get("RABBITMQ_DEFAULT_USER", "guest"), - password=os.environ.get("RABBITMQ_DEFAULT_PASS", "guest"), - ) - - 
# Attempt to declare the queue passively in case it already exists. - try: - self._channel = await self._con.channel() - self._queue = await self._channel.declare_queue(self.QUEUE, passive=True) - return self - except Exception: - # The server will close the channel on error. - # Cleanup the reference to it. - await self._channel.close() - - # The queue does not exist, so actively declare it. - self._channel = await self._con.channel() - self._queue = await self._channel.declare_queue(self.QUEUE) - return self - - async def __aexit__(self, exc_type, exc_value, exc_tb): - await self._channel.close() - await self._con.close() diff --git a/scanner/providers/types/collection.py b/scanner/providers/types/collection.py deleted file mode 100644 index c6d01603..00000000 --- a/scanner/providers/types/collection.py +++ /dev/null @@ -1,35 +0,0 @@ -from dataclasses import asdict, dataclass, field -from typing import Optional - -from providers.utils import ProviderError, select_translation, select_image - -from .metadataid import MetadataID - - -@dataclass -class CollectionTranslation: - name: str - overview: Optional[str] = None - posters: list[str] = field(default_factory=list) - logos: list[str] = field(default_factory=list) - thumbnails: list[str] = field(default_factory=list) - - -@dataclass -class Collection: - external_id: dict[str, MetadataID] - translations: dict[str, CollectionTranslation] = field(default_factory=dict) - - def to_kyoo(self): - trans = select_translation(self) - if trans is None: - raise ProviderError( - "Could not find translations for the collection. Aborting" - ) - return { - **asdict(self), - **asdict(trans), - "poster": select_image(self, "posters"), - "thumbnail": select_image(self, "thumbnails"), - "logo": select_image(self, "logos"), - } diff --git a/scanner/providers/types/episode.py b/scanner/providers/types/episode.py deleted file mode 100644 index 8da11c44..00000000 --- a/scanner/providers/types/episode.py +++ /dev/null @@ -1,54 +0,0 @@ -from datetime import date -from dataclasses import dataclass, field, asdict -from typing import Optional - -from providers.utils import select_translation - -from .show import Show -from .metadataid import MetadataID - - -@dataclass -class PartialShow: - name: str - original_language: Optional[str] - external_id: dict[str, MetadataID] - - -@dataclass -class EpisodeID: - show_id: str - season: Optional[int] - episode: int - link: str - - -@dataclass -class EpisodeTranslation: - name: Optional[str] - overview: Optional[str] = None - - -@dataclass -class Episode: - show: Show | PartialShow - season_number: int - episode_number: int - absolute_number: int - runtime: Optional[int] - release_date: Optional[date | int] - thumbnail: Optional[str] - external_id: dict[str, EpisodeID] - - path: Optional[str] = None - show_id: Optional[str] = None - season_id: Optional[str] = None - translations: dict[str, EpisodeTranslation] = field(default_factory=dict) - - def to_kyoo(self): - trans = select_translation(self) or EpisodeTranslation("") - return { - **asdict(self), - **asdict(trans), - "show": None, - } diff --git a/scanner/providers/types/genre.py b/scanner/providers/types/genre.py deleted file mode 100644 index d596a2f2..00000000 --- a/scanner/providers/types/genre.py +++ /dev/null @@ -1,31 +0,0 @@ -from enum import Enum - - -class Genre(str, Enum): - ACTION = "Action" - ADVENTURE = "Adventure" - ANIMATION = "Animation" - COMEDY = "Comedy" - CRIME = "Crime" - DOCUMENTARY = "Documentary" - DRAMA = "Drama" - FAMILY = "Family" - FANTASY = 
"Fantasy" - HISTORY = "History" - HORROR = "Horror" - MUSIC = "Music" - MYSTERY = "Mystery" - ROMANCE = "Romance" - SCIENCE_FICTION = "ScienceFiction" - THRILLER = "Thriller" - WAR = "War" - WESTERN = "Western" - KIDS = "Kids" - NEWS = "News" - REALITY = "Reality" - SOAP = "Soap" - TALK = "Talk" - POLITICS = "Politics" - - def to_kyoo(self): - return self.value diff --git a/scanner/providers/types/metadataid.py b/scanner/providers/types/metadataid.py deleted file mode 100644 index a4944400..00000000 --- a/scanner/providers/types/metadataid.py +++ /dev/null @@ -1,11 +0,0 @@ -from dataclasses import dataclass -from typing import Optional - - -@dataclass -class MetadataID: - data_id: str - link: Optional[str] - - def __post_init__(self): - self.data_id = str(self.data_id) diff --git a/scanner/providers/types/movie.py b/scanner/providers/types/movie.py deleted file mode 100644 index 0a984e39..00000000 --- a/scanner/providers/types/movie.py +++ /dev/null @@ -1,66 +0,0 @@ -from dataclasses import asdict, dataclass, field -from datetime import date -from typing import Optional -from enum import Enum - -from providers.utils import select_translation, select_image - -from .collection import Collection -from .genre import Genre -from .studio import Studio -from .metadataid import MetadataID - - -class Status(str, Enum): - UNKNOWN = "unknown" - FINISHED = "finished" - PLANNED = "planned" - - -@dataclass -class MovieTranslation: - name: str - tagline: Optional[str] = None - tags: list[str] = field(default_factory=list) - overview: Optional[str] = None - - posters: list[str] = field(default_factory=list) - logos: list[str] = field(default_factory=list) - trailers: list[str] = field(default_factory=list) - thumbnails: list[str] = field(default_factory=list) - - -@dataclass -class Movie: - original_language: Optional[str] - aliases: list[str] - air_date: Optional[date | int] - status: Status - rating: int - runtime: Optional[int] - studios: list[Studio] - genres: list[Genre] - # TODO: handle staff - # staff: list[Staff] - external_id: dict[str, MetadataID] - - path: Optional[str] = None - # The title of this show according to it's filename (None only for ease of use in providers) - file_title: Optional[str] = None - collections: list[Collection] = field(default_factory=list) - translations: dict[str, MovieTranslation] = field(default_factory=dict) - - def to_kyoo(self): - trans = select_translation(self) or MovieTranslation(name=self.file_title or "") - return { - **asdict(self), - **asdict(trans), - "poster": select_image(self, "posters"), - "thumbnail": select_image(self, "thumbnails"), - "logo": select_image(self, "logos"), - "trailer": select_image(self, "trailers"), - "studio": next((x.to_kyoo() for x in self.studios), None), - "genres": [x.to_kyoo() for x in self.genres], - "collections": None, - "file_title": None, - } diff --git a/scanner/providers/types/season.py b/scanner/providers/types/season.py deleted file mode 100644 index b553dae0..00000000 --- a/scanner/providers/types/season.py +++ /dev/null @@ -1,38 +0,0 @@ -from datetime import date -from dataclasses import dataclass, field, asdict -from typing import Optional - -from providers.utils import select_translation, select_image - -from .metadataid import MetadataID - - -@dataclass -class SeasonTranslation: - name: Optional[str] = None - overview: Optional[str] = None - posters: list[str] = field(default_factory=list) - thumbnails: list[str] = field(default_factory=list) - - -@dataclass -class Season: - season_number: int - # This is not 
used by kyoo, this is just used internaly by the TMDB provider. - # maybe this should be moved? - episodes_count: int - start_air: Optional[date | int] = None - end_air: Optional[date | int] = None - external_id: dict[str, MetadataID] = field(default_factory=dict) - - show_id: Optional[str] = None - translations: dict[str, SeasonTranslation] = field(default_factory=dict) - - def to_kyoo(self): - trans = select_translation(self) or SeasonTranslation() - return { - **asdict(self), - **asdict(trans), - "poster": select_image(self, "posters"), - "thumbnail": select_image(self, "thumbnails"), - } diff --git a/scanner/providers/types/show.py b/scanner/providers/types/show.py deleted file mode 100644 index df9abba4..00000000 --- a/scanner/providers/types/show.py +++ /dev/null @@ -1,67 +0,0 @@ -from dataclasses import asdict, dataclass, field -from datetime import date -from typing import Optional -from enum import Enum - -from providers.utils import select_translation, select_image - -from .genre import Genre -from .studio import Studio -from .season import Season -from .metadataid import MetadataID - - -class Status(str, Enum): - UNKNOWN = "unknown" - FINISHED = "finished" - AIRING = "airing" - PLANNED = "planned" - - -@dataclass -class ShowTranslation: - name: str - tagline: Optional[str] = None - tags: list[str] = field(default_factory=list) - overview: Optional[str] = None - - posters: list[str] = field(default_factory=list) - logos: list[str] = field(default_factory=list) - trailers: list[str] = field(default_factory=list) - thumbnails: list[str] = field(default_factory=list) - - -@dataclass -class Show: - original_language: Optional[str] - aliases: list[str] - start_air: Optional[date | int] - end_air: Optional[date | int] - status: Status - rating: Optional[int] - studios: list[Studio] - genres: list[Genre] - seasons: list[Season] - # TODO: handle staff - # staff: list[Staff] - external_id: dict[str, MetadataID] - - translations: dict[str, ShowTranslation] = field(default_factory=dict) - # The title of this show according to it's filename (None only for ease of use in providers) - file_title: Optional[str] = None - - def to_kyoo(self): - trans = select_translation(self) or ShowTranslation(name=self.file_title or "") - return { - **asdict(self), - **asdict(trans), - "rating": self.rating or 0, - "studio": next((x.to_kyoo() for x in self.studios), None), - "seasons": None, - "poster": select_image(self, "posters"), - "thumbnail": select_image(self, "thumbnails"), - "logo": select_image(self, "logos"), - "trailer": select_image(self, "trailers"), - "genres": [x.to_kyoo() for x in self.genres], - "file_title": None, - } diff --git a/scanner/providers/types/studio.py b/scanner/providers/types/studio.py deleted file mode 100644 index 0ed11f6a..00000000 --- a/scanner/providers/types/studio.py +++ /dev/null @@ -1,16 +0,0 @@ -from dataclasses import asdict, dataclass, field - -from .metadataid import MetadataID - - -@dataclass -class Studio: - name: str - logos: list[str] = field(default_factory=list) - external_id: dict[str, MetadataID] = field(default_factory=dict) - - def to_kyoo(self): - return { - **asdict(self), - "logo": next(iter(self.logos), None), - } diff --git a/scanner/providers/utils.py b/scanner/providers/utils.py deleted file mode 100644 index ddcc4ffb..00000000 --- a/scanner/providers/utils.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import annotations - -import os -from datetime import date -from itertools import chain -from langcodes import Language -from typing import 
TYPE_CHECKING, Literal, Any, Optional - -if TYPE_CHECKING: - from providers.types.movie import Movie - from providers.types.show import Show - from providers.types.season import Season - from providers.types.episode import Episode - from providers.types.collection import Collection - - -def format_date(date: date | int | None) -> str | None: - if date is None: - return None - if isinstance(date, int): - return f"{date}-01-01" - return date.isoformat() - - -def normalize_lang(lang: str) -> str: - return str(Language.get(lang)) - - -# For now, the API of kyoo only support one language so we remove the others. -default_languages = os.environ.get("LIBRARY_LANGUAGES", "").split(",") -media_prefer_original_language = ( - os.environ.get("MEDIA_PREFER_ORIGINAL_LANGUAGE", "false").lower() == "true" -) - - -def sort_translations( - value: Movie | Show | Season | Episode | Collection, - *, - prefer_orginal=False, -): - from providers.types.movie import Movie - from providers.types.show import Show - - if ( - prefer_orginal - and (isinstance(value, Movie) or isinstance(value, Show)) - and value.original_language - and value.original_language in value.translations - ): - yield value.translations[value.original_language] - for lang in default_languages: - if lang in value.translations: - yield value.translations[lang] - - -def select_translation( - value: Movie | Show | Season | Episode | Collection, *, prefer_orginal=False -) -> Optional[Any]: - return next(sort_translations(value, prefer_orginal=prefer_orginal), None) - - -def select_image( - value: Movie | Show | Season | Collection, - kind: Literal["posters", "thumbnails", "logos", "trailers"], -) -> str | None: - return next( - chain( - *( - getattr(trans, kind) - for trans in sort_translations( - value, prefer_orginal=media_prefer_original_language - ) - ) - ), - None, - ) - - -class ProviderError(RuntimeError): - def __init__(self, *args: object) -> None: - super().__init__(*args) diff --git a/scanner/pyproject.toml b/scanner/pyproject.toml index ce8becbf..e4794c8e 100644 --- a/scanner/pyproject.toml +++ b/scanner/pyproject.toml @@ -1,5 +1,38 @@ +[project] +name = "scanner" +version = "0.1.0" +description = "Register video files to kyoo" +readme = "README.md" +requires-python = ">=3.13" +dependencies = [ + "aiohttp>=3.11.18", + "asyncpg>=0.30.0", + "fastapi[standard]>=0.115.12", + "guessit", + "langcodes>=3.5.0", + "pydantic>=2.11.4", + "pyjwt[crypto]>=2.10.1", + "python-slugify>=8.0.4", + "watchfiles>=1.0.5", +] + +[tool.uv.sources] +guessit = { git = "https://github.com/zoriya/guessit" } + [tool.ruff.format] indent-style = "tab" [tool.pyright] reportAbstractUsage = false +reportUnannotatedClassAttribute = false +enableTypeIgnoreComments = true +reportIgnoreCommentWithoutRule = false +reportUnknownArgumentType = false +reportUnknownVariableType = false +reportMissingParameterType = false +reportUnknownParameterType = false +reportUnknownMemberType = false +reportAny = false +reportExplicitAny = false +reportMissingTypeStubs = false +reportUnknownLambdaType = false diff --git a/scanner/requirements.txt b/scanner/requirements.txt deleted file mode 100644 index 25089dc5..00000000 --- a/scanner/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -guessit@git+https://github.com/zoriya/guessit -aiohttp -jsons -watchfiles -aio-pika -msgspec -langcodes diff --git a/scanner/scanner/__init__.py b/scanner/scanner/__init__.py index 208dc4c7..30c6126d 100644 --- a/scanner/scanner/__init__.py +++ b/scanner/scanner/__init__.py @@ -1,30 +1,69 @@ -async def main(): - 
import asyncio - import os - import logging - from .monitor import monitor - from .scanner import scan - from .refresher import refresh - from .publisher import Publisher - from .subscriber import Subscriber - from providers.kyoo_client import KyooClient +import logging +from asyncio import CancelledError, TaskGroup, create_task +from contextlib import asynccontextmanager - logging.basicConfig(level=logging.INFO) - logging.getLogger("watchfiles").setLevel(logging.WARNING) +from fastapi import FastAPI +from scanner.client import KyooClient +from scanner.fsscan import FsScanner +from scanner.providers.composite import CompositeProvider +from scanner.providers.themoviedatabase import TheMovieDatabase +from scanner.requests import RequestCreator, RequestProcessor + +from .database import get_db, init_pool, migrate +from .routers.routes import router + +logging.basicConfig(level=logging.DEBUG) +logging.getLogger("watchfiles").setLevel(logging.WARNING) +logging.getLogger("rebulk").setLevel(logging.WARNING) + + +@asynccontextmanager +async def lifespan(_): async with ( - Publisher() as publisher, - Subscriber() as subscriber, + init_pool() as pool, + get_db() as db, KyooClient() as client, + TheMovieDatabase() as tmdb, ): - path = os.environ.get("SCANNER_LIBRARY_ROOT", "/video") - - async def scan_all(): - await scan(path, publisher, client, remove_deleted=True) - - await asyncio.gather( - monitor(path, publisher, client), - scan_all(), - refresh(publisher, client), - subscriber.listen(scan_all), + # there's no way someone else used the same id, right? + is_master = await db.fetchval("select pg_try_advisory_lock(198347)") + if is_master: + await migrate() + processor = RequestProcessor(pool, client, tmdb) + scanner = FsScanner(client, RequestCreator(db)) + tasks = create_task( + background_startup( + scanner, + processor, + is_master, + ) ) + yield + _ = tasks.cancel() + + +async def background_startup( + scanner: FsScanner, + processor: RequestProcessor, + is_master: bool | None, +): + async with TaskGroup() as tg: + _ = tg.create_task(processor.listen(tg)) + if is_master: + _ = tg.create_task(scanner.monitor()) + _ = tg.create_task(scanner.scan(remove_deleted=True)) + + +async def cancel(): + raise CancelledError() + + +app = FastAPI( + title="Scanner", + description="API to control the long running scanner or interacting with external databases (themoviedb, tvdb...)\n\n" + + "Most of those APIs are for admins only.", + root_path="/scanner", + lifespan=lifespan, +) +app.include_router(router) diff --git a/scanner/scanner/__main__.py b/scanner/scanner/__main__.py deleted file mode 100644 index ac4e42e3..00000000 --- a/scanner/scanner/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env python - -import asyncio -import scanner - -asyncio.run(scanner.main()) diff --git a/scanner/scanner/client.py b/scanner/scanner/client.py new file mode 100644 index 00000000..37742c0d --- /dev/null +++ b/scanner/scanner/client.py @@ -0,0 +1,103 @@ +import os +from logging import getLogger +from types import TracebackType +from typing import Literal + +from aiohttp import ClientSession +from pydantic import TypeAdapter + +from .models.movie import Movie +from .models.request import Request +from .models.serie import Serie +from .models.videos import For, Resource, Video, VideoCreated, VideoInfo, VideoLink +from .utils import Singleton + +logger = getLogger(__name__) + + +class KyooClient(metaclass=Singleton): + def __init__(self) -> None: + self._client = ClientSession( + base_url=os.environ.get("KYOO_URL", 
"http://api:3567/api") + "/", + headers={ + "User-Agent": "kyoo scanner v5", + "Content-type": "application/json", + }, + ) + if api_key := os.environ.get("KYOO_APIKEY"): + self._client.headers["X-API-KEY"] = api_key + + async def __aenter__(self): + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ): + await self._client.close() + + async def get_videos_info(self) -> VideoInfo: + async with self._client.get("videos") as r: + r.raise_for_status() + return VideoInfo(**await r.json()) + + async def create_videos(self, videos: list[Video]) -> list[VideoCreated]: + async with self._client.post( + "videos", + data=TypeAdapter(list[Video]).dump_json(videos, by_alias=True), + ) as r: + r.raise_for_status() + return TypeAdapter(list[VideoCreated]).validate_json(await r.text()) + + async def delete_videos(self, videos: list[str] | set[str]): + async with self._client.delete( + "videos", + data=TypeAdapter(list[str] | set[str]).dump_json(videos, by_alias=True), + ) as r: + r.raise_for_status() + + async def create_movie(self, movie: Movie) -> Resource: + async with self._client.post( + "movies", + data=movie.model_dump_json(by_alias=True), + ) as r: + r.raise_for_status() + return Resource.model_validate(await r.json()) + + async def create_serie(self, serie: Serie) -> Resource: + async with self._client.post( + "series", + data=serie.model_dump_json(by_alias=True), + ) as r: + r.raise_for_status() + return Resource.model_validate(await r.json()) + + async def link_videos( + self, + kind: Literal["movie", "serie"], + show: str, + videos: list[Request.Video], + ): + def map_request(request: Request.Video): + if kind == "movie": + return VideoLink(id=request.id, for_=[For.Movie(movie=show)]) + return VideoLink( + id=request.id, + for_=[ + For.Special(serie=show, special=ep.episode) + if ep.season is None or ep.season == 0 + else For.Episode(serie=show, season=ep.season, episode=ep.episode) + for ep in request.episodes + ], + ) + + async with self._client.post( + "videos/link", + data=TypeAdapter(list[VideoLink]).dump_json( + [map_request(x) for x in videos], + by_alias=True, + ), + ) as r: + r.raise_for_status() diff --git a/scanner/scanner/database.py b/scanner/scanner/database.py new file mode 100644 index 00000000..918ae0c7 --- /dev/null +++ b/scanner/scanner/database.py @@ -0,0 +1,97 @@ +import json +import os +from contextlib import asynccontextmanager +from logging import getLogger +from typing import Any, cast + +from asyncpg import Connection, Pool, create_pool + +logger = getLogger(__name__) + +pool: Pool + + +@asynccontextmanager +async def init_pool(): + url = os.environ.get("POSTGRES_URL") + connection: dict[str, Any] = ( + { + "user": os.environ.get("PGUSER", "kyoo"), + "host": os.environ.get("PGHOST", "postgres"), + "password": os.environ.get("PGPASSWORD", "password"), + } + if url is None + else {"dns": url} + ) + async with await create_pool(**connection) as p: + global pool + pool = p + yield pool + pool = None # type: ignore + + +@asynccontextmanager +async def get_db(): + async with pool.acquire() as db: + await db.set_type_codec( + "json", + encoder=json.dumps, + decoder=json.loads, + schema="pg_catalog", + ) + await db.set_type_codec( + "jsonb", + encoder=lambda data: b"\x01" + bytes(json.dumps(data), encoding="utf8"), + decoder=lambda data: json.loads(data[1:]), + schema="pg_catalog", + format="binary", + ) + yield cast(Connection, db) + + +async def 
migrate(migrations_dir="./migrations"): + async with get_db() as db: + _ = await db.execute( + """ + create schema if not exists scanner; + + create table if not exists scanner._migrations( + pk serial primary key, + name text not null, + applied_at timestamptz not null default now() ::timestamptz)""", + ) + + applied = await db.fetchval( + """ + select + count(*) + from + scanner._migrations + """ + ) + + if not os.path.exists(migrations_dir): + logger.warning(f"Migrations directory '{migrations_dir}' not found") + return + + migrations = sorted( + f for f in os.listdir(migrations_dir) if f.endswith("up.sql") + ) + for migration in migrations[applied:]: + file_path = os.path.join(migrations_dir, migration) + logger.info(f"Applying migration: {migration}") + try: + with open(file_path, "r") as f: + sql = f.read() + async with db.transaction(): + _ = await db.execute(sql) + _ = await db.execute( + """ + insert into scanner._migrations(name) + values ($1) + """, + migration, + ) + except Exception as e: + logger.error(f"Failed to apply migration {migration}", exc_info=e) + raise diff --git a/scanner/scanner/fsscan.py b/scanner/scanner/fsscan.py new file mode 100644 index 00000000..0fe6aef7 --- /dev/null +++ b/scanner/scanner/fsscan.py @@ -0,0 +1,218 @@ +import os +import re +from contextlib import asynccontextmanager +from logging import getLogger +from mimetypes import guess_file_type +from os.path import dirname, exists, isdir, join + +from watchfiles import Change, awatch + +from .client import KyooClient +from .database import get_db +from .identifiers.identify import identify +from .models.metadataid import EpisodeId, MetadataId +from .models.request import Request +from .models.videos import For, Video, VideoInfo +from .requests import RequestCreator + +logger = getLogger(__name__) + + +@asynccontextmanager +async def create_scanner(): + async with get_db() as db: + yield FsScanner(KyooClient(), RequestCreator(db)) + + +class FsScanner: + def __init__(self, client: KyooClient, requests: RequestCreator): + self._client = client + self._requests = requests + self._info: VideoInfo = None # type: ignore + self._root_path = os.environ.get("SCANNER_LIBRARY_ROOT", "/video") + try: + pattern = os.environ.get("LIBRARY_IGNORE_PATTERN") + self._ignore_pattern = re.compile(pattern) if pattern else None + except re.error as e: + self._ignore_pattern = None + logger.error(f"Invalid ignore pattern. Ignoring. Error: {e}") + + async def scan(self, path: str | None = None, remove_deleted=False): + if path is None: + path = self._root_path + logger.info("Starting scan at %s. This may take some time...", path) + if self._ignore_pattern: + logger.info(f"Applying ignore pattern: {self._ignore_pattern}") + await self._requests.clear_failed() + + try: + videos = self.walk_fs(path) + + self._info = await self._client.get_videos_info() + + to_register = videos - self._info.paths + to_delete = self._info.paths - videos if remove_deleted else set() + + if ( + not any(to_register) + and any(to_delete) + and len(to_delete) == len(self._info.paths) + ): + logger.warning("All video files are unavailable. 
Check your disks.") + return + + # delete stale files before creating new ones to prevent potential conflicts + if to_delete: + logger.info("Removing %d stale files.", len(to_delete)) + await self._client.delete_videos(to_delete) + + if to_register: + logger.info("Found %d new files to register.", len(to_register)) + await self._register(to_register) + if self._info.unmatched: + logger.info( + "Retrying & updating %d unmatched files.", len(self._info.unmatched) + ) + await self._register(self._info.unmatched) + + logger.info("Scan finished for %s.", path) + except Exception as e: + logger.error("Unexpected error while running scan.", exc_info=e) + + async def monitor(self): + logger.info(f"Watching for new files in {self._root_path}") + async for changes in awatch(self._root_path, ignore_permission_denied=True): + try: + for event, file in changes: + if not isdir(file) and not is_video(file): + continue + if ( + self._ignore_pattern and self._ignore_pattern.match(file) + ) or is_ignored_path(file): + logger.info("Ignoring event %s for file %s", event, file) + continue + + match event: + case Change.added if isdir(file): + logger.info("New dir found: %s", file) + await self.scan(file) + case Change.added: + logger.info("New video found: %s", file) + await self._register([file]) + case Change.deleted: + logger.info("Delete video at: %s", file) + await self._client.delete_videos([file]) + case Change.modified: + pass + except Exception as e: + logger.error("Unexpected error while monitoring files.", exc_info=e) + + async def _register(self, videos: list[str] | set[str]): + # TODO: we should probably chunk those + vids: list[Video] = [] + for path in list(videos): + try: + vid = await identify(path) + vid = self._match(vid) + vids.append(vid) + except Exception as e: + logger.error("Couldn't identify %s.", path, exc_info=e) + created = await self._client.create_videos(vids) + + await self._requests.enqueue( + [ + Request( + kind=x.guess.kind, + title=x.guess.title, + year=next(iter(x.guess.years), None), + external_id=x.guess.external_id, + videos=[Request.Video(id=x.id, episodes=x.guess.episodes)], + ) + for x in created + if not any(x.entries) and x.guess.kind != "extra" + ] + ) + + def _match(self, video: Video) -> Video: + video.for_ = [] + + year_info = ( + self._info.guesses[video.guess.title] + if video.guess.title in self._info.guesses + else {} + ) + slugs = set( + x + for x in ( + [ + year_info[str(y)].slug if str(y) in year_info else None + for y in video.guess.years + ] + + ([year_info["unknown"].slug] if "unknown" in year_info else []) + ) + if x is not None + ) + + if video.guess.kind == "movie": + for slug in slugs: + video.for_.append(For.Movie(movie=slug)) + + for k, v in video.guess.external_id.items(): + video.for_.append( + For.ExternalId(external_id={k: MetadataId(data_id=v)}) + ) + else: + for ep in video.guess.episodes: + for slug in slugs: + video.for_.append( + For.Episode(serie=slug, season=ep.season, episode=ep.episode) + if ep.season is not None and ep.season != 0 + else For.Special(serie=slug, special=ep.episode) + ) + + for k, v in video.guess.external_id.items(): + video.for_.append( + For.ExternalId( + external_id={ + k: EpisodeId( + serie_id=v, season=ep.season, episode=ep.episode + ) + } + ) + ) + + # TODO: handle specials & movie as episodes (needs animelist or thexem) + return video + + def walk_fs(self, root_path: str) -> set[str]: + videos: set[str] = set() + for dirpath, dirnames, files in os.walk(root_path): + # Skip directories with a `.ignore` file + 
if ".ignore" in files: + # Prevents os.walk from descending into this directory + dirnames.clear() + continue + + for file in files: + file_path = os.path.join(dirpath, file) + # Apply ignore pattern, if any + if self._ignore_pattern and self._ignore_pattern.match(file_path): + continue + if is_video(file_path): + videos.add(file_path) + return videos + + +def is_ignored_path(path: str) -> bool: + current_path = path + # Traverse up to the root directory + while current_path != "/": + if exists(join(current_path, ".ignore")): + return True + current_path = dirname(current_path) + return False + + +def is_video(path: str) -> bool: + (mime, _) = guess_file_type(path, strict=False) + return mime is not None and mime.startswith("video/") diff --git a/scanner/scanner/identifiers/guess/guess.py b/scanner/scanner/identifiers/guess/guess.py new file mode 100644 index 00000000..c1a2b32f --- /dev/null +++ b/scanner/scanner/identifiers/guess/guess.py @@ -0,0 +1,29 @@ +from typing import Any, cast + +from guessit.api import default_api +from rebulk import Rebulk +from rebulk.match import Match + +from . import rules + +default_api.configure({}) +rblk = cast(Rebulk, default_api.rebulk).rules(rules) + + +def guessit( + name: str, + *, + expected_titles: list[str] = [], + extra_flags: dict[str, Any] = {}, +) -> dict[str, list[Match]]: + return default_api.guessit( + name, + { + "episode_prefer_number": True, + "excludes": "language", + "expected_title": expected_titles, + "enforce_list": True, + "advanced": True, + } + | extra_flags, + ) diff --git a/scanner/matcher/parser/rules.py b/scanner/scanner/identifiers/guess/rules.py similarity index 81% rename from scanner/matcher/parser/rules.py rename to scanner/scanner/identifiers/guess/rules.py index f304412c..2baaa588 100644 --- a/scanner/matcher/parser/rules.py +++ b/scanner/scanner/identifiers/guess/rules.py @@ -1,10 +1,11 @@ # Read that for examples/rules: https://github.com/pymedusa/Medusa/blob/master/medusa/name_parser/rules/rules.py -from logging import getLogger -from typing import Any, List, Optional, cast -from rebulk import Rule, RemoveMatch, AppendMatch, POST_PROCESS -from rebulk.match import Matches, Match from copy import copy +from logging import getLogger +from typing import Any, cast, override + +from rebulk import POST_PROCESS, AppendMatch, RemoveMatch, Rule +from rebulk.match import Match, Matches logger = getLogger(__name__) @@ -51,11 +52,12 @@ class UnlistTitles(Rule): priority = POST_PROCESS consequence = [RemoveMatch, AppendMatch] + @override def when(self, matches: Matches, context) -> Any: - fileparts: List[Match] = matches.markers.named("path") # type: ignore + fileparts: list[Match] = matches.markers.named("path") # type: ignore for part in fileparts: - titles: List[Match] = matches.range( + titles: list[Match] = matches.range( part.start, part.end, lambda x: x.name == "title" ) # type: ignore @@ -65,9 +67,9 @@ class UnlistTitles(Rule): title = copy(titles[0]) for nmatch in titles[1:]: # Check if titles are next to each other, if they are not ignore it. 
- next: List[Match] = matches.next(title) # type: ignore + next: list[Match] = matches.next(title) # type: ignore if not next or next[0] != nmatch: - logger.warn(f"Ignoring potential part of title: {nmatch.value}") + logger.warning(f"Ignoring potential part of title: {nmatch.value}") continue title.end = nmatch.end @@ -106,14 +108,15 @@ class MultipleSeasonRule(Rule): priority = POST_PROCESS consequence = [RemoveMatch, AppendMatch] + @override def when(self, matches: Matches, context) -> Any: - seasons: List[Match] = matches.named("season") # type: ignore + seasons: list[Match] = matches.named("season") # type: ignore if not seasons: return # Only apply this rule if all seasons are due to the same match - initiator: Optional[Match] = seasons[0].initiator + initiator: Match | None = seasons[0].initiator if not initiator or any( True for match in seasons if match.initiator != initiator ): @@ -129,7 +132,7 @@ class MultipleSeasonRule(Rule): try: episodes = [int(x) for x in new_episodes] - parents: List[Match] = [match.parent for match in to_remove] # type: ignore + parents: list[Match] = [match.parent for match in to_remove] # type: ignore for episode in episodes: smatch = next( x @@ -176,12 +179,13 @@ class SeasonYearDedup(Rule): """ # This rules does the opposite of the YearSeason rule of guessit (with POST_PROCESS priority) - # To overide it, we need the -1. (rule: https://github.com/guessit-io/guessit/blob/develop/guessit/rules/processors.py#L195) + # To override it, we need the -1. (rule: https://github.com/guessit-io/guessit/blob/develop/guessit/rules/processors.py#L195) priority = POST_PROCESS - 1 consequence = RemoveMatch + @override def when(self, matches: Matches, context) -> Any: - season: List[Match] = matches.named("season") # type: ignore - year: List[Match] = matches.named("year") # type: ignore + season: list[Match] = matches.named("season") # type: ignore + year: list[Match] = matches.named("year") # type: ignore if len(season) == 1 and len(year) == 1 and season[0].value == year[0].value: return season diff --git a/scanner/scanner/identifiers/identify.py b/scanner/scanner/identifiers/identify.py new file mode 100644 index 00000000..393efcc7 --- /dev/null +++ b/scanner/scanner/identifiers/identify.py @@ -0,0 +1,88 @@ +from collections.abc import Awaitable +from hashlib import sha256 +from itertools import zip_longest +from logging import getLogger +from typing import Callable, Literal, cast + +from rebulk.match import Match + +from ..models.videos import Guess, Video +from .guess.guess import guessit + +logger = getLogger(__name__) + +pipeline: list[Callable[[str, Guess], Awaitable[Guess]]] = [ + # TODO: add nfo scanner + # TODO: add thexem + # TODO: add anilist +] + + +async def identify(path: str) -> Video: + raw = guessit(path, expected_titles=[]) + + # guessit should only return one (according to the doc) + title = raw.get("title", [])[0] + kind = raw.get("type", [])[0] + version = next(iter(raw.get("version", [])), None) + # apparently guessit can return multiples but tbh idk what to do with + # multiples part. 
we'll just ignore them for now + part = next(iter(raw.get("part", [])), None) + + years = raw.get("year", []) + seasons = raw.get("season", []) + episodes = raw.get("episode", []) + + # just strip the version & part number from the path + rendering_path = "".join( + c + for i, c in enumerate(path) + if not (version and version.start <= i < version.end) + and not (part and part.start <= i < part.end) + ) + + guess = Guess( + title=cast(str, title.value), + kind=cast(Literal["episode", "movie"], kind.value), + extra_kind=None, + years=[cast(int, y.value) for y in years], + episodes=[ + Guess.Episode(season=cast(int, s.value), episode=cast(int, e.value)) + for s, e in zip_longest( + seasons, + episodes, + fillvalue=seasons[-1] if any(seasons) else Match(0, 0, value=1), + ) + ], + external_id={}, + from_="guessit", + raw={ + k: [x.value if isinstance(x.value, int) else str(x.value) for x in v] + for k, v in raw.items() + }, + ) + + for step in pipeline: + try: + guess = await step(path, guess) + except Exception as e: + logger.error("Couldn't run %s.", step.__name__, exc_info=e) + + return Video( + path=path, + rendering=sha256(rendering_path.encode()).hexdigest(), + part=cast(int, part.value) if part else None, + version=cast(int, version.value) if version else 1, + guess=guess, + ) + + +if __name__ == "__main__": + import asyncio + import sys + + async def main(): + ret = await identify(sys.argv[1]) + print(ret.model_dump_json(indent=4, by_alias=True)) + + asyncio.run(main()) diff --git a/scanner/scanner/jwt.py b/scanner/scanner/jwt.py new file mode 100644 index 00000000..1f02b8cf --- /dev/null +++ b/scanner/scanner/jwt.py @@ -0,0 +1,46 @@ +import os +from logging import getLogger +from typing import Annotated + +import jwt +from fastapi import Depends, HTTPException +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer, SecurityScopes +from jwt import PyJWKClient + +logger = getLogger(__name__) + +jwks_client = PyJWKClient( + os.environ.get("JWKS_URL", "http://auth:4568/.well-known/jwks.json") +) + +security = HTTPBearer(scheme_name="Bearer") + + +def validate_bearer( + token: Annotated[HTTPAuthorizationCredentials, Depends(security)], + perms: SecurityScopes, +): + try: + payload = jwt.decode( + token.credentials, + jwks_client.get_signing_key_from_jwt(token.credentials).key, + algorithms=["RS256"], + issuer=os.environ.get("JWT_ISSUER"), + ) + for scope in perms.scopes: + if scope not in payload["permissions"]: + raise HTTPException( + status_code=403, + detail=f"Missing permissions {', '.join(perms.scopes)}", + headers={ + "WWW-Authenticate": f'Bearer permissions="{",".join(perms.scopes)}"' + }, + ) + return payload + except Exception as e: + logger.error("Failed to parse token", exc_info=e) + raise HTTPException( + status_code=403, + detail="Could not validate credentials", + headers={"WWW-Authenticate": "Bearer"}, + ) from e diff --git a/scanner/scanner/models/collection.py b/scanner/scanner/models/collection.py new file mode 100644 index 00000000..1229357b --- /dev/null +++ b/scanner/scanner/models/collection.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from ..utils import Language, Model +from .genre import Genre +from .metadataid import MetadataId + + +class Collection(Model): + slug: str + original_language: Language | None + genres: list[Genre] + rating: int | None + external_id: dict[str, MetadataId] + + translations: dict[Language, CollectionTranslation] = {} + + +class CollectionTranslation(Model): + name: str + latin_name: str | None + description: str | 
None + tagline: str | None + aliases: list[str] + tags: list[str] + + poster: str | None + thumbnail: str | None + banner: str | None + logo: str | None diff --git a/scanner/scanner/models/entry.py b/scanner/scanner/models/entry.py new file mode 100644 index 00000000..ae5cfe56 --- /dev/null +++ b/scanner/scanner/models/entry.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from datetime import date +from typing import Literal + +from ..utils import Language, Model +from .metadataid import EpisodeId, MetadataId + + +class Entry(Model): + kind: Literal["episode", "movie", "special"] + order: float + runtime: int | None + air_date: date | None + thumbnail: str | None + + # Movie-specific fields + slug: str | None + + # Episode-specific fields + season_number: int | None + episode_number: int | None + + # Special-specific fields + number: int | None + + external_id: dict[str, MetadataId | EpisodeId] + translations: dict[Language, EntryTranslation] = {} + videos: list[str] = [] + + +class EntryTranslation(Model): + name: str | None + description: str | None + tagline: str | None + poster: str | None diff --git a/scanner/scanner/models/extra.py b/scanner/scanner/models/extra.py new file mode 100644 index 00000000..e495ecff --- /dev/null +++ b/scanner/scanner/models/extra.py @@ -0,0 +1,21 @@ +from enum import StrEnum + +from ..utils import Model + + +class ExtraKind(StrEnum): + OTHER = "other" + TRAILER = "trailer" + INTERVIEW = "interview" + BEHIND_THE_SCENE = "behind-the-scene" + DELETED_SCENE = "deleted-scene" + BLOOPER = "blooper" + + +class Extra(Model): + kind: ExtraKind + slug: str + name: str + runtime: int | None + thumbnail: str | None + video: str diff --git a/scanner/scanner/models/genre.py b/scanner/scanner/models/genre.py new file mode 100644 index 00000000..c6651f81 --- /dev/null +++ b/scanner/scanner/models/genre.py @@ -0,0 +1,27 @@ +from enum import StrEnum + + +class Genre(StrEnum): + ACTION = "action" + ADVENTURE = "adventure" + ANIMATION = "animation" + COMEDY = "comedy" + CRIME = "crime" + DOCUMENTARY = "documentary" + DRAMA = "drama" + FAMILY = "family" + FANTASY = "fantasy" + HISTORY = "history" + HORROR = "horror" + MUSIC = "music" + MYSTERY = "mystery" + ROMANCE = "romance" + SCIENCE_FICTION = "science-fiction" + THRILLER = "thriller" + WAR = "war" + WESTERN = "western" + KIDS = "kids" + REALITY = "reality" + POLITICS = "politics" + SOAP = "soap" + TALK = "talk" diff --git a/scanner/scanner/models/metadataid.py b/scanner/scanner/models/metadataid.py new file mode 100644 index 00000000..c14ab21f --- /dev/null +++ b/scanner/scanner/models/metadataid.py @@ -0,0 +1,19 @@ +from ..utils import Model + + +class MetadataId(Model): + data_id: str + link: str | None = None + + +class SeasonId(Model): + serie_id: str + season: int + link: str | None = None + + +class EpisodeId(Model): + serie_id: str + season: int | None + episode: int + link: str | None = None diff --git a/scanner/scanner/models/movie.py b/scanner/scanner/models/movie.py new file mode 100644 index 00000000..756881bc --- /dev/null +++ b/scanner/scanner/models/movie.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +from datetime import date +from enum import StrEnum + +from ..utils import Language, Model +from .collection import Collection +from .genre import Genre +from .metadataid import MetadataId +from .staff import Staff +from .studio import Studio + + +class MovieStatus(StrEnum): + UNKNOWN = "unknown" + FINISHED = "finished" + PLANNED = "planned" + + +class Movie(Model): + slug: str + 
original_language: Language | None + genres: list[Genre] + rating: int | None + status: MovieStatus + runtime: int | None + air_date: date | None + + external_id: dict[str, MetadataId] + translations: dict[Language, MovieTranslation] = {} + collections: list[Collection] = [] + studios: list[Studio] = [] + staff: list[Staff] = [] + videos: list[str] = [] + + +class MovieTranslation(Model): + name: str + latin_name: str | None + description: str | None + tagline: str | None + aliases: list[str] + tags: list[str] + + poster: str | None + thumbnail: str | None + banner: str | None + logo: str | None + trailer: str | None + + +class SearchMovie(Model): + slug: str + name: str + description: str | None + air_date: date | None + poster: str | None + original_language: Language | None + external_id: dict[str, MetadataId] diff --git a/scanner/scanner/models/request.py b/scanner/scanner/models/request.py new file mode 100644 index 00000000..55b5cb0c --- /dev/null +++ b/scanner/scanner/models/request.py @@ -0,0 +1,20 @@ +from __future__ import annotations +from typing import Literal + +from pydantic import Field + +from .videos import Guess +from ..utils import Model + + +class Request(Model, extra="allow"): + pk: int | None = Field(exclude=True, default=None) + kind: Literal["episode", "movie"] + title: str + year: int | None + external_id: dict[str, str] + videos: list[Request.Video] + + class Video(Model): + id: str + episodes: list[Guess.Episode] diff --git a/scanner/scanner/models/season.py b/scanner/scanner/models/season.py new file mode 100644 index 00000000..0de0c0f1 --- /dev/null +++ b/scanner/scanner/models/season.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from datetime import date +from typing import Any + +from pydantic import Field + +from ..utils import Language, Model +from .metadataid import SeasonId + + +class Season(Model): + season_number: int + start_air: date | None + end_air: date | None + external_id: dict[str, SeasonId] + translations: dict[Language, SeasonTranslation] = {} + extra: dict[str, Any] = Field(exclude=True) + + +class SeasonTranslation(Model): + name: str | None + description: str | None + poster: str | None + thumbnail: str | None + banner: str | None diff --git a/scanner/scanner/models/serie.py b/scanner/scanner/models/serie.py new file mode 100644 index 00000000..4e50f895 --- /dev/null +++ b/scanner/scanner/models/serie.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +from datetime import date +from enum import StrEnum + +from ..utils import Language, Model +from .collection import Collection +from .entry import Entry +from .extra import Extra +from .genre import Genre +from .metadataid import MetadataId +from .season import Season +from .staff import Staff +from .studio import Studio + + +class SerieStatus(StrEnum): + UNKNOWN = "unknown" + FINISHED = "finished" + AIRING = "airing" + PLANNED = "planned" + + +class Serie(Model): + slug: str + original_language: Language | None + genres: list[Genre] + rating: int | None + status: SerieStatus + runtime: int | None + start_air: date | None + end_air: date | None + + external_id: dict[str, MetadataId] + translations: dict[Language, SerieTranslation] = {} + seasons: list[Season] = [] + entries: list[Entry] = [] + extra: list[Extra] = [] + collections: list[Collection] = [] + studios: list[Studio] = [] + staff: list[Staff] = [] + + +class SerieTranslation(Model): + name: str + latin_name: str | None + description: str | None + tagline: str | None + aliases: list[str] + tags: list[str] + + 
poster: str | None + thumbnail: str | None + banner: str | None + logo: str | None + trailer: str | None + + +class SearchSerie(Model): + slug: str + name: str + description: str | None + start_air: date | None + end_air: date | None + poster: str | None + original_language: Language | None + external_id: dict[str, MetadataId] diff --git a/scanner/scanner/models/staff.py b/scanner/scanner/models/staff.py new file mode 100644 index 00000000..dfe82bc3 --- /dev/null +++ b/scanner/scanner/models/staff.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from enum import StrEnum + +from ..utils import Model +from .metadataid import MetadataId + + +class Role(StrEnum): + ACTOR = "actor" + DIRECTOR = "director" + WRITTER = "writter" + PRODUCER = "producer" + MUSIC = "music" + CREW = "crew" + OTHER = "other" + + +class Staff(Model): + kind: Role + character: Character | None + staff: Person + + +class Character(Model): + name: str + latin_name: str | None + image: str | None + + +class Person(Model): + slug: str + name: str + latin_name: str | None + image: str | None + external_id: dict[str, MetadataId] diff --git a/scanner/scanner/models/studio.py b/scanner/scanner/models/studio.py new file mode 100644 index 00000000..eff7840b --- /dev/null +++ b/scanner/scanner/models/studio.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from ..utils import Model +from .metadataid import MetadataId + + +class Studio(Model): + slug: str + external_id: dict[str, MetadataId] + translations: dict[str, StudioTranslation] = {} + + +class StudioTranslation(Model): + name: str + logo: str | None diff --git a/scanner/scanner/models/videos.py b/scanner/scanner/models/videos.py new file mode 100644 index 00000000..72a21a06 --- /dev/null +++ b/scanner/scanner/models/videos.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +from typing import Any, Literal + +from ..utils import Model +from .extra import ExtraKind +from .metadataid import EpisodeId, MetadataId + + +class Resource(Model): + id: str + slug: str + + +class VideoInfo(Model): + paths: set[str] + unmatched: set[str] + guesses: dict[str, dict[str, Resource]] + + +class Guess(Model, extra="allow"): + title: str + kind: Literal["episode"] | Literal["movie"] | Literal["extra"] + extra_kind: ExtraKind | None + years: list[int] + episodes: list[Guess.Episode] + external_id: dict[str, str] + raw: dict[str, Any] = {} + + from_: str + history: list[Guess] = [] + + class Episode(Model): + season: int | None + episode: int + + +_ = Guess.model_rebuild() + + +class For(Model): + class Slug(Model): + slug: str + + class ExternalId(Model): + external_id: dict[str, MetadataId | EpisodeId] + + class Movie(Model): + movie: str + + class Episode(Model): + serie: str + season: int + episode: int + + class Order(Model): + serie: str + order: float + + class Special(Model): + serie: str + special: int + + +class Video(Model): + path: str + rendering: str + part: int | None + version: int = 1 + guess: Guess + for_: list[ + For.Slug | For.ExternalId | For.Movie | For.Episode | For.Order | For.Special + ] = [] + + +class VideoCreated(Model): + id: str + path: str + guess: Guess + entries: list[Resource] + + +class VideoLink(Model): + id: str + for_: list[ + For.Slug | For.ExternalId | For.Movie | For.Episode | For.Order | For.Special + ] diff --git a/scanner/scanner/monitor.py b/scanner/scanner/monitor.py deleted file mode 100644 index 1584622f..00000000 --- a/scanner/scanner/monitor.py +++ /dev/null @@ -1,48 +0,0 @@ -from logging import getLogger -from os.path 
import isdir, dirname, exists, join -from watchfiles import awatch, Change -from .publisher import Publisher -from .scanner import scan, get_ignore_pattern -from providers.kyoo_client import KyooClient - -logger = getLogger(__name__) - - -def is_ignored_path(path: str) -> bool: - """Check if the path is within a directory that contains a `.ignore` file.""" - current_path = path - while current_path != "/": # Traverse up to the root directory - if exists(join(current_path, ".ignore")): - return True - current_path = dirname(current_path) - return False - - -async def monitor(path: str, publisher: Publisher, client: KyooClient): - ignore_pattern = get_ignore_pattern() - async for changes in awatch(path, ignore_permission_denied=True): - for event, file in changes: - # Check for ignore conditions - if is_ignored_path(file): - logger.info( - "Ignoring event %s for file %s (due to .ignore file)", event, file - ) - continue - if ignore_pattern and ignore_pattern.match(file): - logger.info( - "Ignoring event %s for file %s (due to IGNORE_PATTERN)", event, file - ) - continue - - logger.info("Change %s occurred for file %s", event, file) - match event: - case Change.added if isdir(file): - await scan(file, publisher, client) - case Change.added: - await publisher.add(file) - case Change.deleted: - await publisher.delete(file) - case Change.modified: - pass - case _: - logger.warning("Unknown file event %s (for file %s)", event, file) diff --git a/scanner/scanner/providers/composite.py b/scanner/scanner/providers/composite.py new file mode 100644 index 00000000..dc52af8d --- /dev/null +++ b/scanner/scanner/providers/composite.py @@ -0,0 +1,42 @@ +from typing import override + +from langcodes import Language + +from ..models.movie import Movie, SearchMovie +from ..models.serie import SearchSerie, Serie +from .provider import Provider + + +class CompositeProvider(Provider): + def __init__(self, themoviedb: Provider): + self._tvdb: Provider = None # type: ignore + self._themoviedb = themoviedb + + @property + @override + def name(self): + return "composite" + + @override + async def search_movies( + self, title: str, year: int | None, *, language: list[Language] + ) -> list[SearchMovie]: + return await self._themoviedb.search_movies(title, year, language=language) + + @override + async def get_movie(self, external_id: dict[str, str]) -> Movie | None: + return await self._themoviedb.get_movie(external_id) + + @override + async def search_series( + self, title: str, year: int | None, *, language: list[Language] + ) -> list[SearchSerie]: + return await self._tvdb.search_series(title, year, language=language) + + @override + async def get_serie(self, external_id: dict[str, str]) -> Serie | None: + ret = await self._tvdb.get_serie(external_id) + if ret is None: + return None + # TODO: complete metadata with info from tmdb + return ret diff --git a/scanner/scanner/providers/provider.py b/scanner/scanner/providers/provider.py new file mode 100644 index 00000000..4f732fad --- /dev/null +++ b/scanner/scanner/providers/provider.py @@ -0,0 +1,83 @@ +from abc import ABC, abstractmethod +from logging import getLogger + +from langcodes import Language + +from ..models.movie import Movie, SearchMovie +from ..models.serie import SearchSerie, Serie + +logger = getLogger(__name__) + + +class Provider(ABC): + @property + @abstractmethod + def name(self) -> str: + raise NotImplementedError + + @abstractmethod + async def search_movies( + self, title: str, year: int | None, *, language: list[Language] + ) -> 
list[SearchMovie]: + raise NotImplementedError + + @abstractmethod + async def search_series( + self, title: str, year: int | None, *, language: list[Language] + ) -> list[SearchSerie]: + raise NotImplementedError + + @abstractmethod + async def get_movie(self, external_id: dict[str, str]) -> Movie | None: + raise NotImplementedError + + @abstractmethod + async def get_serie(self, external_id: dict[str, str]) -> Serie | None: + raise NotImplementedError + + async def find_movie( + self, + title: str, + year: int | None, + external_id: dict[str, str], + ) -> Movie: + ret = await self.get_movie(external_id) + if ret is not None: + return ret + search = await self.search_movies(title, year, language=[]) + if not any(search): + raise ProviderError( + f"Couldn't find a movie with title {title}. (year: {year}" + ) + ret = await self.get_movie( + {k: v.data_id for k, v in search[0].external_id.items()} + ) + if not ret: + raise ValueError() + return ret + + async def find_serie( + self, + title: str, + year: int | None, + external_id: dict[str, str], + ) -> Serie: + ret = await self.get_serie(external_id) + if ret is not None: + return ret + search = await self.search_series(title, year, language=[]) + if not any(search): + raise ProviderError( + f"Couldn't find a serie with title {title}. (year: {year}" + ) + ret = await self.get_serie( + {k: v.data_id for k, v in search[0].external_id.items()} + ) + if not ret: + raise ValueError() + return ret + + +class ProviderError(RuntimeError): + def __init__(self, *args: object) -> None: + super().__init__(*args) diff --git a/scanner/scanner/providers/themoviedatabase.py b/scanner/scanner/providers/themoviedatabase.py new file mode 100644 index 00000000..bb214512 --- /dev/null +++ b/scanner/scanner/providers/themoviedatabase.py @@ -0,0 +1,722 @@ +import asyncio +import os +from collections.abc import Generator +from datetime import datetime +from logging import getLogger +from statistics import mean +from types import TracebackType +from typing import Any, cast, override + +from aiohttp import ClientSession +from langcodes import Language + +from ..models.collection import Collection, CollectionTranslation +from ..models.entry import Entry, EntryTranslation +from ..models.genre import Genre +from ..models.metadataid import EpisodeId, MetadataId, SeasonId +from ..models.movie import Movie, MovieStatus, MovieTranslation, SearchMovie +from ..models.season import Season, SeasonTranslation +from ..models.serie import SearchSerie, Serie, SerieStatus, SerieTranslation +from ..models.staff import Character, Person, Role, Staff +from ..models.studio import Studio, StudioTranslation +from ..utils import clean, to_slug +from .provider import Provider, ProviderError + +logger = getLogger(__name__) + + +class TheMovieDatabase(Provider): + THEMOVIEDB_API_ACCESS_TOKEN = "eyJhbGciOiJIUzI1NiJ9.eyJhdWQiOiJjOWYzMjhhMDEwMTFiMjhmMjI0ODM3MTczOTVmYzNmYSIsIm5iZiI6MTU4MTYzMTExOS44NjgsInN1YiI6IjVlNDVjNjhmODNlZTY3MDAxMTFmMmU5NiIsInNjb3BlcyI6WyJhcGlfcmVhZCJdLCJ2ZXJzaW9uIjoxfQ.CeXrQwgB3roCAVs-Z2ayLRx99VIJbym7XSpcRjGzyLA" + + def __init__(self) -> None: + super().__init__() + bearer = ( + os.environ.get("THEMOVIEDB_API_ACCESS_TOKEN") + or TheMovieDatabase.THEMOVIEDB_API_ACCESS_TOKEN + ) + self._client = ClientSession( + base_url="https://api.themoviedb.org/3/", + headers={ + "User-Agent": "kyoo scanner v5", + "Authorization": f"Bearer {bearer}", + }, + ) + self._image_path = "https://image.tmdb.org/t/p/original" + self._genre_map = { + 28: Genre.ACTION, + 12: Genre.ADVENTURE, + 16: 
Genre.ANIMATION, + 35: Genre.COMEDY, + 80: Genre.CRIME, + 99: Genre.DOCUMENTARY, + 18: Genre.DRAMA, + 10751: Genre.FAMILY, + 14: Genre.FANTASY, + 36: Genre.HISTORY, + 27: Genre.HORROR, + 10402: Genre.MUSIC, + 9648: Genre.MYSTERY, + 10749: Genre.ROMANCE, + 878: Genre.SCIENCE_FICTION, + 53: Genre.THRILLER, + 10752: Genre.WAR, + 37: Genre.WESTERN, + 10759: [Genre.ACTION, Genre.ADVENTURE], + 10762: Genre.KIDS, + 10764: Genre.REALITY, + 10765: [Genre.SCIENCE_FICTION, Genre.FANTASY], + 10766: Genre.SOAP, + 10767: Genre.TALK, + 10768: [Genre.WAR, Genre.POLITICS], + } + self._roles_map = { + "Camera": Role.OTHER, + "Costume & Make-Up": Role.OTHER, + "Lighting": Role.OTHER, + "Art": Role.OTHER, + "Visual Effects": Role.OTHER, + "Crew": Role.CREW, + "Writing": Role.WRITTER, + "Production": Role.PRODUCER, + "Editing": Role.OTHER, + "Directing": Role.DIRECTOR, + "Sound": Role.MUSIC, + "Actors": Role.ACTOR, + } + + async def __aenter__(self): + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ): + await self._client.close() + + @property + @override + def name(self) -> str: + return "themoviedatabase" + + @override + async def search_movies( + self, title: str, year: int | None, *, language: list[Language] + ) -> list[SearchMovie]: + search = ( + await self._get( + "search/movie", + params={ + "query": title, + "year": year, + "languages": [str(x) for x in language], + }, + ) + )["results"] + search = self._sort_search(search, title, year) + return [ + SearchMovie( + slug=to_slug(x["title"]), + name=x["title"], + description=x["overview"], + air_date=datetime.strptime(x["release_date"], "%Y-%m-%d").date() + if x["release_date"] + else None, + poster=self._map_image(x["poster_path"]), + original_language=Language.get(x["original_language"]), + external_id={ + self.name: MetadataId( + data_id=str(x["id"]), + link=f"https://www.themoviedb.org/movie/{x['id']}", + ) + }, + ) + for x in search + ] + + @override + async def get_movie(self, external_id: dict[str, str]) -> Movie | None: + # TODO: fallback to search via another id + if self.name not in external_id: + return None + + movie = await self._get( + f"movie/{external_id[self.name]}", + params={ + "append_to_response": "alternative_titles,videos,credits,keywords,images,translations", + }, + ) + + return Movie( + slug=to_slug(movie["title"]), + original_language=Language.get(movie["original_language"]), + genres=self._map_genres(x["id"] for x in movie["genres"]), + rating=round(float(movie["vote_average"]) * 10), + status=MovieStatus.FINISHED + if movie["status"] == "Released" + else MovieStatus.PLANNED, + runtime=int(movie["runtime"]) if movie["runtime"] is not None else None, + air_date=datetime.strptime(movie["release_date"], "%Y-%m-%d").date() + if movie["release_date"] + else None, + external_id=( + { + self.name: MetadataId( + data_id=str(movie["id"]), + link=f"https://www.themoviedb.org/movie/{movie['id']}", + ) + } + | ( + { + "imdb": MetadataId( + data_id=str(movie["imdb_id"]), + link=f"https://www.imdb.com/title/{movie['imdb_id']}", + ) + } + if movie["imdb_id"] + else {} + ) + ), + translations={ + Language.get( + f"{trans['iso_639_1']}-{trans['iso_3166_1']}" + ): MovieTranslation( + name=clean(trans["data"]["title"]) + or ( + clean(movie["original_title"]) + if movie["original_language"] == trans["iso_639_1"] + else None + ) + or movie["title"], + latin_name=next( + ( + x["title"] + for x in movie["alternative_titles"]["titles"] + if 
x["iso_3166_1"] == trans["iso_3166_1"] + and x["type"] == "Romaji" + ), + None, + ), + description=clean(trans["data"]["overview"]), + tagline=clean(trans["data"]["tagline"]), + aliases=[ + x["title"] + for x in movie["alternative_titles"]["titles"] + if x["iso_3166_1"] == trans["iso_3166_1"] + ], + tags=[x["name"] for x in movie["keywords"]["keywords"]], + poster=self._pick_image(movie, trans["iso_639_1"], "posters"), + logo=self._pick_image(movie, trans["iso_639_1"], "logos"), + banner=None, + thumbnail=self._pick_image(movie, trans["iso_639_1"], "backdrops"), + trailer=None, + # TODO: should the trailer be added? or all of them as extra? + # [ + # f"https://www.youtube.com/watch?v={x['key']}" + # for x in movie["videos"]["results"] + # if x["type"] == "Trailer" and x["site"] == "YouTube" + # ], + ) + for trans in movie["translations"]["translations"] + }, + collections=[ + await self._get_collection(movie["belongs_to_collection"]["id"]) + ] + if movie["belongs_to_collection"] is not None + else [], + studios=[self._map_studio(x) for x in movie["production_companies"]], + # TODO: add crew + staff=[self._map_staff(x) for x in movie["credits"]["cast"]], + ) + + @override + async def search_series( + self, title: str, year: int | None, *, language: list[Language] + ) -> list[SearchSerie]: + search = ( + await self._get( + "search/tv", + params={ + "query": title, + "year": year, + "languages": [str(x) for x in language], + }, + ) + )["results"] + search = self._sort_search(search, title, year) + return [ + SearchSerie( + slug=to_slug(x["name"]), + name=x["name"], + description=x["overview"], + start_air=datetime.strptime(x["first_air_date"], "%Y-%m-%d").date() + if x["first_air_date"] + else None, + end_air=None, + poster=self._map_image(x["poster_path"]), + original_language=Language.get(x["original_language"]), + external_id={ + self.name: MetadataId( + data_id=str(x["id"]), + link=f"https://www.themoviedb.org/tv/{x['id']}", + ) + }, + ) + for x in search + ] + + @override + async def get_serie(self, external_id: dict[str, str]) -> Serie | None: + # TODO: fallback to search via another id + if self.name not in external_id: + return None + + serie = await self._get( + f"tv/{external_id[self.name]}", + params={ + "append_to_response": "alternative_titles,videos,credits,keywords,images,external_ids,translations", + }, + ) + seasons = await asyncio.gather( + *[ + self._get_season(serie["id"], x["season_number"]) + for x in serie["seasons"] + ] + ) + entries = await self._get_all_entries(serie["id"], seasons) + + return Serie( + slug=to_slug(serie["name"]), + original_language=Language.get(serie["original_language"]), + genres=self._map_genres(x["id"] for x in serie["genres"]), + rating=round(float(serie["vote_average"]) * 10), + status=SerieStatus.FINISHED + if serie["status"] == "Released" + else SerieStatus.AIRING + if serie["in_production"] + else SerieStatus.FINISHED, + runtime=serie["last_episode_to_air"]["runtime"] + if serie["last_episode_to_air"] + else None, + start_air=datetime.strptime(serie["first_air_date"], "%Y-%m-%d").date() + if serie["first_air_date"] + else None, + end_air=datetime.strptime(serie["last_air_date"], "%Y-%m-%d").date() + if serie["last_air_date"] + else None, + external_id={ + self.name: MetadataId( + data_id=str((serie["id"])), + link=f"https://www.themoviedb.org/tv/{serie['id']}", + ), + } + | ( + { + "imdb": MetadataId( + data_id=str(serie["external_ids"]["imdb_id"]), + link=f"https://www.imdb.com/title/{serie['external_ids']['imdb_id']}", + ) + } + if 
serie["external_ids"]["imdb_id"] + else {} + ) + | ( + { + "tvdb": MetadataId( + data_id=str(serie["external_ids"]["tvdb_id"]), + link=None, + ) + } + if serie["external_ids"]["tvdb_id"] + else {} + ), + translations={ + Language.get( + f"{trans['iso_639_1']}-{trans['iso_3166_1']}" + ): SerieTranslation( + name=clean(trans["data"]["name"]) + or ( + clean(serie["original_name"]) + if serie["original_language"] == trans["iso_639_1"] + else None + ) + or serie["name"], + latin_name=next( + ( + x["title"] + for x in serie["alternative_titles"]["results"] + if x["iso_3166_1"] == trans["iso_3166_1"] + and x["type"] == "Romaji" + ), + None, + ), + description=clean(trans["data"]["overview"]), + tagline=clean(trans["data"]["tagline"]), + aliases=[ + x["title"] + for x in serie["alternative_titles"]["results"] + if x["iso_3166_1"] == trans["iso_3166_1"] + ], + tags=[x["name"] for x in serie["keywords"]["results"]], + poster=self._pick_image(serie, trans["iso_639_1"], "posters"), + logo=self._pick_image(serie, trans["iso_639_1"], "logos"), + banner=None, + thumbnail=self._pick_image(serie, trans["iso_639_1"], "backdrops"), + trailer=None, + # TODO: should the trailer be added? or all of them as extra? + # [ + # f"https://www.youtube.com/watch?v={x['key']}" + # for x in show["videos"]["results"] + # if x["type"] == "Trailer" and x["site"] == "YouTube" + # ], + ) + for trans in serie["translations"]["translations"] + }, + seasons=seasons, + entries=entries, + extra=[], + collections=[], + studios=[self._map_studio(x) for x in serie["production_companies"]], + # TODO: add crew + staff=[self._map_staff(x) for x in serie["credits"]["cast"]], + ) + + async def _get_season(self, serie_id: str | int, season_number: int) -> Season: + season = await self._get( + f"tv/{serie_id}/season/{season_number}", + params={ + "append_to_response": "translations,images", + }, + ) + + return Season( + season_number=season["season_number"], + start_air=datetime.strptime(season["air_date"], "%Y-%m-%d").date() + if season["air_date"] + else None, + end_air=None, + external_id={ + self.name: SeasonId( + serie_id=str(serie_id), + season=season["season_number"], + link=f"https://www.themoviedb.org/tv/{serie_id}/season/{season['season_number']}", + ) + }, + translations={ + Language.get( + f"{trans['iso_639_1']}-{trans['iso_3166_1']}" + ): SeasonTranslation( + name=clean(trans["data"]["name"]), + description=clean(trans["data"]["overview"]), + poster=self._pick_image(season, trans["iso_639_1"], "posters"), + thumbnail=None, + banner=None, + ) + for trans in season["translations"]["translations"] + }, + extra={ + "first_entry": next( + (x["episode_number"] for x in season["episodes"]), None + ), + "entries_count": len(season["episodes"]), + }, + ) + + async def _get_all_entries( + self, serie_id: str | int, seasons: list[Season] + ) -> list[Entry]: + # TODO: batch those + ret = await asyncio.gather( + *[ + self._get_entry(serie_id, s.season_number, s.extra["first_entry"] + e) + for s in seasons + for e in range(0, s.extra["entries_count"]) + ] + ) + + # find the absolute ordering of entries (to set the `order` field) + try: + groups = await self._get(f"tv/{serie_id}/episode_groups") + group = max( + (x for x in groups["results"] if x["type"] == 2), + key=lambda x: x["episode_count"], + default=None, + ) + # if it doesn't have 75% of all episodes, it's probably unmaintained. 
keep default order + if group is None or group["episode_count"] < len(ret) // 1.5: + raise ProviderError("No valid absolute ordering group.") + + # groups of groups (each `episode_group` contains a `group` that acts like a season) + gog = await self._get(f"tv/episode_group/{group['id']}") + episodes = [ + ep + for grp in sorted(gog["groups"], key=lambda x: x["order"]) + for ep in sorted(grp["episodes"], key=lambda x: x["order"]) + ] + # the episode number of the first episode of each season + # this is because tmdb has some weird absolute groups, for example: + # one piece's s22e1089 is the first ep of s22. + # this is because episode_numbers simply don't reset after season start + # (eg s21e1088 is the last ep of s21) + season_starts = [s.extra["first_entry"] for s in seasons] + + if len(episodes) != len(ret): + logger.warning( + f"Incomplete absolute group for show {serie_id}. Filling missing values by assuming season/episode order is ascending." + ) + episodes += [ + {"season_number": s.season_number, "episode_number": e} + for s in seasons + for e in range(1, s.extra["entries_count"] + 1) + if not any( + x["season_number"] == s.season_number + and ( + x["episode_number"] == e + # take into account weird absolute (for example one piece, episodes are not reset to 1 when the season starts) + or x["episode_number"] + == season_starts[s.season_number - 1] + e + ) + for x in episodes + ) + ] + for ep in ret: + snbr = cast(int, ep.season_number) + enbr = cast(int, ep.episode_number) + ep.order = next( + # Using absolute + 1 since the array is 0based (absolute episode 1 is at index 0) + i + 1 + for i, x in enumerate(episodes) + if x["season_number"] == snbr + and ( + x["episode_number"] == enbr + # don't forget weird numbering + or x["episode_number"] == enbr + season_starts[snbr - 1] + ) + ) + except Exception as e: + if not isinstance(e, ProviderError): + logger.exception( + "Could not retrieve absolute ordering information", exc_info=e + ) + ret = sorted(ret, key=lambda ep: (ep.season_number, ep.episode_number)) + for order, ep in enumerate(ret): + ep.order = order + 1 + + return ret + + async def _get_entry( + self, + serie_id: str | int, + season: int, + episode_nbr: int, + ) -> Entry: + episode = await self._get( + f"tv/{serie_id}/season/{season}/episode/{episode_nbr}", + params={ + "append_to_response": "translations", + }, + ) + + return Entry( + kind="episode" if episode["season_number"] != 0 else "special", + order=0, + runtime=int(episode["runtime"]) if episode["runtime"] is not None else None, + air_date=datetime.strptime(episode["air_date"], "%Y-%m-%d").date() + if episode["air_date"] + else None, + thumbnail=self._map_image(episode["still_path"]), + slug=None, + season_number=episode["season_number"], + episode_number=episode["episode_number"], + number=episode["episode_number"], + external_id={ + self.name: EpisodeId( + serie_id=str(serie_id), + season=episode["season_number"], + episode=episode["episode_number"], + link=f"https://www.themoviedb.org/tv/{serie_id}/season/{episode['season_number']}/episode/{episode['episode_number']}", + ), + }, + translations={ + Language.get( + f"{trans['iso_639_1']}-{trans['iso_3166_1']}" + ): EntryTranslation( + name=clean(trans["data"]["name"]), + description=clean(trans["data"]["overview"]), + tagline=None, + poster=None, + ) + for trans in episode["translations"]["translations"] + }, + ) + + async def _get_collection(self, provider_id: str | int) -> Collection: + collection = await self._get( + f"collection/{provider_id}", + params={ + 
"append_to_response": "images,translations", + }, + ) + + return Collection( + slug=to_slug(collection["name"]), + # assume all parts are in the same language + original_language=Language.get(collection["parts"][0]["original_language"]), + genres=[ + y for x in collection["parts"] for y in self._map_genres(x["genre_ids"]) + ], + rating=round( + mean(float(x["vote_average"]) * 10 for x in collection["parts"]) + ), + external_id={ + self.name: MetadataId( + data_id=str(collection["id"]), + link=f"https://www.themoviedb.org/collection/{collection['id']}", + ) + }, + translations={ + Language.get( + f"{trans['iso_639_1']}-{trans['iso_3166_1']}" + ): CollectionTranslation( + name=clean(trans["data"]["title"]) or collection["name"], + latin_name=None, + description=trans["data"]["overview"], + tagline=None, + aliases=[], + tags=[], + poster=self._pick_image(collection, trans["iso_639_1"], "posters"), + thumbnail=self._pick_image( + collection, trans["iso_639_1"], "backdrops" + ), + banner=None, + logo=None, + ) + for trans in collection["translations"]["translations"] + }, + ) + + def _sort_search(self, search: list[Any], name: str, year: int | None) -> Any: + results = search + + # Find perfect match by year since sometime tmdb decides to discard the year parameter. + if year: + results = [ + x + for x in search + if ("first_air_date" in x and x["first_air_date"].startswith(str(year))) + or ("release_date" in x and x["release_date"].startswith(str(year))) + ] + if not results: + results = search + + # If there is a perfect match use it (and if there are multiple, use the most popular one) + res = sorted( + ( + x + for x in results + if ("name" in x and x["name"].casefold() == name.casefold()) + or ("title" in x and x["title"].casefold() == name.casefold()) + ), + key=lambda x: (x["vote_count"], x["popularity"]), + reverse=True, + ) + if res: + results = res + else: + # Ignore totally unpopular shows or unknown ones. 
+ # sorted is stable and False list[Genre]: + def flatten(x: Genre | list[Genre] | list[Genre | list[Genre]]) -> list[Genre]: + if isinstance(x, list): + return [j for i in x for j in flatten(i)] + return [x] + + return flatten([self._genre_map[x] for x in genres if x in self._genre_map]) + + def _map_studio(self, company: dict[str, Any]) -> Studio: + return Studio( + slug=to_slug(company["name"]), + external_id={ + self.name: MetadataId( + data_id=str(company["id"]), + link=f"https://www.themoviedb.org/company/{company['id']}", + ) + }, + translations={ + "en": StudioTranslation( + name=company["name"], + logo=self._map_image(company["logo_path"]) + if "logo_path" in company + else None, + ), + }, + ) + + def _map_staff(self, person: dict[str, Any]) -> Staff: + return Staff( + kind=self._roles_map.get(person["known_for_department"], Role.OTHER), + character=Character( + name=person["character"], + latin_name=None, + image=None, + ), + staff=Person( + slug=to_slug(person["name"]), + name=person["original_name"], + latin_name=person["name"], + image=self._map_image(person["profile_path"]), + external_id={ + self.name: MetadataId( + data_id=str(person["id"]), + link=f"https://www.themoviedb.org/person/{person['id']}", + ) + }, + ), + ) + + def _map_image(self, image: str | None) -> str | None: + if not image: + return None + return self._image_path + image + + def _pick_image(self, item: dict[str, Any], lng: str, key: str) -> str | None: + images = sorted( + item["images"][key], + key=lambda x: (x.get("vote_average", 0), x.get("width", 0)), + reverse=True, + ) + + # check images in your language + localized = next((x for x in images if x["iso_639_1"] == lng), None) + if localized: + return self._image_path + localized["file_path"] + # if failed, check images without text + notext = next((x for x in images if x["iso_639_1"] is None), None) + if notext: + return self._image_path + notext["file_path"] + # take a random image, it's better than nothing + random_img = next(iter(images), None) + if random_img: + return self._image_path + random_img["file_path"] + return None diff --git a/scanner/scanner/publisher.py b/scanner/scanner/publisher.py deleted file mode 100644 index d8ad9cc0..00000000 --- a/scanner/scanner/publisher.py +++ /dev/null @@ -1,30 +0,0 @@ -from guessit.jsonutils import json -from aio_pika import Message -from logging import getLogger -from typing import Literal - -from providers.rabbit_base import RabbitBase - -logger = getLogger(__name__) - - -class Publisher(RabbitBase): - async def _publish(self, data: dict): - await self._channel.default_exchange.publish( - Message(json.dumps(data).encode()), - routing_key=self.QUEUE, - ) - - async def add(self, path: str): - await self._publish({"action": "scan", "path": path}) - - async def delete(self, path: str): - await self._publish({"action": "delete", "path": path}) - - async def refresh( - self, - kind: Literal["collection", "show", "movie", "season", "episode"], - id: str, - **_kwargs, - ): - await self._publish({"action": "refresh", "kind": kind, "id": id}) diff --git a/scanner/scanner/requests.py b/scanner/scanner/requests.py new file mode 100644 index 00000000..5217b2b7 --- /dev/null +++ b/scanner/scanner/requests.py @@ -0,0 +1,192 @@ +from asyncio import CancelledError, Event, TaskGroup +from logging import getLogger +from typing import cast + +from asyncpg import Connection, Pool +from pydantic import TypeAdapter + +from .client import KyooClient +from .models.request import Request +from
.models.videos import Resource +from .providers.provider import Provider + +logger = getLogger(__name__) + + +class RequestCreator: + def __init__(self, database: Connection): + self._database = database + + async def enqueue(self, requests: list[Request]): + await self._database.executemany( + """ + insert into scanner.requests(kind, title, year, external_id, videos) + values ($1, $2, $3, $4, $5) + on conflict (kind, title, year) + do update set + videos = requests.videos || excluded.videos + """, + [ + [x["kind"], x["title"], x["year"], x["external_id"], x["videos"]] + for x in TypeAdapter(list[Request]).dump_python(requests) + ], + ) + _ = await self._database.execute("notify scanner_requests") + + async def clear_failed(self): + _ = await self._database.execute( + """ + delete from scanner.requests + where status = 'failed' + """ + ) + + +class RequestProcessor: + def __init__( + self, + pool: Pool, + client: KyooClient, + providers: Provider, + ): + self._pool = pool + self._database: Connection = None # type: ignore + self._client = client + self._providers = providers + + async def listen(self, tg: TaskGroup): + closed = Event() + + def process(*_): + _ = tg.create_task(self.process_all()) + + def terminated(*_): + closed.set() + + while True: + closed.clear() + # TODO: unsure if the timeout actually works, I think not... + async with self._pool.acquire(timeout=10) as db: + try: + self._database = cast(Connection, db) + self._database.add_termination_listener(terminated) + await self._database.add_listener("scanner_requests", process) + + logger.info("Listening for requests") + _ = await closed.wait() + logger.info("stopping...") + except CancelledError: + logger.info("Stopped listening for requests") + await self._database.remove_listener("scanner_requests", process) + self._database.remove_termination_listener(terminated) + raise + + async def process_all(self): + found = True + while found: + try: + found = await self.process_request() + except Exception as e: + logger.error( + "Failed to process one of the metadata requests", exc_info=e + ) + + async def process_request(self): + cur = await self._database.fetchrow( + """ + update + scanner.requests + set + status = 'running', + started_at = now()::timestamptz + where + pk in ( + select + pk + from + scanner.requests + where + status = 'pending' + limit 1 + for update + skip locked) + returning + * + """ + ) + if cur is None: + return False + request = Request.model_validate(cur) + + logger.info(f"Starting to process {request.title}") + try: + show = await self._run_request(request) + finished = await self._database.fetchrow( + """ + delete from scanner.requests + where pk = $1 + returning + videos + """, + request.pk, + ) + if finished and finished["videos"] != request.videos: + videos = TypeAdapter(list[Request.Video]).validate_python( + finished["videos"] + ) + await self._client.link_videos( + "movie" if request.kind == "movie" else "serie", + show.slug, + videos, + ) + except Exception as e: + logger.error("Couldn't process request", exc_info=e) + cur = await self._database.execute( + """ + update + scanner.requests + set + status = 'failed' + where + pk = $1 + """, + request.pk, + ) + return True + + async def _run_request(self, request: Request) -> Resource: + if request.kind == "movie": + movie = await self._providers.find_movie( + request.title, + request.year, + request.external_id, + ) + movie.videos = [x.id for x in request.videos] + return await self._client.create_movie(movie) + + serie = await
self._providers.find_serie( + request.title, + request.year, + request.external_id, + ) + for vid in request.videos: + for ep in vid.episodes: + entry = next( + ( + x + for x in serie.entries + if (ep.season is None and x.order == ep.episode) + or ( + x.season_number == ep.season + and x.episode_number == ep.episode + ) + ), + None, + ) + if entry is None: + logger.warning( + f"Couldn't match entry for {serie.slug} {ep.season or 'abs'}-e{ep.episode}." + ) + continue + entry.videos.append(vid.id) + return await self._client.create_serie(serie) diff --git a/scanner/scanner/routers/routes.py b/scanner/scanner/routers/routes.py new file mode 100644 index 00000000..90808df5 --- /dev/null +++ b/scanner/scanner/routers/routes.py @@ -0,0 +1,28 @@ +from typing import Annotated + +from fastapi import APIRouter, BackgroundTasks, Security + +from ..fsscan import create_scanner +from ..jwt import validate_bearer + +router = APIRouter() + + +@router.put( + "/scan", + status_code=204, + response_description="Scan started.", +) +async def trigger_scan( + tasks: BackgroundTasks, + _: Annotated[None, Security(validate_bearer, scopes=["scanner.trigger"])], +): + """ + Trigger a full scan of the filesystem, trying to find new videos & deleting old ones. + """ + + async def run(): + async with create_scanner() as scanner: + await scanner.scan() + + tasks.add_task(run) diff --git a/scanner/scanner/scanner.py b/scanner/scanner/scanner.py deleted file mode 100644 index 9fceac58..00000000 --- a/scanner/scanner/scanner.py +++ /dev/null @@ -1,73 +0,0 @@ -import os -import re -import asyncio -from typing import Optional -from logging import getLogger - -from .publisher import Publisher -from providers.kyoo_client import KyooClient - -logger = getLogger(__name__) - - -def get_ignore_pattern(): - try: - pattern = os.environ.get("LIBRARY_IGNORE_PATTERN") - return re.compile(pattern) if pattern else None - except re.error as e: - logger.error(f"Invalid ignore pattern. Ignoring. Error: {e}") - return None - - -async def scan( - path_: Optional[str], publisher: Publisher, client: KyooClient, remove_deleted=False -): - path = path_ or os.environ.get("SCANNER_LIBRARY_ROOT", "/video") - logger.info("Starting scan at %s. This may take some time...", path) - - ignore_pattern = get_ignore_pattern() - if ignore_pattern: - logger.info(f"Applying ignore pattern: {ignore_pattern}") - - registered = set(await client.get_registered_paths()) - videos = set() - - for dirpath, dirnames, files in os.walk(path): - # Skip directories with a `.ignore` file - if ".ignore" in files: - dirnames.clear() # Prevents os.walk from descending into this directory - continue - - for file in files: - file_path = os.path.join(dirpath, file) - # Apply ignore pattern, if any - if ignore_pattern and ignore_pattern.match(file_path): - continue - videos.add(file_path) - - to_register = videos - registered - to_delete = registered - videos if remove_deleted else set() - - if not any(to_register) and any(to_delete) and len(to_delete) == len(registered): - logger.warning("All video files are unavailable. 
Check your disks.") - return - - # delete stale files before creating new ones to prevent potential conflicts - if to_delete: - logger.info("Removing %d stale files.", len(to_delete)) - await asyncio.gather(*[publisher.delete(path) for path in to_delete]) - - if to_register: - logger.info("Found %d new files to register.", len(to_register)) - await asyncio.gather(*[publisher.add(path) for path in to_register]) - - if remove_deleted: - issues = set(await client.get_issues()) - issues_to_delete = issues - videos - if issues_to_delete: - logger.info("Removing %d stale issues.", len(issues_to_delete)) - await asyncio.gather( - *[client.delete_issue(issue) for issue in issues_to_delete] - ) - - logger.info("Scan finished for %s.", path) diff --git a/scanner/scanner/subscriber.py b/scanner/scanner/subscriber.py deleted file mode 100644 index 98149d26..00000000 --- a/scanner/scanner/subscriber.py +++ /dev/null @@ -1,24 +0,0 @@ -import asyncio -from guessit.jsonutils import json -from aio_pika.abc import AbstractIncomingMessage -from logging import getLogger - -from providers.rabbit_base import RabbitBase - -logger = getLogger(__name__) - - -class Subscriber(RabbitBase): - QUEUE = "scanner.rescan" - - async def listen(self, scan): - async def on_message(message: AbstractIncomingMessage): - try: - await scan() - await message.ack() - except Exception as e: - logger.exception("Unhandled error", exc_info=e) - await message.reject() - - await self._queue.consume(on_message) - await asyncio.Future() diff --git a/scanner/scanner/utils.py b/scanner/scanner/utils.py new file mode 100644 index 00000000..c60e8541 --- /dev/null +++ b/scanner/scanner/utils.py @@ -0,0 +1,78 @@ +from abc import ABCMeta +from collections.abc import Mapping +from typing import Annotated, Any, Callable, override + +from asyncpg import Record +from langcodes import Language as BaseLanguage +from pydantic import BaseModel, ConfigDict, GetJsonSchemaHandler +from pydantic.alias_generators import to_camel +from pydantic.json_schema import JsonSchemaValue +from pydantic_core import core_schema +from slugify import slugify + + +def to_slug(title: str) -> str: + return slugify(title) + + +def clean(val: str) -> str | None: + return val or None + + +class Singleton(ABCMeta, type): + _instances = {} + + @override + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) + return cls._instances[cls] + + +class Model(BaseModel): + model_config = ConfigDict( + use_enum_values=True, + validate_by_name=True, + alias_generator=lambda x: to_camel(x[:-1] if x[-1] == "_" else x), + ) + + +Mapping.register(Record) # type: ignore + + +class _LanguagePydanticAnnotation: + @classmethod + def __get_pydantic_core_schema__( + cls, + _source_type: Any, + _handler: Callable[[Any], core_schema.CoreSchema], + ) -> core_schema.CoreSchema: + def validate_from_str(value: str) -> BaseLanguage: + return BaseLanguage.get(value) + + from_str_schema = core_schema.chain_schema( + [ + core_schema.str_schema(), + core_schema.no_info_plain_validator_function(validate_from_str), + ] + ) + + return core_schema.json_or_python_schema( + json_schema=from_str_schema, + python_schema=core_schema.union_schema( + [ + core_schema.is_instance_schema(BaseLanguage), + from_str_schema, + ] + ), + serialization=core_schema.to_string_ser_schema(), + ) + + @classmethod + def __get_pydantic_json_schema__( + cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + 
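# expose Language fields as plain strings in the generated JSON schema +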
return handler(core_schema.str_schema()) + + +Language = Annotated[BaseLanguage, _LanguagePydanticAnnotation] diff --git a/scanner/shell.nix b/scanner/shell.nix new file mode 100644 index 00000000..a8e0d9f6 --- /dev/null +++ b/scanner/shell.nix @@ -0,0 +1,26 @@ +{pkgs ? import <nixpkgs> {}}: let + python = pkgs.python313.withPackages (ps: + with ps; [ + fastapi + pydantic + guessit + aiohttp + watchfiles + langcodes + asyncpg + pyjwt + python-slugify + ]); +in + pkgs.mkShell { + packages = with pkgs; [ + python + uv + ruff + fastapi-cli + pgformatter + ]; + + UV_PYTHON_PREFERENCE = "only-system"; + UV_PYTHON = pkgs.python313; + } diff --git a/scanner/uv.lock b/scanner/uv.lock new file mode 100644 index 00000000..0568f014 --- /dev/null +++ b/scanner/uv.lock @@ -0,0 +1,991 @@ +version = 1 +revision = 2 +requires-python = ">=3.13" + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.11.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/e7/fa1a8c00e2c54b05dc8cb5d1439f627f7c267874e3f7bb047146116020f9/aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a", size = 7678653, upload-time = "2025-04-21T09:43:09.191Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/18/be8b5dd6b9cf1b2172301dbed28e8e5e878ee687c21947a6c81d6ceaa15d/aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811", size = 699833, upload-time = "2025-04-21T09:42:00.298Z" }, + { url = "https://files.pythonhosted.org/packages/0d/84/ecdc68e293110e6f6f6d7b57786a77555a85f70edd2b180fb1fafaff361a/aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804", size = 462774, upload-time = "2025-04-21T09:42:02.015Z" }, + { url = "https://files.pythonhosted.org/packages/d7/85/f07718cca55884dad83cc2433746384d267ee970e91f0dcc75c6d5544079/aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd", size = 454429, upload-time = "2025-04-21T09:42:03.728Z" }, + { url = "https://files.pythonhosted.org/packages/82/02/7f669c3d4d39810db8842c4e572ce4fe3b3a9b82945fdd64affea4c6947e/aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c", size = 1670283, upload-time = "2025-04-21T09:42:06.053Z" }, + { url =
"https://files.pythonhosted.org/packages/ec/79/b82a12f67009b377b6c07a26bdd1b81dab7409fc2902d669dbfa79e5ac02/aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118", size = 1717231, upload-time = "2025-04-21T09:42:07.953Z" }, + { url = "https://files.pythonhosted.org/packages/a6/38/d5a1f28c3904a840642b9a12c286ff41fc66dfa28b87e204b1f242dbd5e6/aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1", size = 1769621, upload-time = "2025-04-21T09:42:09.855Z" }, + { url = "https://files.pythonhosted.org/packages/53/2d/deb3749ba293e716b5714dda06e257f123c5b8679072346b1eb28b766a0b/aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000", size = 1678667, upload-time = "2025-04-21T09:42:11.741Z" }, + { url = "https://files.pythonhosted.org/packages/b8/a8/04b6e11683a54e104b984bd19a9790eb1ae5f50968b601bb202d0406f0ff/aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137", size = 1601592, upload-time = "2025-04-21T09:42:14.137Z" }, + { url = "https://files.pythonhosted.org/packages/5e/9d/c33305ae8370b789423623f0e073d09ac775cd9c831ac0f11338b81c16e0/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93", size = 1621679, upload-time = "2025-04-21T09:42:16.056Z" }, + { url = "https://files.pythonhosted.org/packages/56/45/8e9a27fff0538173d47ba60362823358f7a5f1653c6c30c613469f94150e/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3", size = 1656878, upload-time = "2025-04-21T09:42:18.368Z" }, + { url = "https://files.pythonhosted.org/packages/84/5b/8c5378f10d7a5a46b10cb9161a3aac3eeae6dba54ec0f627fc4ddc4f2e72/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8", size = 1620509, upload-time = "2025-04-21T09:42:20.141Z" }, + { url = "https://files.pythonhosted.org/packages/9e/2f/99dee7bd91c62c5ff0aa3c55f4ae7e1bc99c6affef780d7777c60c5b3735/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2", size = 1680263, upload-time = "2025-04-21T09:42:21.993Z" }, + { url = "https://files.pythonhosted.org/packages/03/0a/378745e4ff88acb83e2d5c884a4fe993a6e9f04600a4560ce0e9b19936e3/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261", size = 1715014, upload-time = "2025-04-21T09:42:23.87Z" }, + { url = "https://files.pythonhosted.org/packages/f6/0b/b5524b3bb4b01e91bc4323aad0c2fcaebdf2f1b4d2eb22743948ba364958/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7", size = 1666614, upload-time = "2025-04-21T09:42:25.764Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b7/3d7b036d5a4ed5a4c704e0754afe2eef24a824dfab08e6efbffb0f6dd36a/aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78", size = 411358, upload-time = "2025-04-21T09:42:27.558Z" 
}, + { url = "https://files.pythonhosted.org/packages/1e/3c/143831b32cd23b5263a995b2a1794e10aa42f8a895aae5074c20fda36c07/aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01", size = 437658, upload-time = "2025-04-21T09:42:29.209Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424, upload-time = "2024-12-13T17:10:40.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597, upload-time = "2024-12-13T17:10:38.469Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, +] + +[[package]] +name = "asyncpg" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373, upload-time = "2024-10-20T00:29:55.165Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745, upload-time = 
"2024-10-20T00:29:57.14Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103, upload-time = "2024-10-20T00:29:58.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471, upload-time = "2024-10-20T00:30:00.354Z" }, + { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253, upload-time = "2024-10-20T00:30:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720, upload-time = "2024-10-20T00:30:04.501Z" }, + { url = "https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404, upload-time = "2024-10-20T00:30:06.537Z" }, + { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623, upload-time = "2024-10-20T00:30:09.024Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "babelfish" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/8f/17ff889327f8a1c36a28418e686727dabc06c080ed49c95e3e2424a77aa6/babelfish-0.6.1.tar.gz", hash = "sha256:decb67a4660888d48480ab6998309837174158d0f1aa63bebb1c2e11aab97aab", size = 87706, upload-time = "2024-05-09T21:16:24.357Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/a1/bd4f759db13cd8beb9c9f68682aced5d966781b9d7380cf514a306f56762/babelfish-0.6.1-py3-none-any.whl", hash = "sha256:512f1501d4c8f7d38f0921f48660be7542de1a7b24abb6a6a65324a670150293", size = 94231, upload-time = "2024-05-09T21:16:22.633Z" }, +] + +[[package]] +name = "certifi" +version = "2025.4.26" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "cryptography" +version = "44.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/d6/1411ab4d6108ab167d06254c5be517681f1e331f90edf1379895bcb87020/cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053", size = 711096, upload-time = "2025-05-02T19:36:04.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/53/c776d80e9d26441bb3868457909b4e74dd9ccabd182e10b2b0ae7a07e265/cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88", size = 6670281, upload-time = "2025-05-02T19:34:50.665Z" }, + { url = "https://files.pythonhosted.org/packages/6a/06/af2cf8d56ef87c77319e9086601bef621bedf40f6f59069e1b6d1ec498c5/cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137", size = 3959305, upload-time = "2025-05-02T19:34:53.042Z" }, + { url = "https://files.pythonhosted.org/packages/ae/01/80de3bec64627207d030f47bf3536889efee8913cd363e78ca9a09b13c8e/cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c", size = 4171040, upload-time = "2025-05-02T19:34:54.675Z" }, + { url = "https://files.pythonhosted.org/packages/bd/48/bb16b7541d207a19d9ae8b541c70037a05e473ddc72ccb1386524d4f023c/cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76", size = 3963411, upload-time = "2025-05-02T19:34:56.61Z" }, + { url = "https://files.pythonhosted.org/packages/42/b2/7d31f2af5591d217d71d37d044ef5412945a8a8e98d5a2a8ae4fd9cd4489/cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359", size = 3689263, upload-time = "2025-05-02T19:34:58.591Z" }, + { url = "https://files.pythonhosted.org/packages/25/50/c0dfb9d87ae88ccc01aad8eb93e23cfbcea6a6a106a9b63a7b14c1f93c75/cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43", size = 4196198, upload-time = "2025-05-02T19:35:00.988Z" }, + { url = "https://files.pythonhosted.org/packages/66/c9/55c6b8794a74da652690c898cb43906310a3e4e4f6ee0b5f8b3b3e70c441/cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01", size = 3966502, upload-time = "2025-05-02T19:35:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/b6/f7/7cb5488c682ca59a02a32ec5f975074084db4c983f849d47b7b67cc8697a/cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d", size = 4196173, upload-time = "2025-05-02T19:35:05.018Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0b/2f789a8403ae089b0b121f8f54f4a3e5228df756e2146efdf4a09a3d5083/cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904", size = 4087713, upload-time = "2025-05-02T19:35:07.187Z" }, + { url = "https://files.pythonhosted.org/packages/1d/aa/330c13655f1af398fc154089295cf259252f0ba5df93b4bc9d9c7d7f843e/cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44", size = 4299064, upload-time = "2025-05-02T19:35:08.879Z" }, + { url = "https://files.pythonhosted.org/packages/10/a8/8c540a421b44fd267a7d58a1fd5f072a552d72204a3f08194f98889de76d/cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d", size = 2773887, upload-time = "2025-05-02T19:35:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0d/c4b1657c39ead18d76bbd122da86bd95bdc4095413460d09544000a17d56/cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d", size = 3209737, upload-time = "2025-05-02T19:35:12.12Z" }, + { url = "https://files.pythonhosted.org/packages/34/a3/ad08e0bcc34ad436013458d7528e83ac29910943cea42ad7dd4141a27bbb/cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f", size = 6673501, upload-time = "2025-05-02T19:35:13.775Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f0/7491d44bba8d28b464a5bc8cc709f25a51e3eac54c0a4444cf2473a57c37/cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759", size = 3960307, upload-time = "2025-05-02T19:35:15.917Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c8/e5c5d0e1364d3346a5747cdcd7ecbb23ca87e6dea4f942a44e88be349f06/cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645", size = 4170876, upload-time = "2025-05-02T19:35:18.138Z" }, + { url = "https://files.pythonhosted.org/packages/73/96/025cb26fc351d8c7d3a1c44e20cf9a01e9f7cf740353c9c7a17072e4b264/cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2", size = 3964127, upload-time = "2025-05-02T19:35:19.864Z" }, + { url = "https://files.pythonhosted.org/packages/01/44/eb6522db7d9f84e8833ba3bf63313f8e257729cf3a8917379473fcfd6601/cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54", size = 3689164, upload-time = "2025-05-02T19:35:21.449Z" }, + { url = "https://files.pythonhosted.org/packages/68/fb/d61a4defd0d6cee20b1b8a1ea8f5e25007e26aeb413ca53835f0cae2bcd1/cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93", size = 4198081, upload-time = "2025-05-02T19:35:23.187Z" }, + { url = "https://files.pythonhosted.org/packages/1b/50/457f6911d36432a8811c3ab8bd5a6090e8d18ce655c22820994913dd06ea/cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c", size = 3967716, upload-time = "2025-05-02T19:35:25.426Z" }, + { url = "https://files.pythonhosted.org/packages/35/6e/dca39d553075980ccb631955c47b93d87d27f3596da8d48b1ae81463d915/cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f", size = 4197398, upload-time = "2025-05-02T19:35:27.678Z" }, + { url = "https://files.pythonhosted.org/packages/9b/9d/d1f2fe681eabc682067c66a74addd46c887ebacf39038ba01f8860338d3d/cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5", size = 4087900, upload-time = "2025-05-02T19:35:29.312Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f5/3599e48c5464580b73b236aafb20973b953cd2e7b44c7c2533de1d888446/cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b", size = 4301067, upload-time = "2025-05-02T19:35:31.547Z" }, + { url = "https://files.pythonhosted.org/packages/a7/6c/d2c48c8137eb39d0c193274db5c04a75dab20d2f7c3f81a7dcc3a8897701/cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028", size = 2775467, upload-time = "2025-05-02T19:35:33.805Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ad/51f212198681ea7b0deaaf8846ee10af99fba4e894f67b353524eab2bbe5/cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = 
"sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334", size = 3210375, upload-time = "2025-05-02T19:35:35.369Z" }, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, +] + +[[package]] +name = "fastapi" +version = "0.115.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236, upload-time = "2025-03-23T22:55:43.822Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164, upload-time = "2025-03-23T22:55:42.101Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "email-validator" }, + { name = "fastapi-cli", extra = ["standard"] }, + { name = "httpx" }, + { name = "jinja2" }, + { name = "python-multipart" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cli" +version = "0.0.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rich-toolkit" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753, upload-time = "2024-12-15T14:28:10.028Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705, upload-time = "2024-12-15T14:28:06.18Z" }, +] + 
+[package.optional-dependencies] +standard = [ + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "frozenlist" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/f4/d744cba2da59b5c1d88823cf9e8a6c74e4659e2b27604ed973be2a0bf5ab/frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68", size = 42831, upload-time = "2025-04-17T22:38:53.099Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/e5/04c7090c514d96ca00887932417f04343ab94904a56ab7f57861bf63652d/frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e", size = 158182, upload-time = "2025-04-17T22:37:16.837Z" }, + { url = "https://files.pythonhosted.org/packages/e9/8f/60d0555c61eec855783a6356268314d204137f5e0c53b59ae2fc28938c99/frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117", size = 122838, upload-time = "2025-04-17T22:37:18.352Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a7/d0ec890e3665b4b3b7c05dc80e477ed8dc2e2e77719368e78e2cd9fec9c8/frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4", size = 120980, upload-time = "2025-04-17T22:37:19.857Z" }, + { url = "https://files.pythonhosted.org/packages/cc/19/9b355a5e7a8eba903a008579964192c3e427444752f20b2144b10bb336df/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3", size = 305463, upload-time = "2025-04-17T22:37:21.328Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8d/5b4c758c2550131d66935ef2fa700ada2461c08866aef4229ae1554b93ca/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1", size = 297985, upload-time = "2025-04-17T22:37:23.55Z" }, + { url = "https://files.pythonhosted.org/packages/48/2c/537ec09e032b5865715726b2d1d9813e6589b571d34d01550c7aeaad7e53/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c", size = 311188, upload-time = "2025-04-17T22:37:25.221Z" }, + { url = "https://files.pythonhosted.org/packages/31/2f/1aa74b33f74d54817055de9a4961eff798f066cdc6f67591905d4fc82a84/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45", size = 311874, upload-time = "2025-04-17T22:37:26.791Z" }, + { url = "https://files.pythonhosted.org/packages/bf/f0/cfec18838f13ebf4b37cfebc8649db5ea71a1b25dacd691444a10729776c/frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f", size = 291897, upload-time = "2025-04-17T22:37:28.958Z" }, + { url = "https://files.pythonhosted.org/packages/ea/a5/deb39325cbbea6cd0a46db8ccd76150ae2fcbe60d63243d9df4a0b8c3205/frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85", size = 305799, upload-time = 
"2025-04-17T22:37:30.889Z" }, + { url = "https://files.pythonhosted.org/packages/78/22/6ddec55c5243a59f605e4280f10cee8c95a449f81e40117163383829c241/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8", size = 302804, upload-time = "2025-04-17T22:37:32.489Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b7/d9ca9bab87f28855063c4d202936800219e39db9e46f9fb004d521152623/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f", size = 316404, upload-time = "2025-04-17T22:37:34.59Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3a/1255305db7874d0b9eddb4fe4a27469e1fb63720f1fc6d325a5118492d18/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f", size = 295572, upload-time = "2025-04-17T22:37:36.337Z" }, + { url = "https://files.pythonhosted.org/packages/2a/f2/8d38eeee39a0e3a91b75867cc102159ecccf441deb6ddf67be96d3410b84/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6", size = 307601, upload-time = "2025-04-17T22:37:37.923Z" }, + { url = "https://files.pythonhosted.org/packages/38/04/80ec8e6b92f61ef085422d7b196822820404f940950dde5b2e367bede8bc/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188", size = 314232, upload-time = "2025-04-17T22:37:39.669Z" }, + { url = "https://files.pythonhosted.org/packages/3a/58/93b41fb23e75f38f453ae92a2f987274c64637c450285577bd81c599b715/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e", size = 308187, upload-time = "2025-04-17T22:37:41.662Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a2/e64df5c5aa36ab3dee5a40d254f3e471bb0603c225f81664267281c46a2d/frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4", size = 114772, upload-time = "2025-04-17T22:37:43.132Z" }, + { url = "https://files.pythonhosted.org/packages/a0/77/fead27441e749b2d574bb73d693530d59d520d4b9e9679b8e3cb779d37f2/frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd", size = 119847, upload-time = "2025-04-17T22:37:45.118Z" }, + { url = "https://files.pythonhosted.org/packages/df/bd/cc6d934991c1e5d9cafda83dfdc52f987c7b28343686aef2e58a9cf89f20/frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64", size = 174937, upload-time = "2025-04-17T22:37:46.635Z" }, + { url = "https://files.pythonhosted.org/packages/f2/a2/daf945f335abdbfdd5993e9dc348ef4507436936ab3c26d7cfe72f4843bf/frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91", size = 136029, upload-time = "2025-04-17T22:37:48.192Z" }, + { url = "https://files.pythonhosted.org/packages/51/65/4c3145f237a31247c3429e1c94c384d053f69b52110a0d04bfc8afc55fb2/frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd", size = 134831, upload-time = "2025-04-17T22:37:50.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/38/03d316507d8dea84dfb99bdd515ea245628af964b2bf57759e3c9205cc5e/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2", size = 392981, upload-time = "2025-04-17T22:37:52.558Z" }, + { url = "https://files.pythonhosted.org/packages/37/02/46285ef9828f318ba400a51d5bb616ded38db8466836a9cfa39f3903260b/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506", size = 371999, upload-time = "2025-04-17T22:37:54.092Z" }, + { url = "https://files.pythonhosted.org/packages/0d/64/1212fea37a112c3c5c05bfb5f0a81af4836ce349e69be75af93f99644da9/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0", size = 392200, upload-time = "2025-04-17T22:37:55.951Z" }, + { url = "https://files.pythonhosted.org/packages/81/ce/9a6ea1763e3366e44a5208f76bf37c76c5da570772375e4d0be85180e588/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0", size = 390134, upload-time = "2025-04-17T22:37:57.633Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/939738b0b495b2c6d0c39ba51563e453232813042a8d908b8f9544296c29/frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e", size = 365208, upload-time = "2025-04-17T22:37:59.742Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8b/939e62e93c63409949c25220d1ba8e88e3960f8ef6a8d9ede8f94b459d27/frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c", size = 385548, upload-time = "2025-04-17T22:38:01.416Z" }, + { url = "https://files.pythonhosted.org/packages/62/38/22d2873c90102e06a7c5a3a5b82ca47e393c6079413e8a75c72bff067fa8/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b", size = 391123, upload-time = "2025-04-17T22:38:03.049Z" }, + { url = "https://files.pythonhosted.org/packages/44/78/63aaaf533ee0701549500f6d819be092c6065cb5c577edb70c09df74d5d0/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad", size = 394199, upload-time = "2025-04-17T22:38:04.776Z" }, + { url = "https://files.pythonhosted.org/packages/54/45/71a6b48981d429e8fbcc08454dc99c4c2639865a646d549812883e9c9dd3/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215", size = 373854, upload-time = "2025-04-17T22:38:06.576Z" }, + { url = "https://files.pythonhosted.org/packages/3f/f3/dbf2a5e11736ea81a66e37288bf9f881143a7822b288a992579ba1b4204d/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2", size = 395412, upload-time = "2025-04-17T22:38:08.197Z" }, + { url = "https://files.pythonhosted.org/packages/b3/f1/c63166806b331f05104d8ea385c4acd511598568b1f3e4e8297ca54f2676/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash 
= "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911", size = 394936, upload-time = "2025-04-17T22:38:10.056Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ea/4f3e69e179a430473eaa1a75ff986526571215fefc6b9281cdc1f09a4eb8/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497", size = 391459, upload-time = "2025-04-17T22:38:11.826Z" }, + { url = "https://files.pythonhosted.org/packages/d3/c3/0fc2c97dea550df9afd072a37c1e95421652e3206bbeaa02378b24c2b480/frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f", size = 128797, upload-time = "2025-04-17T22:38:14.013Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f5/79c9320c5656b1965634fe4be9c82b12a3305bdbc58ad9cb941131107b20/frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348", size = 134709, upload-time = "2025-04-17T22:38:15.551Z" }, + { url = "https://files.pythonhosted.org/packages/71/3e/b04a0adda73bd52b390d730071c0d577073d3d26740ee1bad25c3ad0f37b/frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191", size = 12404, upload-time = "2025-04-17T22:38:51.668Z" }, +] + +[[package]] +name = "guessit" +version = "3.8.0" +source = { git = "https://github.com/zoriya/guessit#f1d818cfe2e88a08c620f032cf1abd1457e81bcb" } +dependencies = [ + { name = "babelfish" }, + { name = "python-dateutil" }, + { name = "rebulk" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httptools" +version = "0.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload-time = "2024-10-16T19:44:38.738Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload-time = "2024-10-16T19:44:39.818Z" }, + { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload-time = "2024-10-16T19:44:41.189Z" }, + { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805, upload-time = "2024-10-16T19:44:42.384Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858, upload-time = "2024-10-16T19:44:43.959Z" }, + { url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042, upload-time = "2024-10-16T19:44:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "langcodes" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "language-data" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/7a/5a97e327063409a5caa21541e6d08ae4a0f2da328447e9f2c7b39e179226/langcodes-3.5.0.tar.gz", hash = "sha256:1eef8168d07e51e131a2497ffecad4b663f6208e7c3ae3b8dc15c51734a6f801", size = 191030, upload-time = "2024-11-19T10:23:45.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/6b/068c2ea7a712bf805c62445bd9e9c06d7340358ef2824150eceac027444b/langcodes-3.5.0-py3-none-any.whl", hash = "sha256:853c69d1a35e0e13da2f427bb68fb2fa4a8f4fb899e0c62ad8df8d073dcfed33", size = 182974, upload-time = "2024-11-19T10:23:42.824Z" }, +] + +[[package]] +name = "language-data" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marisa-trie" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/ce/3f144716a9f2cbf42aa86ebc8b085a184be25c80aa453eea17c294d239c1/language_data-1.3.0.tar.gz", hash = "sha256:7600ef8aa39555145d06c89f0c324bf7dab834ea0b0a439d8243762e3ebad7ec", size = 5129310, upload-time = "2024-11-19T10:21:37.912Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/e9/5a5ffd9b286db82be70d677d0a91e4d58f7912bb8dd026ddeeb4abe70679/language_data-1.3.0-py3-none-any.whl", hash = "sha256:e2ee943551b5ae5f89cd0e801d1fc3835bb0ef5b7e9c3a4e8e17b2b214548fbf", size = 5385760, upload-time = "2024-11-19T10:21:36.005Z" }, +] + +[[package]] +name = "marisa-trie" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/15/9d9743897e4450b2de199ee673b50cb018980c4ced477d41cf91304a85e3/marisa_trie-1.2.1.tar.gz", hash = "sha256:3a27c408e2aefc03e0f1d25b2ff2afb85aac3568f6fa2ae2a53b57a2e87ce29d", size = 416124, upload-time = "2024-10-12T11:30:15.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/a4/a110cd9952f0e72da7bafea1f0084b18b9e03952110d9083bfda52279f5c/marisa_trie-1.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b5ea16e69bfda0ac028c921b58de1a4aaf83d43934892977368579cd3c0a2554", size = 354439, upload-time = "2024-10-12T11:29:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a5/a6099eb1c3fd8d7e93408c45501e1d08536ac57dfef02ec331f78e1ace18/marisa_trie-1.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:9f627f4e41be710b6cb6ed54b0128b229ac9d50e2054d9cde3af0fef277c23cf", size = 188187, upload-time = "2024-10-12T11:29:18.558Z" }, + { url = "https://files.pythonhosted.org/packages/7c/cc/f637127e2beffa920d21f7fc45b4029575bcd1b28a90c0d90cb2b08c2205/marisa_trie-1.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5e649f3dc8ab5476732094f2828cc90cac3be7c79bc0c8318b6fda0c1d248db4", size = 171484, upload-time = "2024-10-12T11:29:19.596Z" }, + { url = "https://files.pythonhosted.org/packages/6d/0f/29f2ad7260b956570f69f25a542efa51ba76eb76ecd53c63ee9d21987c3d/marisa_trie-1.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46e528ee71808c961baf8c3ce1c46a8337ec7a96cc55389d11baafe5b632f8e9", size = 1319770, upload-time = "2024-10-12T11:29:20.661Z" }, + { url = "https://files.pythonhosted.org/packages/f2/12/0b69ed61fba59551a5f3d569af367afae614db7214ce1da12946ba9a433a/marisa_trie-1.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36aa4401a1180615f74d575571a6550081d84fc6461e9aefc0bb7b2427af098e", size = 1356488, upload-time = "2024-10-12T11:29:21.95Z" }, + { url = "https://files.pythonhosted.org/packages/33/23/483b110db7ffe8729d6ebea2bf74258aef51f10fef5775f99e4bac7aef69/marisa_trie-1.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce59bcd2cda9bb52b0e90cc7f36413cd86c3d0ce7224143447424aafb9f4aa48", size = 1302334, upload-time = "2024-10-12T11:29:24.217Z" }, + { url = "https://files.pythonhosted.org/packages/1c/6f/46c2be99ce925985127fdf78900f1673bce8cb72debfebee6dccd11032c6/marisa_trie-1.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f4cd800704a5fc57e53c39c3a6b0c9b1519ebdbcb644ede3ee67a06eb542697d", size = 2202624, upload-time = "2024-10-12T11:29:25.499Z" }, + { url = "https://files.pythonhosted.org/packages/fd/b6/ef642327dbd4ec35be55d5682520b8f70fca98a54024f441ef2732f6b305/marisa_trie-1.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2428b495003c189695fb91ceeb499f9fcced3a2dce853e17fa475519433c67ff", size = 2364206, upload-time = "2024-10-12T11:29:26.771Z" }, + { url = "https://files.pythonhosted.org/packages/69/04/ef8197a79d0ab5043b781cc9b457bd11b81d4204fe78adf7625a67f48c21/marisa_trie-1.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:735c363d9aaac82eaf516a28f7c6b95084c2e176d8231c87328dc80e112a9afa", size = 2304801, upload-time = "2024-10-12T11:29:28.095Z" }, + { url = "https://files.pythonhosted.org/packages/03/72/f87564d653daf31d8f33d9bf0121e99ccc21f18f5c485fb404ba06abc10e/marisa_trie-1.2.1-cp313-cp313-win32.whl", hash = "sha256:eba6ca45500ca1a042466a0684aacc9838e7f20fe2605521ee19f2853062798f", size = 128799, upload-time = "2024-10-12T11:29:30.28Z" }, + { url = "https://files.pythonhosted.org/packages/27/40/5f9eb8b73030cc4b0d6817176e66079a62a2ddd9d5530da54f8011473428/marisa_trie-1.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:aa7cd17e1c690ce96c538b2f4aae003d9a498e65067dd433c52dd069009951d4", size = 149035, upload-time = "2024-10-12T11:29:31.332Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size 
= 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = 
"2024-10-18T15:21:42.784Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "multidict" +version = "6.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/2c/e367dfb4c6538614a0c9453e510d75d66099edf1c4e69da1b5ce691a1931/multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec", size = 89372, upload-time = "2025-04-10T22:20:17.956Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/4b/86fd786d03915c6f49998cf10cd5fe6b6ac9e9a071cb40885d2e080fb90d/multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474", size = 63831, upload-time = "2025-04-10T22:18:48.748Z" }, + { url = "https://files.pythonhosted.org/packages/45/05/9b51fdf7aef2563340a93be0a663acba2c428c4daeaf3960d92d53a4a930/multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd", size = 37888, upload-time = "2025-04-10T22:18:50.021Z" }, + { url = "https://files.pythonhosted.org/packages/0b/43/53fc25394386c911822419b522181227ca450cf57fea76e6188772a1bd91/multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b", size = 36852, upload-time = "2025-04-10T22:18:51.246Z" }, + { url = "https://files.pythonhosted.org/packages/8a/68/7b99c751e822467c94a235b810a2fd4047d4ecb91caef6b5c60116991c4b/multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3", size = 223644, upload-time = "2025-04-10T22:18:52.965Z" }, + { url = "https://files.pythonhosted.org/packages/80/1b/d458d791e4dd0f7e92596667784fbf99e5c8ba040affe1ca04f06b93ae92/multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac", size = 230446, upload-time = "2025-04-10T22:18:54.509Z" }, + { url = "https://files.pythonhosted.org/packages/e2/46/9793378d988905491a7806d8987862dc5a0bae8a622dd896c4008c7b226b/multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790", size = 231070, upload-time = "2025-04-10T22:18:56.019Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b8/b127d3e1f8dd2a5bf286b47b24567ae6363017292dc6dec44656e6246498/multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb", size = 229956, upload-time = "2025-04-10T22:18:59.146Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/93/f70a4c35b103fcfe1443059a2bb7f66e5c35f2aea7804105ff214f566009/multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0", size = 222599, upload-time = "2025-04-10T22:19:00.657Z" }, + { url = "https://files.pythonhosted.org/packages/63/8c/e28e0eb2fe34921d6aa32bfc4ac75b09570b4d6818cc95d25499fe08dc1d/multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9", size = 216136, upload-time = "2025-04-10T22:19:02.244Z" }, + { url = "https://files.pythonhosted.org/packages/72/f5/fbc81f866585b05f89f99d108be5d6ad170e3b6c4d0723d1a2f6ba5fa918/multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8", size = 228139, upload-time = "2025-04-10T22:19:04.151Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ba/7d196bad6b85af2307d81f6979c36ed9665f49626f66d883d6c64d156f78/multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1", size = 226251, upload-time = "2025-04-10T22:19:06.117Z" }, + { url = "https://files.pythonhosted.org/packages/cc/e2/fae46a370dce79d08b672422a33df721ec8b80105e0ea8d87215ff6b090d/multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817", size = 221868, upload-time = "2025-04-10T22:19:07.981Z" }, + { url = "https://files.pythonhosted.org/packages/26/20/bbc9a3dec19d5492f54a167f08546656e7aef75d181d3d82541463450e88/multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d", size = 233106, upload-time = "2025-04-10T22:19:09.5Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8d/f30ae8f5ff7a2461177f4d8eb0d8f69f27fb6cfe276b54ec4fd5a282d918/multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9", size = 230163, upload-time = "2025-04-10T22:19:11Z" }, + { url = "https://files.pythonhosted.org/packages/15/e9/2833f3c218d3c2179f3093f766940ded6b81a49d2e2f9c46ab240d23dfec/multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8", size = 225906, upload-time = "2025-04-10T22:19:12.875Z" }, + { url = "https://files.pythonhosted.org/packages/f1/31/6edab296ac369fd286b845fa5dd4c409e63bc4655ed8c9510fcb477e9ae9/multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3", size = 35238, upload-time = "2025-04-10T22:19:14.41Z" }, + { url = "https://files.pythonhosted.org/packages/23/57/2c0167a1bffa30d9a1383c3dab99d8caae985defc8636934b5668830d2ef/multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5", size = 38799, upload-time = "2025-04-10T22:19:15.869Z" }, + { url = "https://files.pythonhosted.org/packages/c9/13/2ead63b9ab0d2b3080819268acb297bd66e238070aa8d42af12b08cbee1c/multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6", size = 68642, upload-time = "2025-04-10T22:19:17.527Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/45/f1a751e1eede30c23951e2ae274ce8fad738e8a3d5714be73e0a41b27b16/multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c", size = 40028, upload-time = "2025-04-10T22:19:19.465Z" }, + { url = "https://files.pythonhosted.org/packages/a7/29/fcc53e886a2cc5595cc4560df333cb9630257bda65003a7eb4e4e0d8f9c1/multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756", size = 39424, upload-time = "2025-04-10T22:19:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f0/056c81119d8b88703971f937b371795cab1407cd3c751482de5bfe1a04a9/multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375", size = 226178, upload-time = "2025-04-10T22:19:22.17Z" }, + { url = "https://files.pythonhosted.org/packages/a3/79/3b7e5fea0aa80583d3a69c9d98b7913dfd4fbc341fb10bb2fb48d35a9c21/multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be", size = 222617, upload-time = "2025-04-10T22:19:23.773Z" }, + { url = "https://files.pythonhosted.org/packages/06/db/3ed012b163e376fc461e1d6a67de69b408339bc31dc83d39ae9ec3bf9578/multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea", size = 227919, upload-time = "2025-04-10T22:19:25.35Z" }, + { url = "https://files.pythonhosted.org/packages/b1/db/0433c104bca380989bc04d3b841fc83e95ce0c89f680e9ea4251118b52b6/multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8", size = 226097, upload-time = "2025-04-10T22:19:27.183Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/910db2618175724dd254b7ae635b6cd8d2947a8b76b0376de7b96d814dab/multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02", size = 220706, upload-time = "2025-04-10T22:19:28.882Z" }, + { url = "https://files.pythonhosted.org/packages/d1/af/aa176c6f5f1d901aac957d5258d5e22897fe13948d1e69063ae3d5d0ca01/multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124", size = 211728, upload-time = "2025-04-10T22:19:30.481Z" }, + { url = "https://files.pythonhosted.org/packages/e7/42/d51cc5fc1527c3717d7f85137d6c79bb7a93cd214c26f1fc57523774dbb5/multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44", size = 226276, upload-time = "2025-04-10T22:19:32.454Z" }, + { url = "https://files.pythonhosted.org/packages/28/6b/d836dea45e0b8432343ba4acf9a8ecaa245da4c0960fb7ab45088a5e568a/multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b", size = 212069, upload-time = "2025-04-10T22:19:34.17Z" }, + { url = "https://files.pythonhosted.org/packages/55/34/0ee1a7adb3560e18ee9289c6e5f7db54edc312b13e5c8263e88ea373d12c/multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504", size = 217858, upload-time = "2025-04-10T22:19:35.879Z" }, + { url = "https://files.pythonhosted.org/packages/04/08/586d652c2f5acefe0cf4e658eedb4d71d4ba6dfd4f189bd81b400fc1bc6b/multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf", size = 226988, upload-time = "2025-04-10T22:19:37.434Z" }, + { url = "https://files.pythonhosted.org/packages/82/e3/cc59c7e2bc49d7f906fb4ffb6d9c3a3cf21b9f2dd9c96d05bef89c2b1fd1/multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4", size = 220435, upload-time = "2025-04-10T22:19:39.005Z" }, + { url = "https://files.pythonhosted.org/packages/e0/32/5c3a556118aca9981d883f38c4b1bfae646f3627157f70f4068e5a648955/multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4", size = 221494, upload-time = "2025-04-10T22:19:41.447Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3b/1599631f59024b75c4d6e3069f4502409970a336647502aaf6b62fb7ac98/multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5", size = 41775, upload-time = "2025-04-10T22:19:43.707Z" }, + { url = "https://files.pythonhosted.org/packages/e8/4e/09301668d675d02ca8e8e1a3e6be046619e30403f5ada2ed5b080ae28d02/multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208", size = 45946, upload-time = "2025-04-10T22:19:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/96/10/7d526c8974f017f1e7ca584c71ee62a638e9334d8d33f27d7cdfc9ae79e4/multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9", size = 10400, upload-time = "2025-04-10T22:20:16.445Z" }, +] + +[[package]] +name = "propcache" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/c8/fdc6686a986feae3541ea23dcaa661bd93972d3940460646c6bb96e21c40/propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf", size = 43651, upload-time = "2025-03-26T03:06:12.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/60/f645cc8b570f99be3cf46714170c2de4b4c9d6b827b912811eff1eb8a412/propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8", size = 77865, upload-time = "2025-03-26T03:04:53.406Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d4/c1adbf3901537582e65cf90fd9c26fde1298fde5a2c593f987112c0d0798/propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f", size = 45452, upload-time = "2025-03-26T03:04:54.624Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b5/fe752b2e63f49f727c6c1c224175d21b7d1727ce1d4873ef1c24c9216830/propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111", size = 44800, upload-time = "2025-03-26T03:04:55.844Z" }, + { url = "https://files.pythonhosted.org/packages/62/37/fc357e345bc1971e21f76597028b059c3d795c5ca7690d7a8d9a03c9708a/propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5", size = 225804, upload-time = "2025-03-26T03:04:57.158Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f1/16e12c33e3dbe7f8b737809bad05719cff1dccb8df4dafbcff5575002c0e/propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb", size = 230650, upload-time = "2025-03-26T03:04:58.61Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a2/018b9f2ed876bf5091e60153f727e8f9073d97573f790ff7cdf6bc1d1fb8/propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7", size = 234235, upload-time = "2025-03-26T03:05:00.599Z" }, + { url = "https://files.pythonhosted.org/packages/45/5f/3faee66fc930dfb5da509e34c6ac7128870631c0e3582987fad161fcb4b1/propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120", size = 228249, upload-time = "2025-03-26T03:05:02.11Z" }, + { url = "https://files.pythonhosted.org/packages/62/1e/a0d5ebda5da7ff34d2f5259a3e171a94be83c41eb1e7cd21a2105a84a02e/propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654", size = 214964, upload-time = "2025-03-26T03:05:03.599Z" }, + { url = "https://files.pythonhosted.org/packages/db/a0/d72da3f61ceab126e9be1f3bc7844b4e98c6e61c985097474668e7e52152/propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e", size = 222501, upload-time = "2025-03-26T03:05:05.107Z" }, + { url = "https://files.pythonhosted.org/packages/18/6d/a008e07ad7b905011253adbbd97e5b5375c33f0b961355ca0a30377504ac/propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b", size = 217917, upload-time = "2025-03-26T03:05:06.59Z" }, + { url = "https://files.pythonhosted.org/packages/98/37/02c9343ffe59e590e0e56dc5c97d0da2b8b19fa747ebacf158310f97a79a/propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53", size = 217089, upload-time = "2025-03-26T03:05:08.1Z" }, + { url = "https://files.pythonhosted.org/packages/53/1b/d3406629a2c8a5666d4674c50f757a77be119b113eedd47b0375afdf1b42/propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5", size = 228102, upload-time = "2025-03-26T03:05:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/cd/a7/3664756cf50ce739e5f3abd48febc0be1a713b1f389a502ca819791a6b69/propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7", size = 230122, upload-time = "2025-03-26T03:05:11.408Z" }, + { url = "https://files.pythonhosted.org/packages/35/36/0bbabaacdcc26dac4f8139625e930f4311864251276033a52fd52ff2a274/propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef", size = 226818, upload-time = "2025-03-26T03:05:12.909Z" }, + { url = "https://files.pythonhosted.org/packages/cc/27/4e0ef21084b53bd35d4dae1634b6d0bad35e9c58ed4f032511acca9d4d26/propcache-0.3.1-cp313-cp313-win32.whl", hash = 
"sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24", size = 40112, upload-time = "2025-03-26T03:05:14.289Z" }, + { url = "https://files.pythonhosted.org/packages/a6/2c/a54614d61895ba6dd7ac8f107e2b2a0347259ab29cbf2ecc7b94fa38c4dc/propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037", size = 44034, upload-time = "2025-03-26T03:05:15.616Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a8/0a4fd2f664fc6acc66438370905124ce62e84e2e860f2557015ee4a61c7e/propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f", size = 82613, upload-time = "2025-03-26T03:05:16.913Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e5/5ef30eb2cd81576256d7b6caaa0ce33cd1d2c2c92c8903cccb1af1a4ff2f/propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c", size = 47763, upload-time = "2025-03-26T03:05:18.607Z" }, + { url = "https://files.pythonhosted.org/packages/87/9a/87091ceb048efeba4d28e903c0b15bcc84b7c0bf27dc0261e62335d9b7b8/propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc", size = 47175, upload-time = "2025-03-26T03:05:19.85Z" }, + { url = "https://files.pythonhosted.org/packages/3e/2f/854e653c96ad1161f96194c6678a41bbb38c7947d17768e8811a77635a08/propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de", size = 292265, upload-time = "2025-03-26T03:05:21.654Z" }, + { url = "https://files.pythonhosted.org/packages/40/8d/090955e13ed06bc3496ba4a9fb26c62e209ac41973cb0d6222de20c6868f/propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6", size = 294412, upload-time = "2025-03-26T03:05:23.147Z" }, + { url = "https://files.pythonhosted.org/packages/39/e6/d51601342e53cc7582449e6a3c14a0479fab2f0750c1f4d22302e34219c6/propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7", size = 294290, upload-time = "2025-03-26T03:05:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/3b/4d/be5f1a90abc1881884aa5878989a1acdafd379a91d9c7e5e12cef37ec0d7/propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458", size = 282926, upload-time = "2025-03-26T03:05:26.459Z" }, + { url = "https://files.pythonhosted.org/packages/57/2b/8f61b998c7ea93a2b7eca79e53f3e903db1787fca9373af9e2cf8dc22f9d/propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11", size = 267808, upload-time = "2025-03-26T03:05:28.188Z" }, + { url = "https://files.pythonhosted.org/packages/11/1c/311326c3dfce59c58a6098388ba984b0e5fb0381ef2279ec458ef99bd547/propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c", size = 290916, upload-time = "2025-03-26T03:05:29.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/74/91939924b0385e54dc48eb2e4edd1e4903ffd053cf1916ebc5347ac227f7/propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf", size = 262661, upload-time = "2025-03-26T03:05:31.472Z" }, + { url = "https://files.pythonhosted.org/packages/c2/d7/e6079af45136ad325c5337f5dd9ef97ab5dc349e0ff362fe5c5db95e2454/propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27", size = 264384, upload-time = "2025-03-26T03:05:32.984Z" }, + { url = "https://files.pythonhosted.org/packages/b7/d5/ba91702207ac61ae6f1c2da81c5d0d6bf6ce89e08a2b4d44e411c0bbe867/propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757", size = 291420, upload-time = "2025-03-26T03:05:34.496Z" }, + { url = "https://files.pythonhosted.org/packages/58/70/2117780ed7edcd7ba6b8134cb7802aada90b894a9810ec56b7bb6018bee7/propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18", size = 290880, upload-time = "2025-03-26T03:05:36.256Z" }, + { url = "https://files.pythonhosted.org/packages/4a/1f/ecd9ce27710021ae623631c0146719280a929d895a095f6d85efb6a0be2e/propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a", size = 287407, upload-time = "2025-03-26T03:05:37.799Z" }, + { url = "https://files.pythonhosted.org/packages/3e/66/2e90547d6b60180fb29e23dc87bd8c116517d4255240ec6d3f7dc23d1926/propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d", size = 42573, upload-time = "2025-03-26T03:05:39.193Z" }, + { url = "https://files.pythonhosted.org/packages/cb/8f/50ad8599399d1861b4d2b6b45271f0ef6af1b09b0a2386a46dbaf19c9535/propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e", size = 46757, upload-time = "2025-03-26T03:05:40.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d3/c3cb8f1d6ae3b37f83e1de806713a9b3642c5895f0215a62e1a4bd6e5e34/propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40", size = 12376, upload-time = "2025-03-26T03:06:10.5Z" }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/77/ab/5250d56ad03884ab5efd07f734203943c8a8ab40d551e208af81d0257bf2/pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", size = 786540, upload-time = "2025-04-29T20:38:55.02Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/12/46b65f3534d099349e38ef6ec98b1a5a81f42536d17e0ba382c28c67ba67/pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb", size = 443900, upload-time = "2025-04-29T20:38:52.724Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = 
"2025-01-06T17:26:30.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, +] + +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "python-slugify" +version = "8.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "text-unidecode" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/87/c7/5e1547c44e31da50a460df93af11a535ace568ef89d7a811069ead340c4a/python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856", size = 10921, upload-time = "2024-02-08T18:32:45.488Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/62/02da182e544a51a5c3ccf4b03ab79df279f9c60c5e82d5e8bec7ca26ac11/python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8", size = 10051, upload-time = "2024-02-08T18:32:43.911Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "rebulk" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/06/24c69f8d707c9eefc1108a64e079da56b5f351e3f59ed76e8f04b9f3e296/rebulk-3.2.0.tar.gz", hash = "sha256:0d30bf80fca00fa9c697185ac475daac9bde5f646ce3338c9ff5d5dc1ebdfebc", size = 261685, upload-time = "2023-02-18T09:10:14.378Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/4d/df073d593f7e7e4a5a7e19148b2e9b4ae63b4ddcbb863f1e7bb2b6f19c62/rebulk-3.2.0-py3-none-any.whl", hash = "sha256:6bc31ae4b37200623c5827d2f539f9ec3e52b50431322dad8154642a39b0a53e", size = 56298, upload-time = "2023-02-18T09:10:12.435Z" }, +] + +[[package]] +name = "rich" +version = "14.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, +] + +[[package]] +name = "rich-toolkit" +version = "0.14.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/31/b6d055f291a660a7bcaec4bcc9457b9fef8ecb6293e527b1eef1840aefd4/rich_toolkit-0.14.6.tar.gz", hash = "sha256:9dbd40e83414b84e828bf899115fff8877ce5951b73175f44db142902f07645d", size = 110805, upload-time = "2025-05-12T19:19:15.284Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/3c/7a824c0514e87c61000583ac22c8321da6dc8e58a93d5f56e583482a2ee0/rich_toolkit-0.14.6-py3-none-any.whl", hash = "sha256:764f3a5f9e4b539ce805596863299e8982599514906dc5e3ccc2d390ef74c301", size = 24815, upload-time = "2025-05-12T19:19:13.713Z" }, +] + +[[package]] +name = "scanner" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "aiohttp" }, + { name = "asyncpg" }, + { name = "fastapi", extra = ["standard"] }, + { name = "guessit" }, + { name = "langcodes" }, + { name = "pydantic" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "python-slugify" }, + { name = "watchfiles" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiohttp", specifier = ">=3.11.18" }, + { name = "asyncpg", specifier = ">=0.30.0" }, + { name = "fastapi", extras = ["standard"], specifier = ">=0.115.12" }, + { name = "guessit", git = "https://github.com/zoriya/guessit" }, + { name = "langcodes", specifier = ">=3.5.0" }, + { name = "pydantic", specifier = ">=2.11.4" }, + { name = "pyjwt", extras = ["crypto"], specifier = ">=2.10.1" }, + { name = "python-slugify", specifier = ">=8.0.4" }, + { name = "watchfiles", specifier = ">=1.0.5" }, +] + +[[package]] +name = "setuptools" +version = "80.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/8b/dc1773e8e5d07fd27c1632c45c1de856ac3dbf09c0147f782ca6d990cf15/setuptools-80.7.1.tar.gz", hash = "sha256:f6ffc5f0142b1bd8d0ca94ee91b30c0ca862ffd50826da1ea85258a06fd94552", size = 1319188, upload-time = "2025-05-15T02:41:00.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/18/0e835c3a557dc5faffc8f91092f62fc337c1dab1066715842e7a4b318ec4/setuptools-80.7.1-py3-none-any.whl", hash = "sha256:ca5cc1069b85dc23070a6628e6bcecb3292acac802399c7f8edc0100619f9009", size = 1200776, upload-time = "2025-05-15T02:40:58.887Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "starlette" +version = "0.46.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846, upload-time = "2025-04-13T13:56:17.942Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037, upload-time = "2025-04-13T13:56:16.21Z" }, +] + +[[package]] +name = "text-unidecode" +version = "1.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/e2/e9a00f0ccb71718418230718b3d900e71a5d16e701a3dae079a21e9cd8f8/text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93", size = 76885, upload-time = "2019-08-30T21:36:45.405Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/a5/c0b6468d3824fe3fde30dbb5e1f687b291608f9473681bbf7dabbf5a87d7/text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8", size = 78154, upload-time = "2019-08-30T21:37:03.543Z" }, +] + +[[package]] +name = "typer" +version = "0.15.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/89/c527e6c848739be8ceb5c44eb8208c52ea3515c6cf6406aa61932887bf58/typer-0.15.4.tar.gz", hash = "sha256:89507b104f9b6a0730354f27c39fae5b63ccd0c95b1ce1f1a6ba0cfd329997c3", size = 101559, upload-time = "2025-05-14T16:34:57.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/62/d4ba7afe2096d5659ec3db8b15d8665bdcb92a3c6ff0b95e99895b335a9c/typer-0.15.4-py3-none-any.whl", hash = "sha256:eb0651654dcdea706780c466cf06d8f174405a659ffff8f163cfbfee98c0e173", size = 45258, upload-time = "2025-05-14T16:34:55.583Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = 
"sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload-time = "2025-02-25T17:27:59.638Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload-time = "2025-02-25T17:27:57.754Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.34.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815, upload-time = "2025-04-19T06:02:50.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483, upload-time = "2025-04-19T06:02:48.42Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload-time = "2024-10-14T23:38:00.688Z" }, + { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload-time = "2024-10-14T23:38:02.309Z" }, + { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload-time = "2024-10-14T23:38:04.711Z" }, + { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload-time = "2024-10-14T23:38:06.385Z" }, + { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 
4454428, upload-time = "2024-10-14T23:38:08.416Z" }, + { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537, upload-time = "2025-04-08T10:36:26.722Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/62/435766874b704f39b2fecd8395a29042db2b5ec4005bd34523415e9bd2e0/watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d", size = 401531, upload-time = "2025-04-08T10:35:35.792Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a6/e52a02c05411b9cb02823e6797ef9bbba0bfaf1bb627da1634d44d8af833/watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763", size = 392417, upload-time = "2025-04-08T10:35:37.048Z" }, + { url = "https://files.pythonhosted.org/packages/3f/53/c4af6819770455932144e0109d4854437769672d7ad897e76e8e1673435d/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40", size = 453423, upload-time = "2025-04-08T10:35:38.357Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d1/8e88df58bbbf819b8bc5cfbacd3c79e01b40261cad0fc84d1e1ebd778a07/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563", size = 458185, upload-time = "2025-04-08T10:35:39.708Z" }, + { url = "https://files.pythonhosted.org/packages/ff/70/fffaa11962dd5429e47e478a18736d4e42bec42404f5ee3b92ef1b87ad60/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04", size = 486696, upload-time = "2025-04-08T10:35:41.469Z" }, + { url = "https://files.pythonhosted.org/packages/39/db/723c0328e8b3692d53eb273797d9a08be6ffb1d16f1c0ba2bdbdc2a3852c/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f", size = 522327, upload-time = "2025-04-08T10:35:43.289Z" }, + { url = "https://files.pythonhosted.org/packages/cd/05/9fccc43c50c39a76b68343484b9da7b12d42d0859c37c61aec018c967a32/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a", size = 499741, upload-time = "2025-04-08T10:35:44.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/14/499e90c37fa518976782b10a18b18db9f55ea73ca14641615056f8194bb3/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827", size = 453995, upload-time = "2025-04-08T10:35:46.336Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/d9/f75d6840059320df5adecd2c687fbc18960a7f97b55c300d20f207d48aef/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a", size = 629693, upload-time = "2025-04-08T10:35:48.161Z" }, + { url = "https://files.pythonhosted.org/packages/fc/17/180ca383f5061b61406477218c55d66ec118e6c0c51f02d8142895fcf0a9/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936", size = 624677, upload-time = "2025-04-08T10:35:49.65Z" }, + { url = "https://files.pythonhosted.org/packages/bf/15/714d6ef307f803f236d69ee9d421763707899d6298d9f3183e55e366d9af/watchfiles-1.0.5-cp313-cp313-win32.whl", hash = "sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc", size = 277804, upload-time = "2025-04-08T10:35:51.093Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b4/c57b99518fadf431f3ef47a610839e46e5f8abf9814f969859d1c65c02c7/watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11", size = 291087, upload-time = "2025-04-08T10:35:52.458Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] + +[[package]] +name = "yarl" +version = "1.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/51/c0edba5219027f6eab262e139f73e2417b0f4efffa23bf562f6e18f76ca5/yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307", size = 185258, upload-time = "2025-04-17T00:45:14.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/6f/514c9bff2900c22a4f10e06297714dbaf98707143b37ff0bcba65a956221/yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f", size = 145030, upload-time = "2025-04-17T00:43:15.083Z" }, + { url = "https://files.pythonhosted.org/packages/4e/9d/f88da3fa319b8c9c813389bfb3463e8d777c62654c7168e580a13fadff05/yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3", size = 96894, upload-time = "2025-04-17T00:43:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/cd/57/92e83538580a6968b2451d6c89c5579938a7309d4785748e8ad42ddafdce/yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d", size = 94457, upload-time = "2025-04-17T00:43:19.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/ee/7ee43bd4cf82dddd5da97fcaddb6fa541ab81f3ed564c42f146c83ae17ce/yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0", size = 343070, upload-time = "2025-04-17T00:43:21.426Z" }, + { url = "https://files.pythonhosted.org/packages/4a/12/b5eccd1109e2097bcc494ba7dc5de156e41cf8309fab437ebb7c2b296ce3/yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501", size = 337739, upload-time = "2025-04-17T00:43:23.634Z" }, + { url = "https://files.pythonhosted.org/packages/7d/6b/0eade8e49af9fc2585552f63c76fa59ef469c724cc05b29519b19aa3a6d5/yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc", size = 351338, upload-time = "2025-04-17T00:43:25.695Z" }, + { url = "https://files.pythonhosted.org/packages/45/cb/aaaa75d30087b5183c7b8a07b4fb16ae0682dd149a1719b3a28f54061754/yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d", size = 353636, upload-time = "2025-04-17T00:43:27.876Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/d9cb39ec68a91ba6e66fa86d97003f58570327d6713833edf7ad6ce9dde5/yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0", size = 348061, upload-time = "2025-04-17T00:43:29.788Z" }, + { url = "https://files.pythonhosted.org/packages/72/6b/103940aae893d0cc770b4c36ce80e2ed86fcb863d48ea80a752b8bda9303/yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a", size = 334150, upload-time = "2025-04-17T00:43:31.742Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b2/986bd82aa222c3e6b211a69c9081ba46484cffa9fab2a5235e8d18ca7a27/yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2", size = 362207, upload-time = "2025-04-17T00:43:34.099Z" }, + { url = "https://files.pythonhosted.org/packages/14/7c/63f5922437b873795d9422cbe7eb2509d4b540c37ae5548a4bb68fd2c546/yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9", size = 361277, upload-time = "2025-04-17T00:43:36.202Z" }, + { url = "https://files.pythonhosted.org/packages/81/83/450938cccf732466953406570bdb42c62b5ffb0ac7ac75a1f267773ab5c8/yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5", size = 364990, upload-time = "2025-04-17T00:43:38.551Z" }, + { url = "https://files.pythonhosted.org/packages/b4/de/af47d3a47e4a833693b9ec8e87debb20f09d9fdc9139b207b09a3e6cbd5a/yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877", size = 374684, upload-time = "2025-04-17T00:43:40.481Z" }, + { url = "https://files.pythonhosted.org/packages/62/0b/078bcc2d539f1faffdc7d32cb29a2d7caa65f1a6f7e40795d8485db21851/yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e", size = 382599, upload-time 
= "2025-04-17T00:43:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/74/a9/4fdb1a7899f1fb47fd1371e7ba9e94bff73439ce87099d5dd26d285fffe0/yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384", size = 378573, upload-time = "2025-04-17T00:43:44.797Z" }, + { url = "https://files.pythonhosted.org/packages/fd/be/29f5156b7a319e4d2e5b51ce622b4dfb3aa8d8204cd2a8a339340fbfad40/yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62", size = 86051, upload-time = "2025-04-17T00:43:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/52/56/05fa52c32c301da77ec0b5f63d2d9605946fe29defacb2a7ebd473c23b81/yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c", size = 92742, upload-time = "2025-04-17T00:43:49.193Z" }, + { url = "https://files.pythonhosted.org/packages/d4/2f/422546794196519152fc2e2f475f0e1d4d094a11995c81a465faf5673ffd/yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051", size = 163575, upload-time = "2025-04-17T00:43:51.533Z" }, + { url = "https://files.pythonhosted.org/packages/90/fc/67c64ddab6c0b4a169d03c637fb2d2a212b536e1989dec8e7e2c92211b7f/yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d", size = 106121, upload-time = "2025-04-17T00:43:53.506Z" }, + { url = "https://files.pythonhosted.org/packages/6d/00/29366b9eba7b6f6baed7d749f12add209b987c4cfbfa418404dbadc0f97c/yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229", size = 103815, upload-time = "2025-04-17T00:43:55.41Z" }, + { url = "https://files.pythonhosted.org/packages/28/f4/a2a4c967c8323c03689383dff73396281ced3b35d0ed140580825c826af7/yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1", size = 408231, upload-time = "2025-04-17T00:43:57.825Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a1/66f7ffc0915877d726b70cc7a896ac30b6ac5d1d2760613603b022173635/yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb", size = 390221, upload-time = "2025-04-17T00:44:00.526Z" }, + { url = "https://files.pythonhosted.org/packages/41/15/cc248f0504610283271615e85bf38bc014224122498c2016d13a3a1b8426/yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00", size = 411400, upload-time = "2025-04-17T00:44:02.853Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/f0823d7e092bfb97d24fce6c7269d67fcd1aefade97d0a8189c4452e4d5e/yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de", size = 411714, upload-time = "2025-04-17T00:44:04.904Z" }, + { url = "https://files.pythonhosted.org/packages/83/70/be418329eae64b9f1b20ecdaac75d53aef098797d4c2299d82ae6f8e4663/yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5", size = 404279, upload-time = "2025-04-17T00:44:07.721Z" }, 
+ { url = "https://files.pythonhosted.org/packages/19/f5/52e02f0075f65b4914eb890eea1ba97e6fd91dd821cc33a623aa707b2f67/yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a", size = 384044, upload-time = "2025-04-17T00:44:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/6a/36/b0fa25226b03d3f769c68d46170b3e92b00ab3853d73127273ba22474697/yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9", size = 416236, upload-time = "2025-04-17T00:44:11.734Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3a/54c828dd35f6831dfdd5a79e6c6b4302ae2c5feca24232a83cb75132b205/yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145", size = 402034, upload-time = "2025-04-17T00:44:13.975Z" }, + { url = "https://files.pythonhosted.org/packages/10/97/c7bf5fba488f7e049f9ad69c1b8fdfe3daa2e8916b3d321aa049e361a55a/yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda", size = 407943, upload-time = "2025-04-17T00:44:16.052Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a4/022d2555c1e8fcff08ad7f0f43e4df3aba34f135bff04dd35d5526ce54ab/yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f", size = 423058, upload-time = "2025-04-17T00:44:18.547Z" }, + { url = "https://files.pythonhosted.org/packages/4c/f6/0873a05563e5df29ccf35345a6ae0ac9e66588b41fdb7043a65848f03139/yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd", size = 423792, upload-time = "2025-04-17T00:44:20.639Z" }, + { url = "https://files.pythonhosted.org/packages/9e/35/43fbbd082708fa42e923f314c24f8277a28483d219e049552e5007a9aaca/yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f", size = 422242, upload-time = "2025-04-17T00:44:22.851Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f7/f0f2500cf0c469beb2050b522c7815c575811627e6d3eb9ec7550ddd0bfe/yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac", size = 93816, upload-time = "2025-04-17T00:44:25.491Z" }, + { url = "https://files.pythonhosted.org/packages/3f/93/f73b61353b2a699d489e782c3f5998b59f974ec3156a2050a52dfd7e8946/yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe", size = 101093, upload-time = "2025-04-17T00:44:27.418Z" }, + { url = "https://files.pythonhosted.org/packages/ea/1f/70c57b3d7278e94ed22d85e09685d3f0a38ebdd8c5c73b65ba4c0d0fe002/yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124", size = 46124, upload-time = "2025-04-17T00:45:12.199Z" }, +] diff --git a/shell.nix b/shell.nix index 1e3fb313..632ba261 100644 --- a/shell.nix +++ b/shell.nix @@ -1,51 +1,17 @@ -{pkgs ? 
import <nixpkgs> {}}: let - python = pkgs.python312.withPackages (ps: - with ps; [ - guessit - aiohttp - jsons - watchfiles - pika - aio-pika - requests - dataclasses-json - msgspec - langcodes - ]); - dotnet = with pkgs.dotnetCorePackages; - combinePackages [ - sdk_8_0 - aspnetcore_8_0 - ]; -in - pkgs.mkShell { - packages = with pkgs; [ - # nodejs-18_x - nodePackages.yarn - dotnet - csharpier - python - ruff - go - wgo - mediainfo - ffmpeg-full - postgresql_15 - pgformatter - biome - kubernetes-helm - go-migrate - sqlc - go-swag - bun - pkg-config - nodejs - node-gyp - vips - hurl - ]; +{pkgs ? import <nixpkgs> {}}: +pkgs.mkShell { + name = "kyoo"; + inputsFrom = [ + (import ./api/shell.nix {inherit pkgs;}) + (import ./auth/shell.nix {inherit pkgs;}) + (import ./back/shell.nix {inherit pkgs;}) + (import ./chart/shell.nix {inherit pkgs;}) + (import ./scanner/shell.nix {inherit pkgs;}) + (import ./transcoder/shell.nix {inherit pkgs;}) + ]; - DOTNET_ROOT = "${dotnet}"; - - SHARP_FORCE_GLOBAL_LIBVIPS = 1; - } + # env vars aren't inherited from the `inputsFrom` + SHARP_FORCE_GLOBAL_LIBVIPS = 1; + UV_PYTHON_PREFERENCE = "only-system"; + UV_PYTHON = pkgs.python313; +} diff --git a/transcoder/.dockerignore b/transcoder/.dockerignore index 2f7896d1..26f014e6 100644 --- a/transcoder/.dockerignore +++ b/transcoder/.dockerignore @@ -1 +1,7 @@ -target/ +** +!/go.mod +!/go.sum +!/**.go +!/migrations +# generated via swag +!/docs diff --git a/transcoder/Dockerfile b/transcoder/Dockerfile index 5368595c..a9c98f0e 100644 --- a/transcoder/Dockerfile +++ b/transcoder/Dockerfile @@ -24,11 +24,13 @@ RUN apt-get update \ ffmpeg libavformat-dev libavutil-dev libswscale-dev \ && apt-get clean autoclean -y \ && apt-get autoremove -y + WORKDIR /app COPY go.mod go.sum ./ RUN go mod download + COPY . . -RUN go build -o ./transcoder +RUN GOOS=linux go build -o ./transcoder # debian is required for nvidia hardware acceleration # we use trixie (debian's testing because ffmpeg on latest is v5 and we need v6) diff --git a/transcoder/Dockerfile.dev b/transcoder/Dockerfile.dev index 773bd23b..b3ccf9b7 100644 --- a/transcoder/Dockerfile.dev +++ b/transcoder/Dockerfile.dev @@ -22,8 +22,6 @@ RUN set -eux; \ ENV SSL_CERT_DIR=/etc/ssl/certs RUN update-ca-certificates -RUN go install github.com/bokwoon95/wgo@latest - # read target arch from buildx or default to amd64 if using legacy builder. ARG TARGETARCH ENV TARGETARCH=${TARGETARCH:-amd64} @@ -51,5 +49,7 @@ ENV NVIDIA_DRIVER_CAPABILITIES="all" COPY go.mod go.sum ./ RUN go mod download +COPY . . + EXPOSE 7666 -CMD ["wgo", "run", "-race", "."] +CMD ["go", "run", "-race", "."] diff --git a/transcoder/shell.nix b/transcoder/shell.nix new file mode 100644 index 00000000..cd5c85e4 --- /dev/null +++ b/transcoder/shell.nix @@ -0,0 +1,14 @@ +{pkgs ? import <nixpkgs> {}}: +pkgs.mkShell { + packages = with pkgs; [ + go + wgo + go-migrate + # for psql in cli (+ pgformatter for sql files) + postgresql_15 + pgformatter + # to debug video files + mediainfo + ffmpeg-full + ]; +}