Scanner rework for v5 (#923)

This commit is contained in:
Zoe Roux 2025-06-07 19:25:46 +02:00 committed by GitHub
commit b602185237
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
148 changed files with 6390 additions and 3016 deletions

View File

@ -1,6 +1,7 @@
# vi: ft=sh
# shellcheck disable=SC2034
# THIS IS V5 .ENV ; IF YOU ARE ON V4 PLEASE LOOK AT THE .ENV HERE: https://github.com/zoriya/Kyoo/blob/v4.7.1/.env.example
# Useful config options
@ -9,23 +10,8 @@ LIBRARY_ROOT=./video
# You should set this to a path where kyoo can write large amount of data, this is used as a cache by the transcoder.
# It will automatically be cleaned up on kyoo's startup/shutdown/runtime.
CACHE_ROOT=/tmp/kyoo_cache
LIBRARY_LANGUAGES=en
# If this is true, kyoo will prefer to download the media in the original language of the item.
MEDIA_PREFER_ORIGINAL_LANGUAGE=false
# A pattern (regex) to ignore files.
LIBRARY_IGNORE_PATTERN=".*/[dD]ownloads?/.*|.*[Tt][Rr][Aa][Ii][Ll][Ee][Rr].*"
# If this is true, new accounts wont have any permissions before you approve them in your admin dashboard.
REQUIRE_ACCOUNT_VERIFICATION=true
# Specify permissions of guest accounts, default is no permissions.
UNLOGGED_PERMISSIONS=
# but you can allow anyone to use your instance without account by doing:
# UNLOGGED_PERMISSIONS=overall.read,overall.play
# You can specify this to allow guests users to see your collection without behing able to play videos for example:
# UNLOGGED_PERMISSIONS=overall.read
# Specify permissions of new accounts.
DEFAULT_PERMISSIONS=overall.read,overall.play
LIBRARY_IGNORE_PATTERN=".*/[dD]ownloads?/.*"
# Hardware transcoding (equivalent of --profile docker compose option).
COMPOSE_PROFILES=cpu # cpu (no hardware acceleration) or vaapi or qsv or nvidia
@ -34,11 +20,6 @@ COMPOSE_PROFILES=cpu # cpu (no hardware acceleration) or vaapi or qsv or nvidia
GOCODER_PRESET=fast
# The following value should be set to a random sequence of characters.
# You MUST change it when installing kyoo (for security)
# You can input multiple api keys separated by a ,
KYOO_APIKEYS=t7H5!@4iMNsAaSJQ49pat4jprJgTcF656if#J3
# Keep those empty to use kyoo's default api key. You can also specify a custom API key if you want.
# go to https://www.themoviedb.org/settings/api and copy the api key (not the read access token, the api key)
THEMOVIEDB_APIKEY=
@ -77,28 +58,14 @@ OIDC_SERVICE_AUTHMETHOD=ClientSecretBasic
KYOO_URL=
# Database things
POSTGRES_USER=KyooUser
POSTGRES_PASSWORD=KyooPassword
POSTGRES_DB=kyooDB
POSTGRES_SERVER=postgres
POSTGRES_PORT=5432
# Read by the api container to know if it should run meilisearch's migrations/sync
# and download missing images. This is a good idea to only have one instance with this on
# Note: it does not run postgres migrations, use the migration container for that.
RUN_MIGRATIONS=true
MEILI_HOST="http://meilisearch:7700"
MEILI_MASTER_KEY="ghvjkgisbgkbgskegblfqbgjkebbhgwkjfb"
RABBITMQ_HOST=rabbitmq
RABBITMQ_PORT=5672
RABBITMQ_DEFAULT_USER=kyoo
RABBITMQ_DEFAULT_PASS=aohohunuhouhuhhoahothonseuhaoensuthoaentsuhha
PGUSER=kyoo
PGPASSWORD=password
PGDATABASE=kyoo
PGHOST=postgres
PGPORT=5432
# v5 stuff, does absolutely nothing on master (aka: you can delete this)
EXTRA_CLAIMS='{"permissions": ["core.read"], "verified": false}'
FIRST_USER_CLAIMS='{"permissions": ["users.read", "users.write", "apikeys.read", "apikeys.write", "users.delete", "core.read", "core.write"], "verified": true}'
FIRST_USER_CLAIMS='{"permissions": ["users.read", "users.write", "apikeys.read", "apikeys.write", "users.delete", "core.read", "core.write", "scanner.trigger"], "verified": true}'
GUEST_CLAIMS='{"permissions": ["core.read"]}'
PROTECTED_CLAIMS="permissions,verified"

View File

@ -48,8 +48,8 @@ jobs:
working-directory: ./auth
run: |
./keibi > logs &
wget --retry-connrefused --retry-on-http-error=502 http://localhost:4568/health
hurl --error-format long --variable host=http://localhost:4568 tests/*
wget --retry-connrefused --retry-on-http-error=502 http://localhost:4568/auth/health
hurl --error-format long --variable host=http://localhost:4568/auth tests/*
env:
PGHOST: localhost
FIRST_USER_CLAIMS: '{"permissions": ["users.read"]}'

View File

@ -71,3 +71,15 @@ jobs:
- name: Run go fmt
run: if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then exit 1; fi
auth:
name: "Lint auth"
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./auth
steps:
- uses: actions/checkout@v4
- name: Run go fmt
run: if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then exit 1; fi

View File

@ -3,3 +3,5 @@ function-case=1 #lowercase
keyword-case=1
type-case=1
no-space-function=1
keep-newline=1
nogrouping=1

7
api/.dockerignore Normal file
View File

@ -0,0 +1,7 @@
**
!/package.json
!/bun.lock
!/tsconfig.json
!/patches
!/src
!/drizzle

View File

@ -20,7 +20,7 @@ IMAGES_PATH=./images
# https://www.postgresql.org/docs/current/libpq-envars.html
PGUSER=kyoo
PGPASSWORD=password
PGDATABASE=kyooDB
PGDATABASE=kyoo
PGHOST=postgres
PGPORT=5432
# PGOPTIONS=-c search_path=kyoo,public

2
api/.gitignore vendored
View File

@ -1,3 +1,3 @@
node_modules
/node_modules
**/*.bun
images

View File

@ -5,6 +5,8 @@ COPY package.json bun.lock .
COPY patches patches
RUN bun install --production
COPY . .
EXPOSE 3567
CMD ["bun", "dev"]

View File

@ -8,7 +8,7 @@
"blurhash": "^2.0.5",
"drizzle-kit": "^0.31.0",
"drizzle-orm": "0.43.1",
"elysia": "^1.2.25",
"elysia": "^1.3.0",
"jose": "^6.0.10",
"parjs": "^1.3.9",
"pg": "^8.15.6",
@ -27,9 +27,9 @@
"packages": {
"@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="],
"@elysiajs/swagger": ["@elysiajs/swagger@github:zoriya/elysia-swagger#ef89c17", { "dependencies": { "@scalar/themes": "^0.9.81", "@scalar/types": "^0.1.3", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.2.0" } }, "zoriya-elysia-swagger-ef89c17"],
"@elysiajs/swagger": ["@elysiajs/swagger@github:zoriya/elysia-swagger#f88fbc7", { "dependencies": { "@scalar/themes": "^0.9.81", "@scalar/types": "^0.1.3", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "zoriya-elysia-swagger-f88fbc7"],
"@emnapi/runtime": ["@emnapi/runtime@1.4.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-LMshMVP0ZhACNjQNYXiU1iZJ6QCcv0lUdPDPugqGvCGXt5xtRVBPdtA0qU12pEXZzpWAhWlZYptfdAFq10DOVQ=="],
"@emnapi/runtime": ["@emnapi/runtime@1.4.3", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ=="],
"@esbuild-kit/core-utils": ["@esbuild-kit/core-utils@3.3.2", "", { "dependencies": { "esbuild": "~0.18.20", "source-map-support": "^0.5.21" } }, "sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ=="],
@ -85,9 +85,9 @@
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.3", "", { "os": "win32", "cpu": "x64" }, "sha512-ICgUR+kPimx0vvRzf+N/7L7tVSQeE3BYY+NhHRHXS1kBuPO7z2+7ea2HbhDyZdTephgvNvKrlDDKUexuCVBVvg=="],
"@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.1.0" }, "os": "darwin", "cpu": "arm64" }, "sha512-pn44xgBtgpEbZsu+lWf2KNb6OAf70X68k+yk69Ic2Xz11zHR/w24/U49XT7AeRwJ0Px+mhALhU5LPci1Aymk7A=="],
"@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.1.0" }, "os": "darwin", "cpu": "arm64" }, "sha512-OfXHZPppddivUJnqyKoi5YVeHRkkNE2zUFT2gbpKxp/JZCFYEYubnMg+gOp6lWfasPrTS+KPosKqdI+ELYVDtg=="],
"@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.1.0" }, "os": "darwin", "cpu": "x64" }, "sha512-VfuYgG2r8BpYiOUN+BfYeFo69nP/MIwAtSJ7/Zpxc5QF3KS22z8Pvg3FkrSFJBPNQ7mmcUcYQFBmEQp7eu1F8Q=="],
"@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.1.0" }, "os": "darwin", "cpu": "x64" }, "sha512-dYvWqmjU9VxqXmjEtjmvHnGqF8GrVjM2Epj9rJ6BUIXvk8slvNDJbhGFvIoXzkDhrJC2jUxNLz/GUjjvSzfw+g=="],
"@img/sharp-libvips-darwin-arm64": ["@img/sharp-libvips-darwin-arm64@1.1.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-HZ/JUmPwrJSoM4DIQPv/BfNh9yrOA8tlBbqbLz4JZ5uew2+o22Ik+tHQJcih7QJuSa0zo5coHTfD5J8inqj9DA=="],
@ -107,23 +107,25 @@
"@img/sharp-libvips-linuxmusl-x64": ["@img/sharp-libvips-linuxmusl-x64@1.1.0", "", { "os": "linux", "cpu": "x64" }, "sha512-wK7SBdwrAiycjXdkPnGCPLjYb9lD4l6Ze2gSdAGVZrEL05AOUJESWU2lhlC+Ffn5/G+VKuSm6zzbQSzFX/P65A=="],
"@img/sharp-linux-arm": ["@img/sharp-linux-arm@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm": "1.1.0" }, "os": "linux", "cpu": "arm" }, "sha512-anKiszvACti2sGy9CirTlNyk7BjjZPiML1jt2ZkTdcvpLU1YH6CXwRAZCA2UmRXnhiIftXQ7+Oh62Ji25W72jA=="],
"@img/sharp-linux-arm": ["@img/sharp-linux-arm@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm": "1.1.0" }, "os": "linux", "cpu": "arm" }, "sha512-0DZzkvuEOqQUP9mo2kjjKNok5AmnOr1jB2XYjkaoNRwpAYMDzRmAqUIa1nRi58S2WswqSfPOWLNOr0FDT3H5RQ=="],
"@img/sharp-linux-arm64": ["@img/sharp-linux-arm64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm64": "1.1.0" }, "os": "linux", "cpu": "arm64" }, "sha512-kX2c+vbvaXC6vly1RDf/IWNXxrlxLNpBVWkdpRq5Ka7OOKj6nr66etKy2IENf6FtOgklkg9ZdGpEu9kwdlcwOQ=="],
"@img/sharp-linux-arm64": ["@img/sharp-linux-arm64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm64": "1.1.0" }, "os": "linux", "cpu": "arm64" }, "sha512-D8n8wgWmPDakc83LORcfJepdOSN6MvWNzzz2ux0MnIbOqdieRZwVYY32zxVx+IFUT8er5KPcyU3XXsn+GzG/0Q=="],
"@img/sharp-linux-s390x": ["@img/sharp-linux-s390x@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linux-s390x": "1.1.0" }, "os": "linux", "cpu": "s390x" }, "sha512-7s0KX2tI9mZI2buRipKIw2X1ufdTeaRgwmRabt5bi9chYfhur+/C1OXg3TKg/eag1W+6CCWLVmSauV1owmRPxA=="],
"@img/sharp-linux-s390x": ["@img/sharp-linux-s390x@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linux-s390x": "1.1.0" }, "os": "linux", "cpu": "s390x" }, "sha512-EGZ1xwhBI7dNISwxjChqBGELCWMGDvmxZXKjQRuqMrakhO8QoMgqCrdjnAqJq/CScxfRn+Bb7suXBElKQpPDiw=="],
"@img/sharp-linux-x64": ["@img/sharp-linux-x64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linux-x64": "1.1.0" }, "os": "linux", "cpu": "x64" }, "sha512-wExv7SH9nmoBW3Wr2gvQopX1k8q2g5V5Iag8Zk6AVENsjwd+3adjwxtp3Dcu2QhOXr8W9NusBU6XcQUohBZ5MA=="],
"@img/sharp-linux-x64": ["@img/sharp-linux-x64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linux-x64": "1.1.0" }, "os": "linux", "cpu": "x64" }, "sha512-sD7J+h5nFLMMmOXYH4DD9UtSNBD05tWSSdWAcEyzqW8Cn5UxXvsHAxmxSesYUsTOBmUnjtxghKDl15EvfqLFbQ=="],
"@img/sharp-linuxmusl-arm64": ["@img/sharp-linuxmusl-arm64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-arm64": "1.1.0" }, "os": "linux", "cpu": "arm64" }, "sha512-DfvyxzHxw4WGdPiTF0SOHnm11Xv4aQexvqhRDAoD00MzHekAj9a/jADXeXYCDFH/DzYruwHbXU7uz+H+nWmSOQ=="],
"@img/sharp-linuxmusl-arm64": ["@img/sharp-linuxmusl-arm64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-arm64": "1.1.0" }, "os": "linux", "cpu": "arm64" }, "sha512-NEE2vQ6wcxYav1/A22OOxoSOGiKnNmDzCYFOZ949xFmrWZOVII1Bp3NqVVpvj+3UeHMFyN5eP/V5hzViQ5CZNA=="],
"@img/sharp-linuxmusl-x64": ["@img/sharp-linuxmusl-x64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-x64": "1.1.0" }, "os": "linux", "cpu": "x64" }, "sha512-pax/kTR407vNb9qaSIiWVnQplPcGU8LRIJpDT5o8PdAx5aAA7AS3X9PS8Isw1/WfqgQorPotjrZL3Pqh6C5EBg=="],
"@img/sharp-linuxmusl-x64": ["@img/sharp-linuxmusl-x64@0.34.2", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-x64": "1.1.0" }, "os": "linux", "cpu": "x64" }, "sha512-DOYMrDm5E6/8bm/yQLCWyuDJwUnlevR8xtF8bs+gjZ7cyUNYXiSf/E8Kp0Ss5xasIaXSHzb888V1BE4i1hFhAA=="],
"@img/sharp-wasm32": ["@img/sharp-wasm32@0.34.1", "", { "dependencies": { "@emnapi/runtime": "^1.4.0" }, "cpu": "none" }, "sha512-YDybQnYrLQfEpzGOQe7OKcyLUCML4YOXl428gOOzBgN6Gw0rv8dpsJ7PqTHxBnXnwXr8S1mYFSLSa727tpz0xg=="],
"@img/sharp-wasm32": ["@img/sharp-wasm32@0.34.2", "", { "dependencies": { "@emnapi/runtime": "^1.4.3" }, "cpu": "none" }, "sha512-/VI4mdlJ9zkaq53MbIG6rZY+QRN3MLbR6usYlgITEzi4Rpx5S6LFKsycOQjkOGmqTNmkIdLjEvooFKwww6OpdQ=="],
"@img/sharp-win32-ia32": ["@img/sharp-win32-ia32@0.34.1", "", { "os": "win32", "cpu": "ia32" }, "sha512-WKf/NAZITnonBf3U1LfdjoMgNO5JYRSlhovhRhMxXVdvWYveM4kM3L8m35onYIdh75cOMCo1BexgVQcCDzyoWw=="],
"@img/sharp-win32-arm64": ["@img/sharp-win32-arm64@0.34.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-cfP/r9FdS63VA5k0xiqaNaEoGxBg9k7uE+RQGzuK9fHt7jib4zAVVseR9LsE4gJcNWgT6APKMNnCcnyOtmSEUQ=="],
"@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.34.1", "", { "os": "win32", "cpu": "x64" }, "sha512-hw1iIAHpNE8q3uMIRCgGOeDoz9KtFNarFLQclLxr/LK1VBkj8nby18RjFvr6aP7USRYAjTZW6yisnBWMX571Tw=="],
"@img/sharp-win32-ia32": ["@img/sharp-win32-ia32@0.34.2", "", { "os": "win32", "cpu": "ia32" }, "sha512-QLjGGvAbj0X/FXl8n1WbtQ6iVBpWU7JO94u/P2M4a8CFYsvQi4GW2mRy/JqkRx0qpBzaOdKJKw8uc930EX2AHw=="],
"@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.34.2", "", { "os": "win32", "cpu": "x64" }, "sha512-aUdT6zEYtDKCaxkofmmJDJYGCf0+pJg3eU9/oBuqvEeoB9dKI6ZLc/1iLJCTuJQDO4ptntAlkUmHgGjyuobZbw=="],
"@scalar/openapi-types": ["@scalar/openapi-types@0.1.9", "", {}, "sha512-HQQudOSQBU7ewzfnBW9LhDmBE2XOJgSfwrh5PlUB7zJup/kaRkBGNgV2wMjNz9Af/uztiU/xNrO179FysmUT+g=="],
@ -133,9 +135,13 @@
"@sinclair/typebox": ["@sinclair/typebox@0.34.33", "", {}, "sha512-5HAV9exOMcXRUxo+9iYB5n09XxzCXnfy4VTNW4xnDv+FgjzAGY989C28BIdljKqmF+ZltUwujE3aossvcVtq6g=="],
"@tokenizer/inflate": ["@tokenizer/inflate@0.2.7", "", { "dependencies": { "debug": "^4.4.0", "fflate": "^0.8.2", "token-types": "^6.0.0" } }, "sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg=="],
"@tokenizer/token": ["@tokenizer/token@0.3.0", "", {}, "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="],
"@types/node": ["@types/node@22.13.13", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ=="],
"@types/pg": ["@types/pg@8.11.14", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^4.0.1" } }, "sha512-qyD11E5R3u0eJmd1lB0WnWKXJGA7s015nyARWljfz5DcX83TKAIlY+QrmvzQTsbIe+hkiFtkyL2gHC6qwF6Fbg=="],
"@types/pg": ["@types/pg@8.15.2", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^4.0.1" } }, "sha512-+BKxo5mM6+/A1soSHBI7ufUglqYXntChLDyTbvcAn1Lawi9J7J9Ok3jt6w7I0+T/UDJ4CyhHk66+GZbwmkYxSg=="],
"@unhead/schema": ["@unhead/schema@1.11.20", "", { "dependencies": { "hookable": "^5.5.3", "zhead": "^2.2.4" } }, "sha512-0zWykKAaJdm+/Y7yi/Yds20PrUK7XabLe9c3IRcjnwYmSWY6z0Cr19VIs3ozCj8P+GhR+/TI2mwtGlueCEYouA=="],
@ -143,7 +149,7 @@
"buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
"bun-types": ["bun-types@1.2.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-dbkp5Lo8HDrXkLrONm6bk+yiiYQSntvFUzQp0v3pzTAsXk6FtgVMjdQ+lzFNVAmQFUkPQZ3WMZqH5tTo+Dp/IA=="],
"bun-types": ["bun-types@1.2.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-Kuh4Ub28ucMRWeiUUWMHsT9Wcbr4H3kLIO72RZZElSDxSu7vpetRvxIUDUaW6QtaIeixIpm7OXtNnZPf82EzwA=="],
"char-info": ["char-info@0.3.5", "", { "dependencies": { "node-interval-tree": "^1.3.3" } }, "sha512-gRslEBFEcuLMGLNO1EFIrdN1MMUfO+aqa7y8iWzNyAzB3mYKnTIvP+ioW3jpyeEvqA5WapVLIPINGtFjEIH4cQ=="],
@ -159,27 +165,35 @@
"debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="],
"detect-libc": ["detect-libc@2.0.3", "", {}, "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw=="],
"detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="],
"drizzle-kit": ["drizzle-kit@0.31.0", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.2", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-pcKVT+GbfPA+bUovPIilgVOoq+onNBo/YQBG86sf3/GFHkN6lRJPm1l7dKN0IMAk57RQoIm4GUllRrasLlcaSg=="],
"drizzle-kit": ["drizzle-kit@0.31.1", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.2", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-PUjYKWtzOzPtdtQlTHQG3qfv4Y0XT8+Eas6UbxCmxTj7qgMf+39dDujf1BP1I+qqZtw9uzwTh8jYtkMuCq+B0Q=="],
"drizzle-orm": ["drizzle-orm@0.43.1", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-dUcDaZtE/zN4RV/xqGrVSMpnEczxd5cIaoDeor7Zst9wOe/HzC/7eAaulywWGYXdDEc9oBPMjayVEDg0ziTLJA=="],
"elysia": ["elysia@1.2.25", "", { "dependencies": { "@sinclair/typebox": "^0.34.27", "cookie": "^1.0.2", "memoirist": "^0.3.0", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeers": ["typescript"] }, "sha512-WsdQpORJvb4uszzeqYT0lg97knw1iBW1NTzJ1Jm57tiHg+DfAotlWXYbjmvQ039ssV0fYELDHinLLoUazZkEHg=="],
"elysia": ["elysia@1.3.1", "", { "dependencies": { "cookie": "^1.0.2", "exact-mirror": "0.1.2", "fast-decode-uri-component": "^1.0.1" }, "optionalDependencies": { "@sinclair/typebox": "^0.34.33", "openapi-types": "^12.1.3" }, "peerDependencies": { "file-type": ">= 20.0.0", "typescript": ">= 5.0.0" } }, "sha512-En41P6cDHcHtQ0nvfsn9ayB+8ahQJqG1nzvPX8FVZjOriFK/RtZPQBtXMfZDq/AsVIk7JFZGFEtAVEmztNJVhQ=="],
"esbuild": ["esbuild@0.25.3", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.3", "@esbuild/android-arm": "0.25.3", "@esbuild/android-arm64": "0.25.3", "@esbuild/android-x64": "0.25.3", "@esbuild/darwin-arm64": "0.25.3", "@esbuild/darwin-x64": "0.25.3", "@esbuild/freebsd-arm64": "0.25.3", "@esbuild/freebsd-x64": "0.25.3", "@esbuild/linux-arm": "0.25.3", "@esbuild/linux-arm64": "0.25.3", "@esbuild/linux-ia32": "0.25.3", "@esbuild/linux-loong64": "0.25.3", "@esbuild/linux-mips64el": "0.25.3", "@esbuild/linux-ppc64": "0.25.3", "@esbuild/linux-riscv64": "0.25.3", "@esbuild/linux-s390x": "0.25.3", "@esbuild/linux-x64": "0.25.3", "@esbuild/netbsd-arm64": "0.25.3", "@esbuild/netbsd-x64": "0.25.3", "@esbuild/openbsd-arm64": "0.25.3", "@esbuild/openbsd-x64": "0.25.3", "@esbuild/sunos-x64": "0.25.3", "@esbuild/win32-arm64": "0.25.3", "@esbuild/win32-ia32": "0.25.3", "@esbuild/win32-x64": "0.25.3" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-qKA6Pvai73+M2FtftpNKRxJ78GIjmFXFxd/1DVBqGo/qNhLSfv+G12n9pNoWdytJC8U00TrViOwpjT0zgqQS8Q=="],
"esbuild-register": ["esbuild-register@3.6.0", "", { "dependencies": { "debug": "^4.3.4" }, "peerDependencies": { "esbuild": ">=0.12 <1" } }, "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg=="],
"exact-mirror": ["exact-mirror@0.1.2", "", { "peerDependencies": { "@sinclair/typebox": "^0.34.15" }, "optionalPeers": ["@sinclair/typebox"] }, "sha512-wFCPCDLmHbKGUb8TOi/IS7jLsgR8WVDGtDK3CzcB4Guf/weq7G+I+DkXiRSZfbemBFOxOINKpraM6ml78vo8Zw=="],
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
"fflate": ["fflate@0.8.2", "", {}, "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A=="],
"file-type": ["file-type@20.5.0", "", { "dependencies": { "@tokenizer/inflate": "^0.2.6", "strtok3": "^10.2.0", "token-types": "^6.0.0", "uint8array-extras": "^1.4.0" } }, "sha512-BfHZtG/l9iMm4Ecianu7P8HRD2tBHLtjXinm4X62XBOYzi7CYA7jyqfJzOvXHqzVrVPYqBo2/GvbARMaaJkKVg=="],
"get-tsconfig": ["get-tsconfig@4.10.0", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A=="],
"hookable": ["hookable@5.5.3", "", {}, "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ=="],
"ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
"jose": ["jose@6.0.10", "", {}, "sha512-skIAxZqcMkOrSwjJvplIPYrlXGpxTPnro2/QWTDCxAdWQrSTV5/KqspMWmi5WAx5+ULswASJiZ0a+1B/Lxt9cw=="],
"memoirist": ["memoirist@0.3.0", "", {}, "sha512-wR+4chMgVPq+T6OOsk40u9Wlpw1Pjx66NMNiYxCQQ4EUJ7jDs3D9kTCeKdBOkvAiqXlHLVJlvYL01PvIJ1MPNg=="],
"jose": ["jose@6.0.11", "", {}, "sha512-QxG7EaliDARm1O1S8BGakqncGT9s25bKL1WSf6/oa17Tkqwi8D2ZNglqCF+DsYF88/rV66Q/Q2mFAy697E1DUg=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
@ -195,19 +209,21 @@
"pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="],
"pg": ["pg@8.15.6", "", { "dependencies": { "pg-connection-string": "^2.8.5", "pg-pool": "^3.9.6", "pg-protocol": "^1.9.5", "pg-types": "^2.1.0", "pgpass": "1.x" }, "optionalDependencies": { "pg-cloudflare": "^1.2.5" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-yvao7YI3GdmmrslNVsZgx9PfntfWrnXwtR+K/DjI0I/sTKif4Z623um+sjVZ1hk5670B+ODjvHDAckKdjmPTsg=="],
"peek-readable": ["peek-readable@7.0.0", "", {}, "sha512-nri2TO5JE3/mRryik9LlHFT53cgHfRK0Lt0BAZQXku/AW3E6XLt2GaY8siWi7dvW/m1z0ecn+J+bpDa9ZN3IsQ=="],
"pg": ["pg@8.16.0", "", { "dependencies": { "pg-connection-string": "^2.9.0", "pg-pool": "^3.10.0", "pg-protocol": "^1.10.0", "pg-types": "2.2.0", "pgpass": "1.0.5" }, "optionalDependencies": { "pg-cloudflare": "^1.2.5" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-7SKfdvP8CTNXjMUzfcVTaI+TDzBEeaUnVwiVGZQD1Hh33Kpev7liQba9uLd4CfN8r9mCVsD0JIpq03+Unpz+kg=="],
"pg-cloudflare": ["pg-cloudflare@1.2.5", "", {}, "sha512-OOX22Vt0vOSRrdoUPKJ8Wi2OpE/o/h9T8X1s4qSkCedbNah9ei2W2765be8iMVxQUsvgT7zIAT2eIa9fs5+vtg=="],
"pg-connection-string": ["pg-connection-string@2.8.5", "", {}, "sha512-Ni8FuZ8yAF+sWZzojvtLE2b03cqjO5jNULcHFfM9ZZ0/JXrgom5pBREbtnAw7oxsxJqHw9Nz/XWORUEL3/IFow=="],
"pg-connection-string": ["pg-connection-string@2.9.0", "", {}, "sha512-P2DEBKuvh5RClafLngkAuGe9OUlFV7ebu8w1kmaaOgPcpJd1RIFh7otETfI6hAR8YupOLFTY7nuvvIn7PLciUQ=="],
"pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="],
"pg-numeric": ["pg-numeric@1.0.2", "", {}, "sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw=="],
"pg-pool": ["pg-pool@3.9.6", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-rFen0G7adh1YmgvrmE5IPIqbb+IgEzENUm+tzm6MLLDSlPRoZVhzU1WdML9PV2W5GOdRA9qBKURlbt1OsXOsPw=="],
"pg-pool": ["pg-pool@3.10.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-DzZ26On4sQ0KmqnO34muPcmKbhrjmyiO4lCCR0VwEd7MjmiKf5NTg/6+apUEu0NF7ESa37CGzFxH513CoUmWnA=="],
"pg-protocol": ["pg-protocol@1.9.5", "", {}, "sha512-DYTWtWpfd5FOro3UnAfwvhD8jh59r2ig8bPtc9H8Ds7MscE/9NYruUQWFAOuraRl29jwcT2kyMFQ3MxeaVjUhg=="],
"pg-protocol": ["pg-protocol@1.10.0", "", {}, "sha512-IpdytjudNuLv8nhlHs/UrVBhU0e78J0oIS/0AVdTbWxSOkFUVdsHC/NrorO6nXsQNDTT1kzDSOMJubBQviX18Q=="],
"pg-types": ["pg-types@4.0.2", "", { "dependencies": { "pg-int8": "1.0.1", "pg-numeric": "1.0.2", "postgres-array": "~3.0.1", "postgres-bytea": "~3.0.0", "postgres-date": "~2.1.0", "postgres-interval": "^3.0.0", "postgres-range": "^1.1.1" } }, "sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng=="],
@ -225,11 +241,11 @@
"resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="],
"semver": ["semver@7.7.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="],
"semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
"shallowequal": ["shallowequal@1.1.0", "", {}, "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ=="],
"sharp": ["sharp@0.34.1", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.7.1" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.1", "@img/sharp-darwin-x64": "0.34.1", "@img/sharp-libvips-darwin-arm64": "1.1.0", "@img/sharp-libvips-darwin-x64": "1.1.0", "@img/sharp-libvips-linux-arm": "1.1.0", "@img/sharp-libvips-linux-arm64": "1.1.0", "@img/sharp-libvips-linux-ppc64": "1.1.0", "@img/sharp-libvips-linux-s390x": "1.1.0", "@img/sharp-libvips-linux-x64": "1.1.0", "@img/sharp-libvips-linuxmusl-arm64": "1.1.0", "@img/sharp-libvips-linuxmusl-x64": "1.1.0", "@img/sharp-linux-arm": "0.34.1", "@img/sharp-linux-arm64": "0.34.1", "@img/sharp-linux-s390x": "0.34.1", "@img/sharp-linux-x64": "0.34.1", "@img/sharp-linuxmusl-arm64": "0.34.1", "@img/sharp-linuxmusl-x64": "0.34.1", "@img/sharp-wasm32": "0.34.1", "@img/sharp-win32-ia32": "0.34.1", "@img/sharp-win32-x64": "0.34.1" } }, "sha512-1j0w61+eVxu7DawFJtnfYcvSv6qPFvfTaqzTQ2BLknVhHTwGS8sc63ZBF4rzkWMBVKybo4S5OBtDdZahh2A1xg=="],
"sharp": ["sharp@0.34.2", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.4", "semver": "^7.7.2" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.2", "@img/sharp-darwin-x64": "0.34.2", "@img/sharp-libvips-darwin-arm64": "1.1.0", "@img/sharp-libvips-darwin-x64": "1.1.0", "@img/sharp-libvips-linux-arm": "1.1.0", "@img/sharp-libvips-linux-arm64": "1.1.0", "@img/sharp-libvips-linux-ppc64": "1.1.0", "@img/sharp-libvips-linux-s390x": "1.1.0", "@img/sharp-libvips-linux-x64": "1.1.0", "@img/sharp-libvips-linuxmusl-arm64": "1.1.0", "@img/sharp-libvips-linuxmusl-x64": "1.1.0", "@img/sharp-linux-arm": "0.34.2", "@img/sharp-linux-arm64": "0.34.2", "@img/sharp-linux-s390x": "0.34.2", "@img/sharp-linux-x64": "0.34.2", "@img/sharp-linuxmusl-arm64": "0.34.2", "@img/sharp-linuxmusl-x64": "0.34.2", "@img/sharp-wasm32": "0.34.2", "@img/sharp-win32-arm64": "0.34.2", "@img/sharp-win32-ia32": "0.34.2", "@img/sharp-win32-x64": "0.34.2" } }, "sha512-lszvBmB9QURERtyKT2bNmsgxXK0ShJrL/fvqlonCo7e6xBF8nT8xU6pW+PMIbLsz0RxQk3rgH9kd8UmvOzlMJg=="],
"simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="],
@ -239,8 +255,16 @@
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
"strtok3": ["strtok3@10.2.2", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^7.0.0" } }, "sha512-Xt18+h4s7Z8xyZ0tmBoRmzxcop97R4BAh+dXouUDCYn+Em+1P3qpkUfI5ueWLT8ynC5hZ+q4iPEmGG1urvQGBg=="],
"token-types": ["token-types@6.0.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
"uint8array-extras": ["uint8array-extras@1.4.0", "", {}, "sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ=="],
"undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="],
"xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="],

View File

@ -0,0 +1 @@
ALTER TYPE "kyoo"."role_kind" ADD VALUE 'crew' BEFORE 'other';

File diff suppressed because it is too large Load Diff

View File

@ -148,6 +148,13 @@
"when": 1746198322219,
"tag": "0020_video_unique",
"breakpoints": true
},
{
"idx": 21,
"version": "7",
"when": 1747727831649,
"tag": "0021_crew",
"breakpoints": true
}
]
}

View File

@ -11,18 +11,18 @@
"dependencies": {
"@elysiajs/swagger": "zoriya/elysia-swagger#build",
"blurhash": "^2.0.5",
"drizzle-kit": "^0.31.0",
"drizzle-kit": "^0.31.1",
"drizzle-orm": "0.43.1",
"elysia": "^1.2.25",
"jose": "^6.0.10",
"elysia": "^1.3.1",
"jose": "^6.0.11",
"parjs": "^1.3.9",
"pg": "^8.15.6",
"sharp": "^0.34.1"
"pg": "^8.16.0",
"sharp": "^0.34.2"
},
"devDependencies": {
"@types/pg": "^8.11.14",
"@types/pg": "^8.15.2",
"node-addon-api": "^8.3.1",
"bun-types": "^1.2.11"
"bun-types": "^1.2.14"
},
"module": "src/index.js",
"patchedDependencies": {

16
api/shell.nix Normal file
View File

@ -0,0 +1,16 @@
{pkgs ? import <nixpkgs> {}}:
pkgs.mkShell {
packages = with pkgs; [
bun
biome
# for psql to debug from the cli
postgresql_15
# to build libvips (for sharp)
nodejs
node-gyp
pkg-config
vips
];
SHARP_FORCE_GLOBAL_LIBVIPS = 1;
}

View File

@ -37,17 +37,17 @@ export const auth = new Elysia({ name: "auth" })
.guard({
headers: t.Object(
{
authorization: t.TemplateLiteral("Bearer ${string}"),
authorization: t.Optional(t.TemplateLiteral("Bearer ${string}")),
},
{ additionalProperties: true },
),
})
.resolve(async ({ headers: { authorization }, error }) => {
.resolve(async ({ headers: { authorization }, status }) => {
const bearer = authorization?.slice(7);
if (!bearer) {
return error(500, {
status: 500,
message: "No jwt, auth server configuration error.",
return status(403, {
status: 403,
message: "No authorization header was found.",
});
}
@ -63,7 +63,7 @@ export const auth = new Elysia({ name: "auth" })
return { jwt };
} catch (err) {
return error(403, {
return status(403, {
status: 403,
message: "Invalid jwt. Verification vailed",
details: err,
@ -73,10 +73,10 @@ export const auth = new Elysia({ name: "auth" })
.macro({
permissions(perms: string[]) {
return {
beforeHandle: ({ jwt, error }) => {
beforeHandle: ({ jwt, status }) => {
for (const perm of perms) {
if (!jwt!.permissions.includes(perm)) {
return error(403, {
return status(403, {
status: 403,
message: `Missing permission: '${perm}'.`,
details: { current: jwt!.permissions, required: perms },
@ -87,7 +87,7 @@ export const auth = new Elysia({ name: "auth" })
};
},
})
.as("plugin");
.as("scoped");
const User = t.Object({
id: t.String({ format: "uuid" }),

View File

@ -52,10 +52,10 @@ export const base = new Elysia({ name: "base" })
detail: { description: "Check if the api is healthy." },
response: { 200: t.Object({ status: t.Literal("healthy") }) },
})
.as("plugin");
.as("scoped");
export const prefix = process.env.KYOO_PREFIX ?? "";
export const app = new Elysia({ prefix })
export const handlers = new Elysia({ prefix })
.use(base)
.use(auth)
.guard(

View File

@ -72,6 +72,7 @@ export const entryFilters: FilterDef = {
runtime: { column: entries.runtime, type: "float" },
airDate: { column: entries.airDate, type: "date" },
playedDate: { column: entryProgressQ.playedDate, type: "date" },
isAvailable: { column: isNotNull(entries.availableSince), type: "bool" },
};
const extraFilters: FilterDef = {
@ -255,7 +256,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
headers: { "accept-language": languages },
request: { url },
jwt: { sub },
error,
status,
}) => {
const [serie] = await db
.select({ pk: shows.pk })
@ -269,7 +270,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
.limit(1);
if (!serie) {
return error(404, {
return status(404, {
status: 404,
message: `No serie with the id or slug: '${id}'.`,
});
@ -335,7 +336,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
query: { limit, after, query, sort, filter },
request: { url },
jwt: { sub },
error,
status,
}) => {
const [serie] = await db
.select({ pk: shows.pk })
@ -349,7 +350,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
.limit(1);
if (!serie) {
return error(404, {
return status(404, {
status: 404,
message: `No serie with the id or slug: '${id}'.`,
});

View File

@ -28,14 +28,14 @@ function getRedirectToImageHandler({
headers: { "accept-language": languages },
query: { quality },
set,
error,
status,
redirect,
}: {
params: { id: string; image: "poster" | "thumbnail" | "banner" | "logo" };
headers: { "accept-language": string };
query: { quality: "high" | "medium" | "low" };
set: Context["set"];
error: Context["error"];
status: Context["status"];
redirect: Context["redirect"];
}) {
id ??= "random";
@ -76,13 +76,13 @@ function getRedirectToImageHandler({
.limit(1);
if (!ret) {
return error(404, {
return status(404, {
status: 404,
message: `No item found with id or slug: '${id}'.`,
});
}
if (!ret.language) {
return error(422, {
return status(422, {
status: 422,
message: "Accept-Language header could not be satisfied.",
});
@ -162,7 +162,7 @@ export const imagesH = new Elysia({ tags: ["images"] })
})
.get(
"/staff/:id/image",
async ({ params: { id }, query: { quality }, error, redirect }) => {
async ({ params: { id }, query: { quality }, status, redirect }) => {
const [ret] = await db
.select({ image: staff.image })
.from(staff)
@ -177,7 +177,7 @@ export const imagesH = new Elysia({ tags: ["images"] })
.limit(1);
if (!ret) {
return error(404, {
return status(404, {
status: 404,
message: `No staff member found with id or slug: '${id}'.`,
});
@ -211,7 +211,7 @@ export const imagesH = new Elysia({ tags: ["images"] })
headers: { "accept-language": languages },
query: { quality },
set,
error,
status,
redirect,
}) => {
const lang = processLanguages(languages);
@ -248,13 +248,13 @@ export const imagesH = new Elysia({ tags: ["images"] })
.limit(1);
if (!ret) {
return error(404, {
return status(404, {
status: 404,
message: `No studio found with id or slug: '${id}'.`,
});
}
if (!ret.language) {
return error(422, {
return status(422, {
status: 422,
message: "Accept-Language header could not be satisfied.",
});

View File

@ -111,10 +111,10 @@ export const historyH = new Elysia({ tags: ["profiles"] })
query: { sort, filter, query, limit, after },
headers: { "accept-language": languages, authorization },
request: { url },
error,
status,
}) => {
const uInfo = await getUserInfo(id, { authorization });
if ("status" in uInfo) return error(uInfo.status as 404, uInfo);
if ("status" in uInfo) return status(uInfo.status as 404, uInfo);
const langs = processLanguages(languages);
const items = (await getEntries({
@ -163,7 +163,7 @@ export const historyH = new Elysia({ tags: ["profiles"] })
)
.post(
"/profiles/me/history",
async ({ body, jwt: { sub }, error }) => {
async ({ body, jwt: { sub }, status }) => {
const profilePk = await getOrCreateProfile(sub);
const hist = values(
@ -321,7 +321,7 @@ export const historyH = new Elysia({ tags: ["profiles"] })
},
});
return error(201, { status: 201, inserted: rows.length });
return status(201, { status: 201, inserted: rows.length });
},
{
detail: { description: "Bulk add entries/movies to your watch history." },

View File

@ -197,10 +197,10 @@ export const watchlistH = new Elysia({ tags: ["profiles"] })
jwt: { settings },
headers: { "accept-language": languages, authorization },
request: { url },
error,
status,
}) => {
const uInfo = await getUserInfo(id, { authorization });
if ("status" in uInfo) return error(uInfo.status as 404, uInfo);
if ("status" in uInfo) return status(uInfo.status as 404, uInfo);
const langs = processLanguages(languages);
const items = await getShows({
@ -261,7 +261,7 @@ export const watchlistH = new Elysia({ tags: ["profiles"] })
)
.post(
"/series/:id/watchstatus",
async ({ params: { id }, body, jwt: { sub }, error }) => {
async ({ params: { id }, body, jwt: { sub }, status }) => {
const [show] = await db
.select({ pk: shows.pk, entriesCount: shows.entriesCount })
.from(shows)
@ -273,7 +273,7 @@ export const watchlistH = new Elysia({ tags: ["profiles"] })
);
if (!show) {
return error(404, {
return status(404, {
status: 404,
message: `No serie found for the id/slug: '${id}'.`,
});
@ -302,7 +302,7 @@ export const watchlistH = new Elysia({ tags: ["profiles"] })
)
.post(
"/movies/:id/watchstatus",
async ({ params: { id }, body, jwt: { sub }, error }) => {
async ({ params: { id }, body, jwt: { sub }, status }) => {
const [show] = await db
.select({ pk: shows.pk })
.from(shows)
@ -314,7 +314,7 @@ export const watchlistH = new Elysia({ tags: ["profiles"] })
);
if (!show) {
return error(404, {
return status(404, {
status: 404,
message: `No movie found for the id/slug: '${id}'.`,
});

View File

@ -51,7 +51,7 @@ export const seasonsH = new Elysia({ tags: ["series"] })
query: { limit, after, query, sort, filter },
headers: { "accept-language": languages },
request: { url },
error,
status,
}) => {
const langs = processLanguages(languages);
@ -67,7 +67,7 @@ export const seasonsH = new Elysia({ tags: ["series"] })
.limit(1);
if (!serie) {
return error(404, {
return status(404, {
status: 404,
message: `No serie with the id or slug: '${id}'.`,
});

View File

@ -21,7 +21,7 @@ type ImageTask = {
};
// this will only push a task to the image downloader service and not download it instantly.
// this is both done to prevent to many requests to be sent at once and to make sure POST
// this is both done to prevent too many requests to be sent at once and to make sure POST
// requests are not blocked by image downloading or blurhash calculation
export const enqueueOptImage = async (
tx: Transaction,
@ -68,7 +68,7 @@ export const enqueueOptImage = async (
kind: "image",
message,
});
await tx.execute(sql`notify image`);
await tx.execute(sql`notify kyoo_image`);
return {
id,
@ -103,8 +103,8 @@ export const processImages = async () => {
`);
await tx.delete(mqueue).where(eq(mqueue.id, item.id));
} catch (err) {
console.error("Failed to download image", img.url, err);
} catch (err: any) {
console.error("Failed to download image", img.url, err.message);
await tx
.update(mqueue)
.set({ attempt: sql`${mqueue.attempt}+1` })
@ -128,10 +128,10 @@ export const processImages = async () => {
const client = (await db.$client.connect()) as PoolClient;
client.on("notification", (evt) => {
if (evt.channel !== "image") return;
if (evt.channel !== "kyoo_image") return;
processAll();
});
await client.query("listen image");
await client.query("listen kyoo_image");
// start processing old tasks
await processAll();
@ -139,7 +139,13 @@ export const processImages = async () => {
};
async function downloadImage(id: string, url: string): Promise<string> {
// TODO: check if file exists before downloading
const low = await getFile(path.join(imageDir, `${id}.low.jpg`))
.arrayBuffer()
.catch(() => false as const);
if (low) {
return await getBlurhash(sharp(low));
}
const resp = await fetch(url, {
headers: { "User-Agent": `Kyoo v${version}` },
});
@ -167,20 +173,15 @@ async function downloadImage(id: string, url: string): Promise<string> {
await Bun.write(file, buffer, { mode: 0o660 });
}),
);
return await getBlurhash(image);
}
async function getBlurhash(image: sharp.Sharp): Promise<string> {
const { data, info } = await image
.resize(32, 32, { fit: "inside" })
.ensureAlpha()
.raw()
.toBuffer({ resolveWithObject: true });
const blurHash = encode(
new Uint8ClampedArray(data),
info.width,
info.height,
4,
3,
);
return blurHash;
return encode(new Uint8ClampedArray(data), info.width, info.height, 4, 3);
}

View File

@ -16,10 +16,10 @@ export const seed = new Elysia()
})
.post(
"/movies",
async ({ body, error }) => {
async ({ body, status }) => {
const ret = await seedMovie(body);
if ("status" in ret) return error(ret.status, ret as any);
return error(ret.updated ? 200 : 201, ret);
if ("status" in ret) return status(ret.status, ret as any);
return status(ret.updated ? 200 : 201, ret);
},
{
detail: {
@ -47,10 +47,10 @@ export const seed = new Elysia()
)
.post(
"/series",
async ({ body, error }) => {
async ({ body, status }) => {
const ret = await seedSerie(body);
if ("status" in ret) return error(ret.status, ret as any);
return error(ret.updated ? 200 : 201, ret);
if ("status" in ret) return status(ret.status, ret as any);
return status(ret.updated ? 200 : 201, ret);
},
{
detail: {

View File

@ -47,7 +47,7 @@ export const insertEntries = async (
items: (SeedEntry | SeedExtra)[],
onlyExtras = false,
) => {
if (!items) return [];
if (!items.length) return [];
const retEntries = await db.transaction(async (tx) => {
const vals: EntryI[] = await Promise.all(

View File

@ -12,13 +12,18 @@ export const insertSeasons = async (
show: { pk: number; slug: string },
items: SeedSeason[],
) => {
if (!items.length) return [];
return db.transaction(async (tx) => {
const vals: SeasonI[] = items.map((x) => {
const { translations, ...season } = x;
return {
...season,
showPk: show.pk,
slug: `${show.slug}-s${season.seasonNumber}`,
slug:
season.seasonNumber === 0
? `${show.slug}-specials`
: `${show.slug}-s${season.seasonNumber}`,
nextRefresh: guessNextRefresh(season.startAir ?? new Date()),
};
});

View File

@ -41,7 +41,7 @@ export const collections = new Elysia({
headers: { "accept-language": languages },
query: { preferOriginal, with: relations },
jwt: { sub, settings },
error,
status,
set,
}) => {
const langs = processLanguages(languages);
@ -58,13 +58,13 @@ export const collections = new Elysia({
userId: sub,
});
if (!ret) {
return error(404, {
return status(404, {
status: 404,
message: "Collection not found",
});
}
if (!ret.language) {
return error(422, {
return status(422, {
status: 422,
message: "Accept-Language header could not be satisfied.",
});
@ -109,7 +109,7 @@ export const collections = new Elysia({
)
.get(
"random",
async ({ error, redirect }) => {
async ({ status, redirect }) => {
const [serie] = await db
.select({ slug: shows.slug })
.from(shows)
@ -117,7 +117,7 @@ export const collections = new Elysia({
.orderBy(sql`random()`)
.limit(1);
if (!serie)
return error(404, {
return status(404, {
status: 404,
message: "No collection in the database.",
});
@ -230,7 +230,7 @@ export const collections = new Elysia({
headers: { "accept-language": languages },
jwt: { sub, settings },
request: { url },
error,
status,
}) => {
const [collection] = await db
.select({ pk: shows.pk })
@ -244,7 +244,7 @@ export const collections = new Elysia({
.limit(1);
if (!collection) {
return error(404, {
return status(404, {
status: 404,
message: `No collection with the id or slug: '${id}'.`,
});
@ -287,7 +287,7 @@ export const collections = new Elysia({
headers: { "accept-language": languages },
jwt: { sub, settings },
request: { url },
error,
status,
}) => {
const [collection] = await db
.select({ pk: shows.pk })
@ -301,7 +301,7 @@ export const collections = new Elysia({
.limit(1);
if (!collection) {
return error(404, {
return status(404, {
status: 404,
message: `No collection with the id or slug: '${id}'.`,
});
@ -344,7 +344,7 @@ export const collections = new Elysia({
headers: { "accept-language": languages },
jwt: { sub, settings },
request: { url },
error,
status,
}) => {
const [collection] = await db
.select({ pk: shows.pk })
@ -358,7 +358,7 @@ export const collections = new Elysia({
.limit(1);
if (!collection) {
return error(404, {
return status(404, {
status: 404,
message: `No collection with the id or slug: '${id}'.`,
});

View File

@ -1,4 +1,4 @@
import { type SQL, and, eq, exists, ne, sql } from "drizzle-orm";
import { type SQL, and, eq, exists, gt, ne, sql } from "drizzle-orm";
import { db } from "~/db";
import {
entries,
@ -60,7 +60,7 @@ export const showFilters: FilterDef = {
runtime: { column: shows.runtime, type: "float" },
airDate: { column: shows.startAir, type: "date" },
startAir: { column: shows.startAir, type: "date" },
endAir: { column: shows.startAir, type: "date" },
endAir: { column: shows.endAir, type: "date" },
originalLanguage: {
column: sql`${shows.original}->'language'`,
type: "string",
@ -76,6 +76,7 @@ export const showFilters: FilterDef = {
values: WatchlistStatus.enum,
},
score: { column: watchStatusQ.score, type: "int" },
isAvailable: { column: sql`(${shows.availableCount} > 0)`, type: "bool" },
};
export const showSort = Sort(
{

View File

@ -31,7 +31,7 @@ export const movies = new Elysia({ prefix: "/movies", tags: ["movies"] })
headers: { "accept-language": languages },
query: { preferOriginal, with: relations },
jwt: { sub, settings },
error,
status,
set,
}) => {
const langs = processLanguages(languages);
@ -48,13 +48,13 @@ export const movies = new Elysia({ prefix: "/movies", tags: ["movies"] })
userId: sub,
});
if (!ret) {
return error(404, {
return status(404, {
status: 404,
message: `No movie found with id or slug: '${id}'.`,
});
}
if (!ret.language) {
return error(422, {
return status(422, {
status: 422,
message: "Accept-Language header could not be satisfied.",
});
@ -99,7 +99,7 @@ export const movies = new Elysia({ prefix: "/movies", tags: ["movies"] })
)
.get(
"random",
async ({ error, redirect }) => {
async ({ status, redirect }) => {
const [movie] = await db
.select({ slug: shows.slug })
.from(shows)
@ -107,7 +107,7 @@ export const movies = new Elysia({ prefix: "/movies", tags: ["movies"] })
.orderBy(sql`random()`)
.limit(1);
if (!movie)
return error(404, {
return status(404, {
status: 404,
message: "No movies in the database.",
});

View File

@ -31,7 +31,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
headers: { "accept-language": languages },
query: { preferOriginal, with: relations },
jwt: { sub, settings },
error,
status,
set,
}) => {
const langs = processLanguages(languages);
@ -48,13 +48,13 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
userId: sub,
});
if (!ret) {
return error(404, {
return status(404, {
status: 404,
message: `No serie found with the id or slug: '${id}'.`,
});
}
if (!ret.language) {
return error(422, {
return status(422, {
status: 422,
message: "Accept-Language header could not be satisfied.",
});
@ -102,7 +102,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
)
.get(
"random",
async ({ error, redirect }) => {
async ({ status, redirect }) => {
const [serie] = await db
.select({ slug: shows.slug })
.from(shows)
@ -110,7 +110,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
.orderBy(sql`random()`)
.limit(1);
if (!serie)
return error(404, {
return status(404, {
status: 404,
message: "No series in the database.",
});

View File

@ -23,14 +23,14 @@ export const showsH = new Elysia({ prefix: "/shows", tags: ["shows"] })
.use(auth)
.get(
"random",
async ({ error, redirect }) => {
async ({ status, redirect }) => {
const [show] = await db
.select({ kind: shows.kind, slug: shows.slug })
.from(shows)
.orderBy(sql`random()`)
.limit(1);
if (!show)
return error(404, {
return status(404, {
status: 404,
message: "No shows in the database.",
});

View File

@ -120,14 +120,14 @@ export const staffH = new Elysia({ tags: ["staff"] })
.use(auth)
.get(
"/staff/:id",
async ({ params: { id }, error }) => {
async ({ params: { id }, status }) => {
const [ret] = await db
.select()
.from(staff)
.where(isUuid(id) ? eq(staff.id, id) : eq(staff.slug, id))
.limit(1);
if (!ret) {
return error(404, {
return status(404, {
status: 404,
message: `No staff found with the id or slug: '${id}'`,
});
@ -155,14 +155,14 @@ export const staffH = new Elysia({ tags: ["staff"] })
)
.get(
"/staff/random",
async ({ error, redirect }) => {
async ({ status, redirect }) => {
const [member] = await db
.select({ slug: staff.slug })
.from(staff)
.orderBy(sql`random()`)
.limit(1);
if (!member)
return error(404, {
return status(404, {
status: 404,
message: "No staff in the database.",
});
@ -192,7 +192,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
headers: { "accept-language": languages },
request: { url },
jwt: { sub, settings },
error,
status,
}) => {
const [member] = await db
.select({ pk: staff.pk })
@ -201,7 +201,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
.limit(1);
if (!member) {
return error(404, {
return status(404, {
status: 404,
message: `No staff member with the id or slug: '${id}'.`,
});
@ -363,7 +363,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
params: { id },
query: { limit, after, query, sort, filter },
request: { url },
error,
status,
}) => {
const [movie] = await db
.select({ pk: shows.pk })
@ -377,7 +377,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
.limit(1);
if (!movie) {
return error(404, {
return status(404, {
status: 404,
message: `No movie with the id or slug: '${id}'.`,
});
@ -430,7 +430,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
params: { id },
query: { limit, after, query, sort, filter },
request: { url },
error,
status,
}) => {
const [serie] = await db
.select({ pk: shows.pk })
@ -444,7 +444,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
.limit(1);
if (!serie) {
return error(404, {
return status(404, {
status: 404,
message: `No serie with the id or slug: '${id}'.`,
});

View File

@ -135,7 +135,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
params: { id },
headers: { "accept-language": languages },
query: { with: relations },
error,
status,
set,
}) => {
const langs = processLanguages(languages);
@ -147,13 +147,13 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
relations,
});
if (!ret) {
return error(404, {
return status(404, {
status: 404,
message: `No studio found with the id or slug: '${id}'`,
});
}
if (!ret.language) {
return error(422, {
return status(422, {
status: 422,
message: "Accept-Language header could not be satisfied.",
});
@ -195,14 +195,14 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
)
.get(
"random",
async ({ error, redirect }) => {
async ({ status, redirect }) => {
const [studio] = await db
.select({ slug: studios.slug })
.from(studios)
.orderBy(sql`random()`)
.limit(1);
if (!studio)
return error(404, {
return status(404, {
status: 404,
message: "No studios in the database.",
});
@ -305,7 +305,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
headers: { "accept-language": languages },
jwt: { sub, settings },
request: { url },
error,
status,
}) => {
const [studio] = await db
.select({ pk: studios.pk })
@ -314,7 +314,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
.limit(1);
if (!studio) {
return error(404, {
return status(404, {
status: 404,
message: `No studios with the id or slug: '${id}'.`,
});
@ -366,7 +366,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
headers: { "accept-language": languages },
jwt: { sub, settings },
request: { url },
error,
status,
}) => {
const [studio] = await db
.select({ pk: studios.pk })
@ -375,7 +375,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
.limit(1);
if (!studio) {
return error(404, {
return status(404, {
status: 404,
message: `No studios with the id or slug: '${id}'.`,
});
@ -428,7 +428,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
headers: { "accept-language": languages },
jwt: { sub, settings },
request: { url },
error,
status,
}) => {
const [studio] = await db
.select({ pk: studios.pk })
@ -437,7 +437,7 @@ export const studiosH = new Elysia({ prefix: "/studios", tags: ["studios"] })
.limit(1);
if (!studio) {
return error(404, {
return status(404, {
status: 404,
message: `No studios with the id or slug: '${id}'.`,
});

View File

@ -1,7 +1,6 @@
import { and, eq, exists, inArray, not, notExists, or, sql } from "drizzle-orm";
import { alias } from "drizzle-orm/pg-core";
import { and, eq, notExists, or, sql } from "drizzle-orm";
import { Elysia, t } from "elysia";
import { db } from "~/db";
import { type Transaction, db } from "~/db";
import { entries, entryVideoJoin, shows, videos } from "~/db/schema";
import {
conflictUpdateAllExcept,
@ -23,7 +22,7 @@ import {
sortToSql,
} from "~/models/utils";
import { desc as description } from "~/models/utils/descriptions";
import { Guesses, SeedVideo, Video } from "~/models/video";
import { Guess, Guesses, SeedVideo, Video } from "~/models/video";
import { comment } from "~/utils";
import { computeVideoSlug } from "./seed/insert/entries";
import {
@ -31,9 +30,144 @@ import {
updateAvailableSince,
} from "./seed/insert/shows";
async function linkVideos(
tx: Transaction,
links: {
video: number;
entry: Omit<SeedVideo["for"], "movie" | "serie"> & {
movie?: { id?: string; slug?: string };
serie?: { id?: string; slug?: string };
};
}[],
) {
if (!links.length) return {};
const entriesQ = tx
.select({
pk: entries.pk,
id: entries.id,
slug: entries.slug,
kind: entries.kind,
seasonNumber: entries.seasonNumber,
episodeNumber: entries.episodeNumber,
order: entries.order,
showId: sql`${shows.id}`.as("showId"),
showSlug: sql`${shows.slug}`.as("showSlug"),
externalId: entries.externalId,
})
.from(entries)
.innerJoin(shows, eq(entries.showPk, shows.pk))
.as("entriesQ");
const hasRenderingQ = tx
.select()
.from(entryVideoJoin)
.where(eq(entryVideoJoin.entryPk, entriesQ.pk));
const ret = await tx
.insert(entryVideoJoin)
.select(
tx
.selectDistinctOn([entriesQ.pk, videos.pk], {
entryPk: entriesQ.pk,
videoPk: videos.pk,
slug: computeVideoSlug(entriesQ.slug, sql`exists(${hasRenderingQ})`),
})
.from(
values(links, {
video: "integer",
entry: "jsonb",
}).as("j"),
)
.innerJoin(videos, eq(videos.pk, sql`j.video`))
.innerJoin(
entriesQ,
or(
and(
sql`j.entry ? 'slug'`,
eq(entriesQ.slug, sql`j.entry->>'slug'`),
),
and(
sql`j.entry ? 'movie'`,
or(
eq(entriesQ.showId, sql`(j.entry #>> '{movie, id}')::uuid`),
eq(entriesQ.showSlug, sql`j.entry #>> '{movie, slug}'`),
),
eq(entriesQ.kind, "movie"),
),
and(
sql`j.entry ? 'serie'`,
or(
eq(entriesQ.showId, sql`(j.entry #>> '{serie, id}')::uuid`),
eq(entriesQ.showSlug, sql`j.entry #>> '{serie, slug}'`),
),
or(
and(
sql`j.entry ?& array['season', 'episode']`,
eq(entriesQ.seasonNumber, sql`(j.entry->>'season')::integer`),
eq(
entriesQ.episodeNumber,
sql`(j.entry->>'episode')::integer`,
),
),
and(
sql`j.entry ? 'order'`,
eq(entriesQ.order, sql`(j.entry->>'order')::float`),
),
and(
sql`j.entry ? 'special'`,
eq(
entriesQ.episodeNumber,
sql`(j.entry->>'special')::integer`,
),
eq(entriesQ.kind, "special"),
),
),
),
and(
sql`j.entry ? 'externalId'`,
sql`j.entry->'externalId' <@ ${entriesQ.externalId}`,
),
),
),
)
.onConflictDoUpdate({
target: [entryVideoJoin.entryPk, entryVideoJoin.videoPk],
// this is basically a `.onConflictDoNothing()` but we want `returning` to give us the existing data
set: { entryPk: sql`excluded.entry_pk` },
})
.returning({
slug: entryVideoJoin.slug,
entryPk: entryVideoJoin.entryPk,
videoPk: entryVideoJoin.videoPk,
});
const entr = ret.reduce(
(acc, x) => {
acc[x.videoPk] ??= [];
acc[x.videoPk].push({ slug: x.slug });
return acc;
},
{} as Record<number, { slug: string }[]>,
);
const entriesPk = [...new Set(ret.map((x) => x.entryPk))];
await updateAvailableCount(
tx,
tx
.selectDistinct({ pk: entries.showPk })
.from(entries)
.where(eq(entries.pk, sql`any(${sqlarr(entriesPk)})`)),
);
await updateAvailableSince(tx, entriesPk);
return entr;
}
const CreatedVideo = t.Object({
id: t.String({ format: "uuid" }),
path: t.String({ examples: [bubbleVideo.path] }),
guess: t.Omit(Guess, ["history"]),
entries: t.Array(
t.Object({
slug: t.String({ format: "slug", examples: ["bubble-v2"] }),
@ -60,7 +194,7 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] })
})
.from(videos)
.leftJoin(
sql`jsonb_array_elements_text(${videos.guess}->'year') as year`,
sql`jsonb_array_elements_text(${videos.guess}->'years') as year`,
sql`true`,
)
.innerJoin(entryVideoJoin, eq(entryVideoJoin.videoPk, videos.pk))
@ -119,7 +253,7 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] })
},
)
.get(
"unknowns",
"unmatched",
async ({ query: { sort, query, limit, after }, request: { url } }) => {
const ret = await db
.select()
@ -146,7 +280,7 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] })
return createPage(ret, { url, sort, limit });
},
{
detail: { description: "Get unknown/unmatch videos." },
detail: { description: "Get unknown/unmatched videos." },
query: t.Object({
sort: Sort(
{ createdAt: videos.createdAt, path: videos.path },
@ -169,9 +303,9 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] })
)
.post(
"",
async ({ body, error }) => {
async ({ body, status }) => {
return await db.transaction(async (tx) => {
let vids: { pk: number; id: string; path: string }[] = [];
let vids: { pk: number; id: string; path: string; guess: Guess }[] = [];
try {
vids = await tx
.insert(videos)
@ -184,10 +318,11 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] })
pk: videos.pk,
id: videos.id,
path: videos.path,
guess: videos.guess,
});
} catch (e) {
if (!isUniqueConstraint(e)) throw e;
return error(409, {
return status(409, {
status: 409,
message: comment`
Invalid rendering. A video with the same (rendering, part, version) combo
@ -202,7 +337,6 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] })
if (!x.for) return [];
return x.for.map((e) => ({
video: vids.find((v) => v.path === x.path)!.pk,
path: x.path,
entry: {
...e,
movie:
@ -222,148 +356,26 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] })
});
if (!vidEntries.length) {
return error(
return status(
201,
vids.map((x) => ({ id: x.id, path: x.path, entries: [] })),
vids.map((x) => ({
id: x.id,
path: x.path,
guess: x.guess,
entries: [],
})),
);
}
const entriesQ = tx
.select({
pk: entries.pk,
id: entries.id,
slug: entries.slug,
kind: entries.kind,
seasonNumber: entries.seasonNumber,
episodeNumber: entries.episodeNumber,
order: entries.order,
showId: sql`${shows.id}`.as("showId"),
showSlug: sql`${shows.slug}`.as("showSlug"),
externalId: entries.externalId,
})
.from(entries)
.innerJoin(shows, eq(entries.showPk, shows.pk))
.as("entriesQ");
const links = await linkVideos(tx, vidEntries);
const hasRenderingQ = tx
.select()
.from(entryVideoJoin)
.where(eq(entryVideoJoin.entryPk, entriesQ.pk));
const ret = await tx
.insert(entryVideoJoin)
.select(
tx
.selectDistinctOn([entriesQ.pk, videos.pk], {
entryPk: entriesQ.pk,
videoPk: videos.pk,
slug: computeVideoSlug(
entriesQ.slug,
sql`exists(${hasRenderingQ})`,
),
})
.from(
values(vidEntries, {
video: "integer",
entry: "jsonb",
}).as("j"),
)
.innerJoin(videos, eq(videos.pk, sql`j.video`))
.innerJoin(
entriesQ,
or(
and(
sql`j.entry ? 'slug'`,
eq(entriesQ.slug, sql`j.entry->>'slug'`),
),
and(
sql`j.entry ? 'movie'`,
or(
eq(
entriesQ.showId,
sql`(j.entry #>> '{movie, id}')::uuid`,
),
eq(entriesQ.showSlug, sql`j.entry #>> '{movie, slug}'`),
),
eq(entriesQ.kind, "movie"),
),
and(
sql`j.entry ? 'serie'`,
or(
eq(
entriesQ.showId,
sql`(j.entry #>> '{serie, id}')::uuid`,
),
eq(entriesQ.showSlug, sql`j.entry #>> '{serie, slug}'`),
),
or(
and(
sql`j.entry ?& array['season', 'episode']`,
eq(
entriesQ.seasonNumber,
sql`(j.entry->>'season')::integer`,
),
eq(
entriesQ.episodeNumber,
sql`(j.entry->>'episode')::integer`,
),
),
and(
sql`j.entry ? 'order'`,
eq(entriesQ.order, sql`(j.entry->>'order')::float`),
),
and(
sql`j.entry ? 'special'`,
eq(
entriesQ.episodeNumber,
sql`(j.entry->>'special')::integer`,
),
eq(entriesQ.kind, "special"),
),
),
),
and(
sql`j.entry ? 'externalId'`,
sql`j.entry->'externalId' <@ ${entriesQ.externalId}`,
),
),
),
)
.onConflictDoUpdate({
target: [entryVideoJoin.entryPk, entryVideoJoin.videoPk],
// this is basically a `.onConflictDoNothing()` but we want `returning` to give us the existing data
set: { entryPk: sql`excluded.entry_pk` },
})
.returning({
slug: entryVideoJoin.slug,
entryPk: entryVideoJoin.entryPk,
videoPk: entryVideoJoin.videoPk,
});
const entr = ret.reduce(
(acc, x) => {
acc[x.videoPk] ??= [];
acc[x.videoPk].push({ slug: x.slug });
return acc;
},
{} as Record<number, { slug: string }[]>,
);
const entriesPk = [...new Set(ret.map((x) => x.entryPk))];
await updateAvailableCount(
tx,
tx
.selectDistinct({ pk: entries.showPk })
.from(entries)
.where(eq(entries.pk, sql`any(${sqlarr(entriesPk)})`)),
);
await updateAvailableSince(tx, entriesPk);
return error(
return status(
201,
vids.map((x) => ({
id: x.id,
path: x.path,
entries: entr[x.pk] ?? [],
guess: x.guess,
entries: links[x.pk] ?? [],
})),
);
});
@ -446,4 +458,75 @@ export const videosH = new Elysia({ prefix: "/videos", tags: ["videos"] })
),
response: { 200: t.Array(t.String()) },
},
)
.post(
"/link",
async ({ body, status }) => {
return await db.transaction(async (tx) => {
const vids = await tx
.select({ pk: videos.pk, id: videos.id, path: videos.path })
.from(videos)
.where(eq(videos.id, sql`any(${sqlarr(body.map((x) => x.id))})`));
const lVids = body.flatMap((x) => {
return x.for.map((e) => ({
video: vids.find((v) => v.id === x.id)!.pk,
entry: {
...e,
movie:
"movie" in e
? isUuid(e.movie)
? { id: e.movie }
: { slug: e.movie }
: undefined,
serie:
"serie" in e
? isUuid(e.serie)
? { id: e.serie }
: { slug: e.serie }
: undefined,
},
}));
});
const links = await linkVideos(tx, lVids);
return status(
201,
vids.map((x) => ({
id: x.id,
path: x.path,
entries: links[x.pk] ?? [],
})),
);
});
},
{
detail: {
description: "Link existing videos to existing entries",
},
body: t.Array(
t.Object({
id: t.String({
description: "Id of the video",
format: "uuid",
}),
for: t.Array(SeedVideo.properties.for.items),
}),
),
response: {
201: t.Array(
t.Object({
id: t.String({ format: "uuid" }),
path: t.String({ examples: ["/video/made in abyss s1e13.mkv"] }),
entries: t.Array(
t.Object({
slug: t.String({
format: "slug",
examples: ["made-in-abyss-s1e13"],
}),
}),
),
}),
),
422: KError,
},
},
);

View File

@ -19,6 +19,7 @@ export const roleKind = schema.enum("role_kind", [
"writter",
"producer",
"music",
"crew",
"other",
]);

View File

@ -1,6 +1,6 @@
import { swagger } from "@elysiajs/swagger";
import Elysia from "elysia";
import { app } from "./base";
import { handlers } from "./base";
import { processImages } from "./controllers/seed/images";
import { migrate } from "./db";
import { comment } from "./utils";
@ -10,13 +10,14 @@ await migrate();
// run image processor task in background
processImages();
new Elysia()
const app = new Elysia()
.use(
swagger({
scalarConfig: {
sources: [
{ slug: "kyoo", url: "/swagger/json" },
{ slug: "keibi", url: "http://localhost:4568/auth/swagger/doc.json" },
{ slug: "keibi", url: "/auth/swagger/doc.json" },
{ slug: "scanner", url: "/scanner/openapi.json" },
],
},
documentation: {
@ -83,7 +84,7 @@ new Elysia()
},
}),
)
.use(app)
.use(handlers)
.listen(3567);
console.log(`Api running at ${app.server?.hostname}:${app.server?.port}`);

View File

@ -65,7 +65,7 @@ export const FullCollection = t.Intersect([
export type FullCollection = Prettify<typeof FullCollection.static>;
export const SeedCollection = t.Composite([
t.Omit(BaseCollection, ["kind", "startAir", "endAir", "nextRefresh"]),
t.Omit(BaseCollection, ["startAir", "endAir", "nextRefresh"]),
t.Object({
slug: t.String({ format: "slug" }),
originalLanguage: Language({

View File

@ -56,7 +56,7 @@ export type MovieEntry = Prettify<typeof MovieEntry.static>;
export const SeedMovieEntry = t.Composite([
t.Omit(BaseMovieEntry, ["thumbnail", "nextRefresh"]),
t.Object({
slug: t.Optional(t.String({ format: "slug" })),
slug: t.Optional(t.Nullable(t.String({ format: "slug" }))),
thumbnail: t.Nullable(SeedImage),
translations: TranslationRecord(
t.Intersect([

View File

@ -10,8 +10,9 @@ export const bubbleVideo: Video = {
guess: {
kind: "movie",
title: "bubble",
year: [2022],
years: [2022],
from: "guessit",
history: [],
},
createdAt: "2024-11-23T15:01:24.968Z",
updatedAt: "2024-11-23T15:01:24.968Z",
@ -33,7 +34,7 @@ export const bubble: SeedMovie = {
"https://image.tmdb.org/t/p/original/a8Q2g0g7XzAF6gcB8qgn37ccb9Y.jpg",
banner: null,
logo: "https://image.tmdb.org/t/p/original/ihIs7fayAmZieMlMQbs6TWM77uf.png",
trailerUrl: "https://www.youtube.com/watch?v=vs7zsyIZkMM",
trailer: "https://www.youtube.com/watch?v=vs7zsyIZkMM",
},
ja: {
name: "バブル2022",
@ -47,7 +48,7 @@ export const bubble: SeedMovie = {
thumbnail: "https://image.tmdb.org/t/p/original/jp.jpg",
banner: null,
logo: null,
trailerUrl: "https://www.youtube.com/watch?v=vs7zsyIZkMM",
trailer: "https://www.youtube.com/watch?v=vs7zsyIZkMM",
},
},
genres: ["animation", "adventure", "science-fiction", "fantasy"],

View File

@ -10,8 +10,9 @@ export const dune1984Video: Video = {
guess: {
kind: "movie",
title: "dune",
year: [1984],
years: [1984],
from: "guessit",
history: [],
},
createdAt: "2024-12-02T11:45:12.968Z",
updatedAt: "2024-12-02T11:45:12.968Z",
@ -33,7 +34,7 @@ export const dune1984: SeedMovie = {
"https://image.tmdb.org/t/p/original/pCHV6BntWLO2H6wQOj4LwzAWqpa.jpg",
banner: null,
logo: "https://image.tmdb.org/t/p/original/olbKnk2VvFcM2STl0dJAf6kfydo.png",
trailerUrl: "https://www.youtube.com/watch?v=vczYTLQ6oiE",
trailer: "https://www.youtube.com/watch?v=vczYTLQ6oiE",
},
},
genres: ["adventure", "drama", "science-fiction"],

View File

@ -10,8 +10,9 @@ export const duneVideo: Video = {
guess: {
kind: "movie",
title: "dune",
year: [2021],
years: [2021],
from: "guessit",
history: [],
},
createdAt: "2024-12-02T10:10:24.968Z",
updatedAt: "2024-12-02T10:10:24.968Z",
@ -33,7 +34,7 @@ export const dune: SeedMovie = {
"https://image.tmdb.org/t/p/original/k2ocXnNkmvE6rJomRkExIStFq3v.jpg",
banner: null,
logo: "https://image.tmdb.org/t/p/original/5nDsd3u1c6kDphbtIqkHseLg7HL.png",
trailerUrl: "https://www.youtube.com/watch?v=n9xhJrPXop4",
trailer: "https://www.youtube.com/watch?v=n9xhJrPXop4",
},
},
genres: ["adventure", "drama", "science-fiction", "action"],

View File

@ -9,10 +9,10 @@ export const madeInAbyssVideo: Video = {
version: 1,
guess: {
title: "Made in abyss",
season: [1],
episode: [13],
episodes: [{ season: 1, episode: 13 }],
kind: "episode",
from: "guessit",
history: [],
},
createdAt: "2024-11-23T15:01:24.968Z",
updatedAt: "2024-11-23T15:01:24.968Z",
@ -57,7 +57,7 @@ export const madeInAbyss = {
"https://image.tmdb.org/t/p/original/Df9XrvZFIeQfLKfu8evRmzvRsd.jpg",
logo: "https://image.tmdb.org/t/p/original/7hY3Q4GhkiYPBfn4UoVg0AO4Zgk.png",
banner: null,
trailerUrl: "https://www.youtube.com/watch?v=ePOyy6Wlk4s",
trailer: "https://www.youtube.com/watch?v=ePOyy6Wlk4s",
},
ja: {
name: "メイドインアビス",
@ -91,7 +91,7 @@ export const madeInAbyss = {
"https://image.tmdb.org/t/p/original/Df9XrvZFIeQfLKfu8evRmzvRsd.jpg",
logo: "https://image.tmdb.org/t/p/original/7hY3Q4GhkiYPBfn4UoVg0AO4Zgk.png",
banner: null,
trailerUrl: "https://www.youtube.com/watch?v=ePOyy6Wlk4s",
trailer: "https://www.youtube.com/watch?v=ePOyy6Wlk4s",
},
},
genres: [

View File

@ -72,7 +72,7 @@ export const FullMovie = t.Intersect([
export type FullMovie = Prettify<typeof FullMovie.static>;
export const SeedMovie = t.Composite([
t.Omit(BaseMovie, ["kind", "nextRefresh"]),
t.Omit(BaseMovie, ["nextRefresh"]),
t.Object({
slug: t.String({ format: "slug", examples: ["bubble"] }),
originalLanguage: Language({
@ -80,12 +80,19 @@ export const SeedMovie = t.Composite([
}),
translations: TranslationRecord(
t.Composite([
t.Omit(MovieTranslation, ["poster", "thumbnail", "banner", "logo"]),
t.Omit(MovieTranslation, [
"poster",
"thumbnail",
"banner",
"logo",
"trailerUrl",
]),
t.Object({
poster: t.Nullable(SeedImage),
thumbnail: t.Nullable(SeedImage),
banner: t.Nullable(SeedImage),
logo: t.Nullable(SeedImage),
trailer: t.Nullable(SeedImage),
latinName: t.Optional(Original.properties.latinName),
}),
]),

View File

@ -8,7 +8,7 @@ import { TranslationRecord } from "./utils/language";
import { Resource } from "./utils/resource";
export const BaseSeason = t.Object({
seasonNumber: t.Integer({ minimum: 1 }),
seasonNumber: t.Integer({ minimum: 0 }),
startAir: t.Nullable(t.String({ format: "date" })),
endAir: t.Nullable(t.String({ format: "date" })),

View File

@ -88,7 +88,7 @@ export const FullSerie = t.Intersect([
export type FullSerie = Prettify<typeof FullSerie.static>;
export const SeedSerie = t.Composite([
t.Omit(BaseSerie, ["kind", "nextRefresh"]),
t.Omit(BaseSerie, ["nextRefresh"]),
t.Object({
slug: t.String({ format: "slug" }),
originalLanguage: Language({
@ -96,12 +96,19 @@ export const SeedSerie = t.Composite([
}),
translations: TranslationRecord(
t.Composite([
t.Omit(SerieTranslation, ["poster", "thumbnail", "banner", "logo"]),
t.Omit(SerieTranslation, [
"poster",
"thumbnail",
"banner",
"logo",
"trailerUrl",
]),
t.Object({
poster: t.Nullable(SeedImage),
thumbnail: t.Nullable(SeedImage),
banner: t.Nullable(SeedImage),
logo: t.Nullable(SeedImage),
trailer: t.Nullable(SeedImage),
latinName: t.Optional(Original.properties.latinName),
}),
]),

View File

@ -16,6 +16,7 @@ export const Role = t.Object({
"writter",
"producer",
"music",
"crew",
"other",
]),
character: t.Nullable(Character),

View File

@ -9,7 +9,7 @@ export type FilterDef = {
[key: string]:
| {
column: Column | SQLWrapper;
type: "int" | "float" | "date" | "string";
type: "int" | "float" | "date" | "string" | "bool";
isArray?: boolean;
}
| {

View File

@ -29,7 +29,8 @@ export type Value =
| { type: "float"; value: number }
| { type: "date"; value: string }
| { type: "string"; value: string }
| { type: "enum"; value: string };
| { type: "enum"; value: string }
| { type: "bool"; value: boolean };
const operators = ["eq", "ne", "gt", "ge", "lt", "le", "has"] as const;
export type Operator = (typeof operators)[number];
export type Expression =

View File

@ -48,6 +48,18 @@ export const toDrizzle = (expr: Expression, config: FilterDef): SQL => {
// but parser doesn't know if an enum should be a string
expr.value = { type: "string", value: expr.value.value };
}
if (prop.type === "bool" && expr.value.type === "enum") {
if (expr.value.value !== "false" && expr.value.value !== "true") {
throw new KErrorT(
comment`
Invalid value for property ${expr.property}.
Get ${expr.value.value} but expected true or false.
`,
{ in: where },
);
}
expr.value = { type: "bool", value: expr.value.value === "true" };
}
if (prop.type !== expr.value.type) {
throw new KErrorT(
comment`

View File

@ -1,45 +1,37 @@
import { PatternStringExact } from "@sinclair/typebox";
import { PatternStringExact, type TSchema } from "@sinclair/typebox";
import { t } from "elysia";
import { type Prettify, comment } from "~/utils";
import { ExtraType } from "./entry/extra";
import { bubble, bubbleVideo, registerExamples } from "./examples";
import { DbMetadata, EpisodeId, ExternalId, Resource } from "./utils";
const ExternalIds = t.Record(
t.String(),
t.Omit(
t.Union([
EpisodeId.patternProperties[PatternStringExact],
ExternalId().patternProperties[PatternStringExact],
]),
["link"],
),
);
type ExternalIds = typeof ExternalIds.static;
const Opt = (schema: TSchema) => t.Optional(t.Nullable(schema));
export const Guess = t.Recursive((Self) =>
t.Object(
{
title: t.String(),
year: t.Optional(t.Array(t.Integer(), { default: [] })),
season: t.Optional(t.Array(t.Integer(), { default: [] })),
episode: t.Optional(t.Array(t.Integer(), { default: [] })),
kind: t.Optional(t.UnionEnum(["episode", "movie", "extra"])),
extraKind: t.Optional(ExtraType),
externalId: t.Optional(ExternalIds),
kind: Opt(t.UnionEnum(["episode", "movie", "extra"])),
extraKind: Opt(ExtraType),
years: Opt(t.Array(t.Integer(), { default: [] })),
episodes: Opt(
t.Array(
t.Object({ season: t.Nullable(t.Integer()), episode: t.Integer() }),
{ default: [] },
),
),
externalId: Opt(t.Record(t.String(), t.String())),
from: t.String({
description: "Name of the tool that made the guess",
}),
history: t.Optional(
t.Array(t.Omit(Self, ["history"]), {
default: [],
description: comment`
When another tool refines the guess or a user manually edit it, the history of the guesses
are kept in this \`history\` value.
`,
}),
),
history: t.Array(t.Omit(Self, ["history"]), {
default: [],
description: comment`
When another tool refines the guess or a user manually edit it, the history of the guesses
are kept in this \`history\` value.
`,
}),
},
{
additionalProperties: true,
@ -92,7 +84,16 @@ export const SeedVideo = t.Object({
}),
}),
t.Object({
externalId: ExternalIds,
externalId: t.Record(
t.String(),
t.Omit(
t.Union([
EpisodeId.patternProperties[PatternStringExact],
ExternalId().patternProperties[PatternStringExact],
]),
["link"],
),
),
}),
t.Object({
movie: t.Union([

View File

@ -1,5 +1,5 @@
import { buildUrl } from "tests/utils";
import { app } from "~/base";
import { handlers } from "~/base";
import type { SeedMovie } from "~/models/movie";
import type { MovieWatchStatus } from "~/models/watchlist";
import { getJwtHeaders } from "./jwt";
@ -11,7 +11,7 @@ export const getMovie = async (
...query
}: { langs?: string; preferOriginal?: boolean; with?: string[] },
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`movies/${id}`, query), {
method: "GET",
headers: langs
@ -38,7 +38,7 @@ export const getMovies = async ({
langs?: string;
preferOriginal?: boolean;
}) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("movies", query), {
method: "GET",
headers: langs
@ -54,7 +54,7 @@ export const getMovies = async ({
};
export const createMovie = async (movie: SeedMovie) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("movies"), {
method: "POST",
body: JSON.stringify(movie),
@ -72,7 +72,7 @@ export const setMovieStatus = async (
id: string,
status: Omit<MovieWatchStatus, "percent">,
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`movies/${id}/watchstatus`), {
method: "POST",
body: JSON.stringify(status),

View File

@ -1,12 +1,12 @@
import { buildUrl } from "tests/utils";
import { app } from "~/base";
import { handlers } from "~/base";
import type { SeedHistory } from "~/models/history";
import type { SeedSerie } from "~/models/serie";
import type { SerieWatchStatus } from "~/models/watchlist";
import { getJwtHeaders } from "./jwt";
export const createSerie = async (serie: SeedSerie) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("series"), {
method: "POST",
body: JSON.stringify(serie),
@ -27,7 +27,7 @@ export const getSerie = async (
...query
}: { langs?: string; preferOriginal?: boolean; with?: string[] },
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`series/${id}`, query), {
method: "GET",
headers: langs
@ -46,7 +46,7 @@ export const getSeries = async ({
langs,
...query
}: { langs?: string; preferOriginal?: boolean; with?: string[] }) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("series", query), {
method: "GET",
headers: langs
@ -76,7 +76,7 @@ export const getSeasons = async (
preferOriginal?: boolean;
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`series/${serie}/seasons`, opts), {
method: "GET",
headers: langs
@ -106,7 +106,7 @@ export const getEntries = async (
preferOriginal?: boolean;
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`series/${serie}/entries`, opts), {
method: "GET",
headers: langs
@ -131,7 +131,7 @@ export const getExtras = async (
query?: string;
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`series/${serie}/extras`, opts), {
method: "GET",
headers: await getJwtHeaders(),
@ -148,7 +148,7 @@ export const getUnknowns = async (opts: {
sort?: string | string[];
query?: string;
}) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("unknowns", opts), {
method: "GET",
headers: await getJwtHeaders(),
@ -169,7 +169,7 @@ export const getNews = async ({
langs?: string;
preferOriginal?: boolean;
}) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("news", opts), {
method: "GET",
headers: langs
@ -188,7 +188,7 @@ export const setSerieStatus = async (
id: string,
status: Omit<SerieWatchStatus, "seenCount">,
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`series/${id}/watchstatus`), {
method: "POST",
body: JSON.stringify(status),
@ -216,7 +216,7 @@ export const getHistory = async (
preferOriginal?: boolean;
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`profiles/${profile}/history`, opts), {
method: "GET",
headers: langs
@ -232,7 +232,7 @@ export const getHistory = async (
};
export const addToHistory = async (profile: string, seed: SeedHistory[]) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`profiles/${profile}/history`), {
method: "POST",
body: JSON.stringify(seed),

View File

@ -1,5 +1,5 @@
import { buildUrl } from "tests/utils";
import { app } from "~/base";
import { handlers } from "~/base";
import { getJwtHeaders } from "./jwt";
export const getShows = async ({
@ -14,7 +14,7 @@ export const getShows = async ({
langs?: string;
preferOriginal?: boolean;
}) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("shows", query), {
method: "GET",
headers: langs
@ -44,7 +44,7 @@ export const getWatchlist = async (
preferOriginal?: boolean;
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`profiles/${id}/watchlist`, query), {
method: "GET",
headers: langs
@ -74,7 +74,7 @@ export const getNextup = async (
preferOriginal?: boolean;
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`profiles/${id}/nextup`, query), {
method: "GET",
headers: langs

View File

@ -1,9 +1,9 @@
import { buildUrl } from "tests/utils";
import { app } from "~/base";
import { handlers } from "~/base";
import { getJwtHeaders } from "./jwt";
export const getStaff = async (id: string, query: {}) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`staff/${id}`, query), {
method: "GET",
headers: await getJwtHeaders(),
@ -28,7 +28,7 @@ export const getStaffRoles = async (
preferOriginal?: boolean;
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`staff/${staff}/roles`, opts), {
method: "GET",
headers: langs
@ -52,7 +52,7 @@ export const getSerieStaff = async (
sort?: string | string[];
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`series/${serie}/staff`, opts), {
method: "GET",
headers: await getJwtHeaders(),
@ -71,7 +71,7 @@ export const getMovieStaff = async (
sort?: string | string[];
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`movies/${movie}/staff`, opts), {
method: "GET",
headers: await getJwtHeaders(),

View File

@ -1,12 +1,12 @@
import { buildUrl } from "tests/utils";
import { app } from "~/base";
import { handlers } from "~/base";
import { getJwtHeaders } from "./jwt";
export const getStudio = async (
id: string,
{ langs, ...query }: { langs?: string; preferOriginal?: boolean },
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`studios/${id}`, query), {
method: "GET",
headers: langs
@ -36,7 +36,7 @@ export const getShowsByStudio = async (
preferOriginal?: boolean;
},
) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl(`studios/${studio}/shows`, opts), {
method: "GET",
headers: langs

View File

@ -1,10 +1,10 @@
import { buildUrl } from "tests/utils";
import { app } from "~/base";
import { handlers } from "~/base";
import type { SeedVideo } from "~/models/video";
import { getJwtHeaders } from "./jwt";
export const createVideo = async (video: SeedVideo | SeedVideo[]) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("videos"), {
method: "POST",
body: JSON.stringify(Array.isArray(video) ? video : [video]),
@ -19,7 +19,7 @@ export const createVideo = async (video: SeedVideo | SeedVideo[]) => {
};
export const getVideos = async () => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("videos"), {
method: "GET",
headers: await getJwtHeaders(),
@ -30,7 +30,7 @@ export const getVideos = async () => {
};
export const deleteVideo = async (paths: string[]) => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request(buildUrl("videos"), {
method: "DELETE",
body: JSON.stringify(paths),
@ -43,3 +43,20 @@ export const deleteVideo = async (paths: string[]) => {
const body = await resp.json();
return [resp, body] as const;
};
export const linkVideos = async (
links: { id: string; for: SeedVideo["for"] }[],
) => {
const resp = await handlers.handle(
new Request(buildUrl("videos/link"), {
method: "POST",
body: JSON.stringify(links),
headers: {
"Content-Type": "application/json",
...(await getJwtHeaders()),
},
}),
);
const body = await resp.json();
return [resp, body] as const;
};

View File

@ -16,7 +16,11 @@ const [_, ser] = await createSerie(madeInAbyss);
const [__, mov] = await createMovie(bubble);
const [resp, body] = await createVideo([
{
guess: { title: "mia", season: [1], episode: [13], from: "test" },
guess: {
title: "mia",
episodes: [{ season: 1, episode: 13 }],
from: "test",
},
part: null,
path: "/video/mia s1e13.mkv",
rendering: "sha2",
@ -26,9 +30,8 @@ const [resp, body] = await createVideo([
{
guess: {
title: "mia",
season: [2],
episode: [1],
year: [2017],
episodes: [{ season: 2, episode: 1 }],
years: [2017],
from: "test",
},
part: null,

View File

@ -6,7 +6,7 @@ import { shows } from "~/db/schema";
import { bubble } from "~/models/examples";
import { dune1984 } from "~/models/examples/dune-1984";
import { dune } from "~/models/examples/dune-2021";
import { app, createMovie, getMovies } from "../helpers";
import { createMovie, getMovies, handlers } from "../helpers";
beforeAll(async () => {
await db.delete(shows);
@ -21,7 +21,7 @@ describe("with a null value", () => {
// instead we just make a new file for those /shrug
// see: https://github.com/oven-sh/bun/issues/5738
beforeAll(async () => {
await createMovie({
const [ret, body] = await createMovie({
slug: "no-air-date",
translations: {
en: {
@ -34,7 +34,7 @@ describe("with a null value", () => {
tagline: null,
tags: [],
thumbnail: null,
trailerUrl: null,
trailer: null,
},
},
genres: [],
@ -46,6 +46,7 @@ describe("with a null value", () => {
externalId: {},
studios: [],
});
expectStatus(ret, body).toBe(201);
});
it("sort by dates desc with a null value", async () => {
@ -74,7 +75,7 @@ describe("with a null value", () => {
),
});
resp = await app.handle(
resp = await handlers.handle(
new Request(next, { headers: await getJwtHeaders() }),
);
body = await resp.json();
@ -123,7 +124,7 @@ describe("with a null value", () => {
),
});
resp = await app.handle(
resp = await handlers.handle(
new Request(next, { headers: await getJwtHeaders() }),
);
body = await resp.json();

View File

@ -7,7 +7,7 @@ import { bubble } from "~/models/examples";
import { dune1984 } from "~/models/examples/dune-1984";
import { dune } from "~/models/examples/dune-2021";
import type { Movie } from "~/models/movie";
import { app, createMovie, getMovies } from "../helpers";
import { createMovie, getMovies, handlers } from "../helpers";
beforeAll(async () => {
await db.delete(shows);
@ -72,7 +72,7 @@ describe("Get all movies", () => {
});
expectStatus(resp, body).toBe(200);
resp = await app.handle(
resp = await handlers.handle(
new Request(body.next, { headers: await getJwtHeaders() }),
);
body = await resp.json();
@ -107,7 +107,7 @@ describe("Get all movies", () => {
),
});
resp = await app.handle(
resp = await handlers.handle(
new Request(next, { headers: await getJwtHeaders() }),
);
body = await resp.json();
@ -165,7 +165,7 @@ describe("Get all movies", () => {
expect(items.length).toBe(1);
expect(items[0].id).toBe(expectedIds[0]);
// Get Second Page
resp = await app.handle(
resp = await handlers.handle(
new Request(body.next, { headers: await getJwtHeaders() }),
);
body = await resp.json();
@ -182,7 +182,7 @@ describe("Get all movies", () => {
});
expectStatus(resp, body).toBe(200);
const resp2 = await app.handle(
const resp2 = await handlers.handle(
new Request(body.next, { headers: await getJwtHeaders() }),
);
const body2 = await resp2.json();
@ -195,7 +195,7 @@ describe("Get all movies", () => {
});
it("Get /random", async () => {
const resp = await app.handle(
const resp = await handlers.handle(
new Request("http://localhost/movies/random", {
headers: await getJwtHeaders(),
}),

View File

@ -9,6 +9,7 @@ let bubbleId = "";
beforeAll(async () => {
await db.delete(shows);
await db.delete(videos);
await db.insert(videos).values(bubbleVideo);
const [ret, body] = await createMovie(bubble);
expect(ret.status).toBe(201);
@ -66,21 +67,29 @@ describe("Get movie", () => {
const [resp, body] = await getMovie(bubble.slug, { langs: "fr,pr,*" });
expectStatus(resp, body).toBe(200);
expect(body).toMatchObject({
slug: bubble.slug,
name: bubble.translations.en.name,
});
expect(resp.headers.get("Content-Language")).toBe("en");
expect(body.slug).toBe(bubble.slug);
const lang = resp.headers.get("Content-Language");
if (lang === "en") {
expect(body.name).toBe(bubble.translations.en.name);
} else if (lang === "ja") {
expect(body.name).toBe(bubble.translations.ja.name);
} else {
expect(lang).toBe("en");
}
});
it("Works without accept-language header", async () => {
const [resp, body] = await getMovie(bubble.slug, { langs: undefined });
expectStatus(resp, body).toBe(200);
expect(body).toMatchObject({
slug: bubble.slug,
name: bubble.translations.en.name,
});
expect(resp.headers.get("Content-Language")).toBe("en");
expect(body.slug).toBe(bubble.slug);
const lang = resp.headers.get("Content-Language");
if (lang === "en") {
expect(body.name).toBe(bubble.translations.en.name);
} else if (lang === "ja") {
expect(body.name).toBe(bubble.translations.ja.name);
} else {
expect(lang).toBe("en");
}
});
it("Fallback if translations does not exist", async () => {
const [resp, body] = await getMovie(bubble.slug, { langs: "en-au" });

View File

@ -49,7 +49,7 @@ describe("Movie seeding", () => {
thumbnail: null,
banner: null,
logo: null,
trailerUrl: null,
trailer: null,
},
},
});
@ -154,7 +154,7 @@ describe("Movie seeding", () => {
poster: null,
thumbnail: null,
logo: null,
trailerUrl: null,
trailer: null,
},
},
});
@ -180,7 +180,7 @@ describe("Movie seeding", () => {
poster: null,
thumbnail: null,
logo: null,
trailerUrl: null,
trailer: null,
},
},
});
@ -308,7 +308,7 @@ describe("Movie seeding", () => {
part: null,
version: 1,
rendering: "oeunhtoeuth",
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
});
expectStatus(vresp, video).toBe(201);
@ -334,7 +334,7 @@ describe("Movie seeding", () => {
part: null,
version: 2,
rendering: "oeunhtoeuth",
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
});
expectStatus(vresp, video).toBe(201);
@ -359,7 +359,7 @@ describe("Movie seeding", () => {
part: 1,
version: 2,
rendering: "oaoeueunhtoeuth",
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
});
expectStatus(vresp, video).toBe(201);
@ -385,14 +385,14 @@ describe("Movie seeding", () => {
part: null,
version: 1,
rendering: "oeunhtoeuthoeu",
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
},
{
path: "/video/bubble4.mkv",
part: null,
version: 1,
rendering: "aoeuaoeu",
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
},
]);
expectStatus(vresp, video).toBe(201);

View File

@ -6,10 +6,11 @@ import {
createVideo,
deleteVideo,
getVideos,
linkVideos,
} from "tests/helpers";
import { expectStatus } from "tests/utils";
import { db } from "~/db";
import { entries, shows, videos } from "~/db/schema";
import { entries, entryVideoJoin, shows, videos } from "~/db/schema";
import { bubble, madeInAbyss } from "~/models/examples";
beforeAll(async () => {
@ -23,7 +24,12 @@ beforeAll(async () => {
[ret, body] = await createVideo([
{
guess: { title: "mia", season: [1], episode: [13], from: "test" },
guess: {
title: "mia",
episodes: [{ season: 1, episode: 13 }],
from: "test",
history: [],
},
part: null,
path: "/video/mia s1e13.mkv",
rendering: "sha2",
@ -33,10 +39,10 @@ beforeAll(async () => {
{
guess: {
title: "mia",
season: [2],
episode: [1],
year: [2017],
episodes: [{ season: 2, episode: 1 }],
years: [2017],
from: "test",
history: [],
},
part: null,
path: "/video/mia 2017 s2e1.mkv",
@ -45,19 +51,42 @@ beforeAll(async () => {
for: [{ slug: `${madeInAbyss.slug}-s2e1` }],
},
{
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
part: null,
path: "/video/bubble.mkv",
rendering: "sha5",
version: 1,
for: [{ movie: bubble.slug }],
},
{
guess: {
title: "mia",
episodes: [{ season: 1, episode: 1 }], // Different episode for unlinked
from: "test",
history: [],
},
part: null,
path: "/video/mia-unlinked.mkv",
rendering: "sha-unlinked-1",
version: 1,
// No 'for' initially
},
{
guess: { title: "bubble", from: "test", history: [] },
part: null,
path: "/video/bubble-unlinked.mkv",
rendering: "sha-unlinked-2",
version: 1,
// No 'for' initially
},
]);
expectStatus(ret, body).toBe(201);
expect(body).toBeArrayOfSize(3);
expect(body).toBeArrayOfSize(5);
expect(body[0].entries).toBeArrayOfSize(1);
expect(body[1].entries).toBeArrayOfSize(1);
expect(body[2].entries).toBeArrayOfSize(1);
expect(body[3].entries).toBeArrayOfSize(0); // Unlinked
expect(body[4].entries).toBeArrayOfSize(0); // Unlinked
const items = await db.query.shows.findMany();
expect(items.find((x) => x.slug === "bubble")!.availableCount).toBe(1);
@ -103,7 +132,12 @@ describe("Video get/deletion", () => {
it("With unknown", async () => {
let [resp, body] = await createVideo({
guess: { title: "mia", season: [1], episode: [13], from: "test" },
guess: {
title: "mia",
episodes: [{ season: 1, episode: 13 }],
from: "test",
history: [],
},
part: null,
path: "/video/mia s1e13 unknown test.mkv",
rendering: "shanthnth",
@ -131,13 +165,20 @@ describe("Video get/deletion", () => {
},
},
});
expect(body.unmatched).toBeArrayOfSize(1);
expect(body.unmatched[0]).toBe("/video/mia s1e13 unknown test.mkv");
expect(body.unmatched).toBeArrayOfSize(3);
expect(body.unmatched).toContain("/video/mia s1e13 unknown test.mkv");
expect(body.unmatched).toContain("/video/mia-unlinked.mkv");
expect(body.unmatched).toContain("/video/bubble-unlinked.mkv");
});
it("Mismatch title guess", async () => {
let [resp, body] = await createVideo({
guess: { title: "mia", season: [1], episode: [13], from: "test" },
guess: {
title: "mia",
episodes: [{ season: 1, episode: 13 }],
from: "test",
history: [],
},
part: null,
path: "/video/mia s1e13 mismatch.mkv",
rendering: "mismatch",
@ -234,3 +275,69 @@ describe("Video get/deletion", () => {
expect(body[0]).toBe("/video/mia s1e13 unknown test.mkv");
});
});
describe("Video linking", () => {
it("Should link videos to entries", async () => {
const allVideos = await db
.select({
id: videos.id,
path: videos.path,
rendering: videos.rendering,
})
.from(videos);
const miaUnlinkedVideo = allVideos.find(
(v) => v.rendering === "sha-unlinked-1",
);
const bubbleUnlinkedVideo = allVideos.find(
(v) => v.rendering === "sha-unlinked-2",
);
expect(miaUnlinkedVideo).toBeDefined();
expect(bubbleUnlinkedVideo).toBeDefined();
const [resp, body] = await linkVideos([
{
id: miaUnlinkedVideo!.id,
for: [{ slug: `${madeInAbyss.slug}-s1e13` }],
},
{
id: bubbleUnlinkedVideo!.id,
for: [{ movie: bubble.slug }],
},
]);
expectStatus(resp, body).toBe(201);
expect(body).toBeArrayOfSize(2);
expect(body[0]).toMatchObject({
id: miaUnlinkedVideo!.id,
path: "/video/mia-unlinked.mkv",
entries: [
{
slug: expect.stringContaining(`${madeInAbyss.slug}-s1e13`),
},
],
});
expect(body[1]).toMatchObject({
id: bubbleUnlinkedVideo!.id,
path: "/video/bubble-unlinked.mkv",
entries: [
{
slug: expect.stringContaining(bubble.slug),
},
],
});
const miaShow = await db.query.shows.findFirst({
where: eq(shows.slug, madeInAbyss.slug),
});
expect(miaShow!.availableCount).toBe(1);
const bubbleShow = await db.query.shows.findFirst({
where: eq(shows.slug, bubble.slug),
});
expect(bubbleShow!.availableCount).toBe(1);
});
});

View File

@ -18,7 +18,7 @@ beforeAll(async () => {
describe("Video seeding", () => {
it("Can create a video without entry", async () => {
const [resp, body] = await createVideo({
guess: { title: "unknown", from: "test" },
guess: { title: "unknown", from: "test", history: [] },
part: null,
path: "/video/unknown s1e13.mkv",
rendering: "sha",
@ -46,7 +46,12 @@ describe("Video seeding", () => {
it("With slug", async () => {
const [resp, body] = await createVideo({
guess: { title: "mia", season: [1], episode: [13], from: "test" },
guess: {
title: "mia",
episodes: [{ season: 1, episode: 13 }],
from: "test",
history: [],
},
part: null,
path: "/video/mia s1e13.mkv",
rendering: "sha2",
@ -78,7 +83,7 @@ describe("Video seeding", () => {
it("With movie", async () => {
const [resp, body] = await createVideo({
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
part: null,
path: "/video/bubble.mkv",
rendering: "sha3",
@ -110,7 +115,7 @@ describe("Video seeding", () => {
it("Conflicting path", async () => {
const [resp, body] = await createVideo({
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
part: null,
path: "/video/bubble.mkv",
rendering: "sha4",
@ -142,7 +147,12 @@ describe("Video seeding", () => {
it("With season/episode", async () => {
const [resp, body] = await createVideo({
guess: { title: "mia", season: [2], episode: [1], from: "test" },
guess: {
title: "mia",
episodes: [{ season: 2, episode: 1 }],
from: "test",
history: [],
},
part: null,
path: "/video/mia s2e1.mkv",
rendering: "renderingsha",
@ -180,7 +190,12 @@ describe("Video seeding", () => {
it("With special", async () => {
const [resp, body] = await createVideo({
guess: { title: "mia", season: [0], episode: [3], from: "test" },
guess: {
title: "mia",
episodes: [{ season: 0, episode: 3 }],
from: "test",
history: [],
},
part: null,
path: "/video/mia sp3.mkv",
rendering: "notehu",
@ -217,7 +232,12 @@ describe("Video seeding", () => {
it("With order", async () => {
const [resp, body] = await createVideo({
guess: { title: "mia", season: [0], episode: [3], from: "test" },
guess: {
title: "mia",
episodes: [{ season: 0, episode: 3 }],
from: "test",
history: [],
},
part: null,
path: "/video/mia 13.5.mkv",
rendering: "notehu2",
@ -256,11 +276,11 @@ describe("Video seeding", () => {
const [resp, body] = await createVideo({
guess: {
title: "mia",
season: [0],
episode: [3],
episodes: [{ season: 1, episode: 13 }],
from: "test",
history: [],
externalId: {
themoviedatabase: { serieId: "72636", season: 1, episode: 13 },
themoviedatabase: "72636",
},
},
part: null,
@ -303,8 +323,9 @@ describe("Video seeding", () => {
guess: {
title: "bubble",
from: "test",
history: [],
externalId: {
themoviedatabase: { dataId: "912598" },
themoviedatabase: "912598",
},
},
part: null,
@ -344,7 +365,7 @@ describe("Video seeding", () => {
it("Different path, same sha", async () => {
const [resp, body] = await createVideo({
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
part: null,
path: "/video/bubble invalid-sha.mkv",
rendering: "sha",
@ -362,8 +383,9 @@ describe("Video seeding", () => {
guess: {
title: "bubble",
from: "test",
history: [],
externalId: {
themoviedatabase: { dataId: "912598" },
themoviedatabase: "912598",
},
},
part: null,
@ -408,8 +430,9 @@ describe("Video seeding", () => {
guess: {
title: "bubble",
from: "test",
history: [],
externalId: {
themoviedatabase: { dataId: "912598" },
themoviedatabase: "912598",
},
},
part: null,
@ -455,8 +478,9 @@ describe("Video seeding", () => {
guess: {
title: "bubble",
from: "test",
history: [],
externalId: {
themoviedatabase: { dataId: "912598" },
themoviedatabase: "912598",
},
},
part: 1,
@ -476,8 +500,9 @@ describe("Video seeding", () => {
guess: {
title: "bubble",
from: "test",
history: [],
externalId: {
themoviedatabase: { dataId: "912598" },
themoviedatabase: "912598",
},
},
part: 2,
@ -521,9 +546,12 @@ describe("Video seeding", () => {
const [resp, body] = await createVideo({
guess: {
title: "mia",
season: [1, 2],
episode: [13, 1],
episodes: [
{ season: 1, episode: 13 },
{ season: 2, episode: 1 },
],
from: "test",
history: [],
},
part: null,
path: "/video/mia s1e13 & s2e1 [tmdb=72636].mkv",

View File

@ -1,12 +1,9 @@
Dockerfile*
*.md
.dockerignore
.gitignore
.env*
**
!/go.mod
!/go.sum
!/**.go
# generated via sqlc
dbc/
!/sql
!/dbc
# generated via swag
docs/
# vim: ft=gitignore
!/docs

View File

@ -1,9 +1,6 @@
# vi: ft=sh
# shellcheck disable=SC2034
# http route prefix (will listen to $KEIBI_PREFIX/users for example)
KEIBI_PREFIX=""
# path of the private key used to sign jwts. If this is empty, a new one will be generated on startup
RSA_PRIVATE_KEY_PATH=""

View File

@ -1,18 +1,10 @@
FROM golang:1.24 AS build
WORKDIR /app
RUN go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
RUN go install github.com/swaggo/swag/cmd/swag@latest
COPY go.mod go.sum ./
RUN go mod download
COPY sqlc.yaml ./
COPY sql ./sql
RUN sqlc generate
COPY . .
RUN swag init --parseDependency --outputTypes json,go
RUN CGO_ENABLED=0 GOOS=linux go build -o /keibi
FROM gcr.io/distroless/base-debian11
@ -23,5 +15,4 @@ USER nonroot:nonroot
COPY --from=build /keibi /app/keibi
COPY sql ./sql
HEALTHCHECK --interval=30s --retries=15 CMD curl --fail http://localhost:4568$KEIBI_PREFIX/health || exit
CMD ["/app/keibi"]

View File

@ -1,11 +1,10 @@
FROM golang:1.24 AS build
WORKDIR /app
RUN go install github.com/bokwoon95/wgo@latest
COPY go.mod go.sum ./
RUN go mod download
COPY . .
EXPOSE 4568
HEALTHCHECK --interval=30s --retries=15 CMD curl --fail http://localhost:4568$KEIBI_PREFIX/health || exit
CMD ["wgo", "run", "-race", "."]
CMD ["go", "run", "-race", "."]

View File

@ -19,11 +19,11 @@ import (
)
type ApiKey struct {
Id uuid.UUID `json:"id" example:"e05089d6-9179-4b5b-a63e-94dd5fc2a397"`
Name string `json:"name" example:"myapp"`
CreatedAt time.Time `json:"createAt" example:"2025-03-29T18:20:05.267Z"`
LastUsed time.Time `json:"lastUsed" example:"2025-03-29T18:20:05.267Z"`
Claims jwt.MapClaims `json:"claims" example:"isAdmin: true"`
Id uuid.UUID `json:"id" example:"e05089d6-9179-4b5b-a63e-94dd5fc2a397"`
Name string `json:"name" example:"myapp"`
CreatedAt time.Time `json:"createAt" example:"2025-03-29T18:20:05.267Z"`
LastUsed time.Time `json:"lastUsed" example:"2025-03-29T18:20:05.267Z"`
Claims jwt.MapClaims `json:"claims" example:"isAdmin: true"`
}
type ApiKeyWToken struct {
@ -32,18 +32,18 @@ type ApiKeyWToken struct {
}
type ApiKeyDto struct {
Name string `json:"name" example:"myapp" validate:"alpha"`
Name string `json:"name" example:"myapp" validate:"alpha"`
Claims jwt.MapClaims `json:"claims" example:"isAdmin: true"`
}
func MapDbKey(key *dbc.Apikey) ApiKeyWToken {
return ApiKeyWToken{
ApiKey: ApiKey{
Id: key.Id,
Name: key.Name,
Claims: key.Claims,
Id: key.Id,
Name: key.Name,
Claims: key.Claims,
CreatedAt: key.CreatedAt,
LastUsed: key.LastUsed,
LastUsed: key.LastUsed,
},
Token: fmt.Sprintf("%s-%s", key.Name, key.Token),
}
@ -91,15 +91,15 @@ func (h *Handler) CreateApiKey(c echo.Context) error {
if err != nil {
u, _ := h.db.GetUser(context.Background(), dbc.GetUserParams{
UseId: true,
Id: uid,
Id: uid,
})
user = &u[0].User.Pk
}
dbkey, err := h.db.CreateApiKey(context.Background(), dbc.CreateApiKeyParams{
Name: req.Name,
Token: base64.RawURLEncoding.EncodeToString(id),
Claims: req.Claims,
Name: req.Name,
Token: base64.RawURLEncoding.EncodeToString(id),
Claims: req.Claims,
CreatedBy: user,
})
if ErrIs(err, pgerrcode.UniqueViolation) {
@ -169,7 +169,7 @@ func (h *Handler) ListApiKey(c echo.Context) error {
return c.JSON(200, Page[ApiKey]{
Items: ret,
This: c.Request().URL.String(),
This: c.Request().URL.String(),
})
}
@ -182,7 +182,7 @@ func (h *Handler) createApiJwt(apikey string) (string, error) {
key, fromEnv := h.config.EnvApiKeys[info[0]]
if !fromEnv {
dbKey, err := h.db.GetApiKey(context.Background(), dbc.GetApiKeyParams{
Name: info[0],
Name: info[0],
Token: info[1],
})
if err == pgx.ErrNoRows {

View File

@ -2,9 +2,11 @@ package main
import (
"context"
"crypto"
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"encoding/base64"
"encoding/json"
"encoding/pem"
"fmt"
@ -15,20 +17,21 @@ import (
"github.com/golang-jwt/jwt/v5"
"github.com/google/uuid"
"github.com/lestrrat-go/jwx/v3/jwk"
"github.com/zoriya/kyoo/keibi/dbc"
)
type Configuration struct {
Prefix string
JwtPrivateKey *rsa.PrivateKey
JwtPublicKey *rsa.PublicKey
JwtKid string
PublicUrl string
DefaultClaims jwt.MapClaims
FirstUserClaims jwt.MapClaims
GuestClaims jwt.MapClaims
ProtectedClaims []string
ExpirationDelay time.Duration
EnvApiKeys map[string]ApiKeyWToken
EnvApiKeys map[string]ApiKeyWToken
}
var DefaultConfig = Configuration{
@ -36,14 +39,13 @@ var DefaultConfig = Configuration{
FirstUserClaims: make(jwt.MapClaims),
ProtectedClaims: []string{"permissions"},
ExpirationDelay: 30 * 24 * time.Hour,
EnvApiKeys: make(map[string]ApiKeyWToken),
EnvApiKeys: make(map[string]ApiKeyWToken),
}
func LoadConfiguration(db *dbc.Queries) (*Configuration, error) {
ret := DefaultConfig
ret.PublicUrl = os.Getenv("PUBLIC_URL")
ret.Prefix = os.Getenv("KEIBI_PREFIX")
claims := os.Getenv("EXTRA_CLAIMS")
if claims != "" {
@ -100,11 +102,20 @@ func LoadConfiguration(db *dbc.Queries) (*Configuration, error) {
if err != nil {
return nil, err
}
ret.JwtPublicKey = &ret.JwtPrivateKey.PublicKey
}
ret.JwtPublicKey = &ret.JwtPrivateKey.PublicKey
key, err := jwk.Import(ret.JwtPublicKey)
if err != nil {
return nil, err
}
thumbprint, err := key.Thumbprint(crypto.SHA256)
if err != nil {
return nil, err
}
ret.JwtKid = base64.RawStdEncoding.EncodeToString(thumbprint)
for _, env := range os.Environ() {
if !strings.HasPrefix(env, "KEIBI_APIKEY_"){
if !strings.HasPrefix(env, "KEIBI_APIKEY_") {
continue
}
v := strings.Split(env, "=")
@ -128,8 +139,8 @@ func LoadConfiguration(db *dbc.Queries) (*Configuration, error) {
name = strings.ToLower(name)
ret.EnvApiKeys[name] = ApiKeyWToken{
ApiKey: ApiKey{
Id: uuid.New(),
Name: name,
Id: uuid.New(),
Name: name,
Claims: claims,
},
Token: v[1],

View File

@ -79,6 +79,7 @@ func (h *Handler) createGuestJwt() *string {
Time: time.Now().UTC().Add(time.Hour),
}
jwt := jwt.NewWithClaims(jwt.SigningMethodRS256, claims)
jwt.Header["kid"] = h.config.JwtKid
t, err := jwt.SignedString(h.config.JwtPrivateKey)
if err != nil {
return nil
@ -112,6 +113,7 @@ func (h *Handler) createJwt(token string) (string, error) {
Time: time.Now().UTC().Add(time.Hour),
}
jwt := jwt.NewWithClaims(jwt.SigningMethodRS256, claims)
jwt.Header["kid"] = h.config.JwtKid
t, err := jwt.SignedString(h.config.JwtPrivateKey)
if err != nil {
return "", err
@ -144,6 +146,7 @@ func (h *Handler) GetJwks(c echo.Context) error {
key.Set("use", "sig")
key.Set("key_ops", "verify")
key.Set("kid", h.config.JwtKid)
set := jwk.NewSet()
set.AddKey(key)
return c.JSON(200, set)

View File

@ -228,8 +228,8 @@ func main() {
}
h.config = conf
g := e.Group(conf.Prefix)
r := e.Group(conf.Prefix)
g := e.Group("/auth")
r := e.Group("/auth")
r.Use(h.TokenToJwt)
r.Use(echojwt.WithConfig(echojwt.Config{
SigningMethod: "RS256",

15
auth/shell.nix Normal file
View File

@ -0,0 +1,15 @@
# Dev shell for the auth service (keibi): Go toolchain, codegen tools,
# a psql client for the database, and hurl for the HTTP test suite.
{
  pkgs ? import <nixpkgs> { },
}:
pkgs.mkShell {
  packages = [
    pkgs.go
    pkgs.wgo
    pkgs.go-migrate
    pkgs.sqlc
    pkgs.go-swag
    # for psql in cli (+ pgformatter for sql files)
    pkgs.postgresql_15
    pkgs.pgformatter
    # to run tests
    pkgs.hurl
  ];
}

View File

@ -56,7 +56,7 @@ func GetCurrentSessionId(c echo.Context) (uuid.UUID, error) {
func CheckPermissions(c echo.Context, perms []string) error {
token, ok := c.Get("user").(*jwt.Token)
if !ok{
if !ok {
return echo.NewHTTPError(401, "Not logged in")
}
sub, err := token.Claims.GetSubject()

15
back/shell.nix Normal file
View File

@ -0,0 +1,15 @@
# Dev shell for the .NET backend: a combined SDK + ASP.NET runtime
# (8.0) plus the csharpier formatter.
{
  pkgs ? import <nixpkgs> { },
}:
let
  # combinePackages exposes the SDK and the ASP.NET runtime under one root.
  dotnet = pkgs.dotnetCorePackages.combinePackages [
    pkgs.dotnetCorePackages.sdk_8_0
    pkgs.dotnetCorePackages.aspnetcore_8_0
  ];
in
pkgs.mkShell {
  packages = [
    dotnet
    pkgs.csharpier
  ];
  # Point the dotnet tooling at the combined install.
  DOTNET_ROOT = "${dotnet}";
}

6
chart/shell.nix Normal file
View File

@ -0,0 +1,6 @@
# Dev shell for the Helm chart: only the helm CLI is needed.
{
  pkgs ? import <nixpkgs> { },
}:
pkgs.mkShell { packages = [ pkgs.kubernetes-helm ]; }

View File

@ -66,8 +66,6 @@ services:
condition: service_healthy
env_file:
- ./.env
environment:
- KEIBI_PREFIX=/auth
labels:
- "traefik.enable=true"
- "traefik.http.routers.auth.rule=PathPrefix(`/auth/`)"

View File

@ -1,188 +0,0 @@
x-transcoder: &transcoder-base
build:
context: ./transcoder
dockerfile: Dockerfile.dev
networks:
default:
aliases:
- transcoder
ports:
- "7666:7666"
restart: on-failure
cpus: 1
env_file:
- ./.env
environment:
- GOCODER_PREFIX=/video
volumes:
- ./transcoder:/app
- ${LIBRARY_ROOT}:/video:ro
- ${CACHE_ROOT}:/cache
- transcoder_metadata:/metadata
services:
front:
build:
context: ./front
dockerfile: Dockerfile.dev
volumes:
- ./front:/app
- /app/.yarn
- /app/node_modules
- /app/apps/mobile/node_modules
- /app/apps/web/.next/
- /app/apps/mobile/.expo/
ports:
- "3000:3000"
- "8081:8081"
restart: on-failure
environment:
- KYOO_URL=${KYOO_URL:-http://api:5000/api}
labels:
- "traefik.enable=true"
- "traefik.http.routers.front.rule=PathPrefix(`/`)"
auth:
build:
context: ./auth
dockerfile: Dockerfile.dev
restart: on-failure
depends_on:
postgres:
condition: service_healthy
ports:
- "4568:4568"
env_file:
- ./.env
environment:
- KEIBI_PREFIX=/auth
volumes:
- ./auth:/app
labels:
- "traefik.enable=true"
- "traefik.http.routers.auth.rule=PathPrefix(`/auth/`) || PathPrefix(`/.well-known/`)"
api:
build:
context: ./api
dockerfile: Dockerfile.dev
restart: on-failure
depends_on:
postgres:
condition: service_healthy
volumes:
- ./api:/app
- /app/node_modules
ports:
- "3567:3567"
environment:
- KYOO_PREFIX=/api
- JWT_ISSUER=${PUBLIC_URL}
env_file:
- ./.env
labels:
- "traefik.enable=true"
- "traefik.http.routers.api.rule=PathPrefix(`/api/`) || PathPrefix(`/swagger`)"
- "traefik.http.routers.api.middlewares=phantom-token"
- "traefik.http.middlewares.phantom-token.forwardauth.address=http://auth:4568/auth/jwt"
- "traefik.http.middlewares.phantom-token.forwardauth.authRequestHeaders=Authorization,X-Api-Key"
- "traefik.http.middlewares.phantom-token.forwardauth.authResponseHeaders=Authorization"
# scanner:
# build: ./scanner
# restart: on-failure
# depends_on:
# back:
# condition: service_healthy
# env_file:
# - ./.env
# environment:
# - KYOO_URL=${KYOO_URL:-http://back:5000/api}
# volumes:
# - ${LIBRARY_ROOT}:/video:ro
#
# matcher:
# build: ./scanner
# command: matcher
# restart: on-failure
# depends_on:
# back:
# condition: service_healthy
# env_file:
# - ./.env
# environment:
# - KYOO_URL=${KYOO_URL:-http://back:5000/api}
transcoder:
<<: *transcoder-base
profiles: ['', 'cpu']
transcoder-nvidia:
<<: *transcoder-base
deploy:
resources:
reservations:
devices:
- capabilities: [gpu]
driver: cdi
device_ids:
- nvidia.com/gpu=all
environment:
- GOCODER_PREFIX=/video
- GOCODER_HWACCEL=nvidia
profiles: ['nvidia']
transcoder-vaapi:
<<: *transcoder-base
devices:
- /dev/dri:/dev/dri
environment:
- GOCODER_PREFIX=/video
- GOCODER_HWACCEL=vaapi
- GOCODER_VAAPI_RENDERER=${GOCODER_VAAPI_RENDERER:-/dev/dri/renderD128}
profiles: ['vaapi']
# qsv is the same setup as vaapi but with the hwaccel env var different
transcoder-qsv:
<<: *transcoder-base
devices:
- /dev/dri:/dev/dri
environment:
- GOCODER_PREFIX=/video
- GOCODER_HWACCEL=qsv
- GOCODER_VAAPI_RENDERER=${GOCODER_VAAPI_RENDERER:-/dev/dri/renderD128}
profiles: ['qsv']
traefik:
image: traefik:v3.4
restart: on-failure
command:
- "--providers.docker=true"
- "--providers.docker.exposedbydefault=false"
- "--entryPoints.web.address=:8901"
- "--accesslog=true"
ports:
- "8901:8901"
volumes:
- "/var/run/docker.sock:/var/run/docker.sock:ro"
postgres:
image: postgres:15
restart: on-failure
env_file:
- ./.env
volumes:
- db:/var/lib/postgresql/data
ports:
- "5432:5432"
environment:
- POSTGRES_HOST_AUTH_METHOD=trust
command: ["postgres", "-c", "log_statement=all"]
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
interval: 5s
timeout: 5s
retries: 5
volumes:
db:
transcoder_metadata:

View File

@ -8,85 +8,49 @@ x-transcoder: &transcoder-base
- transcoder
ports:
- "7666:7666"
restart: on-failure
restart: unless-stopped
cpus: 1
env_file:
- ./.env
environment:
- GOCODER_PREFIX=/video
volumes:
- ./transcoder:/app
- ${LIBRARY_ROOT}:/video:ro
- ${CACHE_ROOT}:/cache
- metadata:/metadata
- transcoder_metadata:/metadata
develop:
watch:
- action: sync+restart
path: ./transcoder
target: /app
services:
back:
build:
context: ./back
dockerfile: Dockerfile.dev
ports:
- "5000:5000"
restart: on-failure
environment:
- TRANSCODER_URL=${TRANSCODER_URL:-http://transcoder:7666/video}
- KYOO_PREFIX=/api
env_file:
- ./.env
depends_on:
postgres:
condition: service_healthy
meilisearch:
condition: service_healthy
rabbitmq:
condition: service_healthy
migrations:
condition: service_completed_successfully
volumes:
- ./back:/app
- /app/out/
- kyoo:/metadata
labels:
- "traefik.enable=true"
- "traefik.http.routers.api.rule=PathPrefix(`/api/`)"
migrations:
build:
context: ./back
dockerfile: Dockerfile.migrations
restart: "no"
depends_on:
postgres:
condition: service_healthy
env_file:
- ./.env
front:
build:
context: ./front
dockerfile: Dockerfile.dev
volumes:
- ./front:/app
- /app/.yarn
- /app/node_modules
- /app/apps/mobile/node_modules
- /app/apps/web/.next/
- /app/apps/mobile/.expo/
ports:
- "3000:3000"
- "8081:8081"
restart: on-failure
environment:
- KYOO_URL=${KYOO_URL:-http://back:5000/api}
labels:
- "traefik.enable=true"
- "traefik.http.routers.front.rule=PathPrefix(`/`)"
# front:
# build:
# context: ./front
# dockerfile: Dockerfile.dev
# volumes:
# - ./front:/app
# - /app/.yarn
# - /app/node_modules
# - /app/apps/mobile/node_modules
# - /app/apps/web/.next/
# - /app/apps/mobile/.expo/
# ports:
# - "3000:3000"
# - "8081:8081"
# restart: unless-stopped
# environment:
# - KYOO_URL=${KYOO_URL:-http://api:5000/api}
# labels:
# - "traefik.enable=true"
# - "traefik.http.routers.front.rule=PathPrefix(`/`)"
auth:
build:
context: ./auth
dockerfile: Dockerfile.dev
restart: on-failure
restart: unless-stopped
depends_on:
postgres:
condition: service_healthy
@ -94,52 +58,84 @@ services:
- "4568:4568"
env_file:
- ./.env
environment:
- KEIBI_PREFIX=/auth
volumes:
- ./auth:/app
labels:
- "traefik.enable=true"
- "traefik.http.routers.auth.rule=PathPrefix(`/auth/`)"
- "traefik.http.routers.auth.rule=PathPrefix(`/auth/`) || PathPrefix(`/.well-known/`)"
develop:
watch:
- action: sync+restart
path: ./auth
target: /app
api:
build:
context: ./api
dockerfile: Dockerfile.dev
restart: unless-stopped
depends_on:
postgres:
condition: service_healthy
ports:
- "3567:3567"
environment:
- KYOO_PREFIX=/api
- JWT_ISSUER=${PUBLIC_URL}
env_file:
- ./.env
labels:
- "traefik.enable=true"
- "traefik.http.routers.api.rule=PathPrefix(`/api/`) || PathPrefix(`/swagger`)"
- "traefik.http.routers.api.middlewares=phantom-token"
- "traefik.http.middlewares.phantom-token.forwardauth.address=http://auth:4568/auth/jwt"
- "traefik.http.middlewares.phantom-token.forwardauth.authRequestHeaders=Authorization,X-Api-Key"
- "traefik.http.middlewares.phantom-token.forwardauth.authResponseHeaders=Authorization"
develop:
watch:
- action: sync
path: ./api
target: /app
- action: rebuild
path: ./api/packages.json
scanner:
build: ./scanner
restart: on-failure
restart: unless-stopped
depends_on:
back:
api:
condition: service_started
postgres:
condition: service_healthy
env_file:
- ./.env
ports:
- "4389:4389"
environment:
- KYOO_URL=${KYOO_URL:-http://back:5000/api}
# Use this env var once we use mTLS for auth
# - KYOO_URL=${KYOO_URL:-http://api:3567/api}
- KYOO_URL=${KYOO_URL:-http://traefik:8901/api}
- JWKS_URL=http://auth:4568/.well-known/jwks.json
- JWT_ISSUER=${PUBLIC_URL}
volumes:
- ${LIBRARY_ROOT}:/video:ro
matcher:
build: ./scanner
command: matcher
restart: on-failure
depends_on:
back:
condition: service_healthy
env_file:
- ./.env
environment:
- KYOO_URL=${KYOO_URL:-http://back:5000/api}
autosync:
build: ./autosync
restart: on-failure
depends_on:
rabbitmq:
condition: service_healthy
env_file:
- ./.env
labels:
- "traefik.enable=true"
- "traefik.http.routers.scanner.rule=PathPrefix(`/scanner/`)"
- "traefik.http.routers.scanner.middlewares=phantom-token"
- "traefik.http.middlewares.phantom-token.forwardauth.address=http://auth:4568/auth/jwt"
- "traefik.http.middlewares.phantom-token.forwardauth.authRequestHeaders=Authorization,X-Api-Key"
- "traefik.http.middlewares.phantom-token.forwardauth.authResponseHeaders=Authorization"
command: fastapi dev scanner --host 0.0.0.0 --port 4389
develop:
watch:
- action: sync
path: ./scanner
target: /app
- action: rebuild
path: ./scanner/pyproject.toml
transcoder:
<<: *transcoder-base
profiles: ['', 'cpu']
transcoder-nvidia:
<<: *transcoder-base
deploy:
@ -154,7 +150,6 @@ services:
- GOCODER_PREFIX=/video
- GOCODER_HWACCEL=nvidia
profiles: ['nvidia']
transcoder-vaapi:
<<: *transcoder-base
devices:
@ -177,7 +172,7 @@ services:
traefik:
image: traefik:v3.4
restart: on-failure
restart: unless-stopped
command:
- "--providers.docker=true"
- "--providers.docker.exposedbydefault=false"
@ -190,7 +185,7 @@ services:
postgres:
image: postgres:15
restart: on-failure
restart: unless-stopped
env_file:
- ./.env
volumes:
@ -198,49 +193,17 @@ services:
ports:
- "5432:5432"
environment:
- POSTGRES_USER=$PGUSER
- POSTGRES_PASSWORD=$PGPASSWORD
- POSTGRES_DB=$PGDATABASE
- POSTGRES_HOST_AUTH_METHOD=trust
command: ["postgres", "-c", "log_statement=all"]
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
test: ["CMD-SHELL", "pg_isready -U ${PGUSER} -d ${PGDATABASE}"]
interval: 5s
timeout: 5s
retries: 5
meilisearch:
image: getmeili/meilisearch:v1.4
restart: on-failure
ports:
- "7700:7700"
volumes:
- search:/meili_data
environment:
- MEILI_ENV=development
env_file:
- .env
healthcheck:
test: ["CMD", "wget", "--no-verbose", "--spider", "http://meilisearch:7700/health"]
interval: 30s
timeout: 5s
retries: 5
rabbitmq:
image: rabbitmq:4-management-alpine
restart: on-failure
environment:
- RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER}
- RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS}
ports:
- 5672:5672
- 15672:15672
healthcheck:
test: rabbitmq-diagnostics -q ping
interval: 30s
timeout: 10s
retries: 5
start_period: 10s
volumes:
kyoo:
db:
metadata:
search:
transcoder_metadata:

View File

@ -1,2 +1,5 @@
Dockerfile*
**
!/pyproject.toml
!/uv.lock
!/scanner
!/migrations

View File

@ -1,12 +1,25 @@
# vi: ft=sh
# shellcheck disable=SC2034
# RabbitMQ settings
# URL examples: https://docs.aio-pika.com/#url-examples
# This uses AIORMQ (https://github.com/mosquito/aiormq/) under the hood, and supports whatever the library supports.
# RABBITMQ_URL=ampqs://user:password@rabbitmq-server:1234/vhost?capath=/path/to/cacert.pem&certfile=/path/to/cert.pem&keyfile=/path/to/key.pem
# These values are ignored when the RABBITMQ_URL is set
RABBITMQ_HOST=rabbitmq
RABBITMQ_PORT=5672
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
# Root directory that will be traversed to find video files (inside the container)
SCANNER_LIBRARY_ROOT="/video"
# A pattern (regex) to ignore video files.
LIBRARY_IGNORE_PATTERN=".*/[dD]ownloads?/.*"
# Keep those empty to use kyoo's default api key. You can also specify a custom API key if you want.
# go to https://www.themoviedb.org/settings/api and copy the read access token (not the api key)
THEMOVIEDB_API_ACCESS_TOKEN=""
KYOO_URL="http://api:3567/api"
KYOO_APIKEY=""
JWKS_URL="http://auth:4568/.well-known/jwks.json"
JWT_ISSUER=$PUBLIC_URL
# The behavior of the variables below matches what is documented here:
# https://www.postgresql.org/docs/current/libpq-envars.html
PGUSER=kyoo
PGPASSWORD=password
PGDATABASE=kyoo
PGHOST=postgres
PGPORT=5432

1
scanner/.gitignore vendored
View File

@ -1 +1,2 @@
__pycache__
/.venv

View File

@ -1,9 +1,14 @@
FROM python:3.13
ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy UV_PYTHON_DOWNLOADS=0
WORKDIR /app
COPY ./requirements.txt .
RUN pip3 install -r ./requirements.txt
RUN --mount=from=ghcr.io/astral-sh/uv,source=/uv,target=/bin/uv \
--mount=type=cache,target=/root/.cache/uv \
--mount=type=bind,source=uv.lock,target=uv.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
uv sync --locked
ENV PATH="/app/.venv/bin:$PATH"
COPY . .
ENTRYPOINT ["python3", "-m"]
CMD ["scanner"]
CMD ["fastapi", "run", "scanner", "--port", "4389"]

View File

@ -1,6 +1,6 @@
# Scanner
## Workflow (for v5, not current)
## Workflow
In order of action:
@ -17,9 +17,8 @@ In order of action:
from: "guessit"
kind: movie | episode | extra
title: string,
year?: number[],
season?: number[],
episode?: number[],
years?: number[],
episodes?: {season?: number, episode: number}[],
...
},
}
@ -36,41 +35,42 @@ In order of action:
from: "anilist",
kind: movie | episode | extra
name: string,
year: number | null,
season?: number[],
episode?: number[],
absolute?: number[],
years: number[],
episodes?: {season?: number, episode: number}[],
externalId: Record<string, {showId, season, number}[]>,
history: {
from: "guessit"
kind: movie | episode | extra
title: string,
year?: number,
season?: number[],
episode?: number[],
...
years?: number[],
episodes?: {season?: number, episode: number}[],
},
...
},
}
```
- If kind is episode, try to find the serie's id on kyoo (using the previously fetched data from `/videos`):
- Try to find the series id on kyoo (using the previously fetched data from `/videos`):
- if another video in the list of already registered videos has the same `kind`, `name` & `year`, assume it's the same
- if a match is found, add to the video's json:
```json5
{
entries: (uuid | slug | {
show: uuid | slug,
season: number,
episode: number,
externalId?: Record<string, {showId, season, number}> // takes priority over season/episode for matching if we have one
entries: (
| { slug: string }
| { movie: uuid | string }
| { serie: uuid | slug, season: number, episode: number }
| { serie: uuid | slug, order: number }
| { serie: uuid | slug, special: number }
| { externalId?: Record<string, {serieId, season, number}> }
| { externalId?: Record<string, {dataId}> }
})[],
}
```
- Scanner pushes everything to the api in a single post `/videos` call
- Api registers every video in the database
- For each video without an associated entry, the guess data + the video's id is sent to the Matcher via a queue.
- Matcher retrieves metadata from the movie/serie + ALL episodes/seasons (from an external provider)
- Matcher pushes every metadata to the api (if there are 1000 episodes but only 1 video, still push the 1000 episodes)
- Api registers every video in the database & return the list of videos not matched to an existing serie/movie.
- Scanner adds every non-matched video to a queue
<!-- vim: set noexpandtab : -->
For each item in the queue, the scanner will:
- retrieve metadata for the movie/serie + ALL episodes/seasons (from an external provider)
- push all of that metadata to the api (if there are 1000 episodes but only 1 video, still push the 1000 episodes)
<!-- vim: set expandtab : -->

View File

@ -1,18 +0,0 @@
async def main():
import logging
import sys
from providers.provider import Provider
from providers.kyoo_client import KyooClient
from .matcher import Matcher
from .subscriber import Subscriber
logging.basicConfig(level=logging.INFO)
if len(sys.argv) > 1 and sys.argv[1] == "-v":
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("watchfiles").setLevel(logging.WARNING)
logging.getLogger("rebulk").setLevel(logging.WARNING)
async with KyooClient() as kyoo, Subscriber() as sub:
provider = Provider.get_default(kyoo.client)
matcher = Matcher(kyoo, provider)
await sub.listen(matcher)

View File

@ -1,6 +0,0 @@
#!/usr/bin/env python
import asyncio
import matcher
asyncio.run(matcher.main())

View File

@ -1,242 +0,0 @@
from datetime import timedelta
from typing import Literal, Optional
import asyncio
from logging import getLogger
from providers.provider import Provider, ProviderError
from providers.types.collection import Collection
from providers.types.show import Show
from providers.types.episode import Episode, PartialShow
from providers.types.season import Season
from providers.kyoo_client import KyooClient
from .parser.guess import guessit
from .cache import cache, exec_as_cache, make_key
logger = getLogger(__name__)
class Matcher:
def __init__(self, client: KyooClient, provider: Provider) -> None:
self._client = client
self._provider = provider
self._collection_cache = {}
self._show_cache = {}
self._season_cache = {}
async def delete(self, path: str):
try:
await self._client.delete(path)
return True
except Exception as e:
logger.exception("Unhandled error", exc_info=e)
return False
async def identify(self, path: str):
try:
await self._identify(path)
await self._client.delete_issue(path)
except ProviderError as e:
logger.error(e)
await self._client.create_issue(path, str(e))
except Exception as e:
logger.exception("Unhandled error", exc_info=e)
await self._client.create_issue(
path, "Unknown error", {"type": type(e).__name__, "message": str(e)}
)
return False
return True
async def _identify(self, path: str):
raw = guessit(path, xem_titles=await self._provider.get_expected_titles())
if "mimetype" not in raw or not raw["mimetype"].startswith("video"):
return
logger.info("Identified %s: %s", path, raw)
title = raw.get("title")
if not isinstance(title, str):
raise ProviderError(f"Could not guess title, found: {title}")
year = raw.get("year")
if year is not None and not isinstance(year, int):
year = None
logger.warn(f"Invalid year value. Found {year}. Ignoring")
if raw["type"] == "movie":
await self.search_movie(title, year, path)
elif raw["type"] == "episode":
season = raw.get("season")
if isinstance(season, list):
raise ProviderError(
f"An episode can't have multiple seasons (found {raw.get('season')} for {path})"
)
if season is not None and not isinstance(season, int):
raise ProviderError(f"Could not guess season, found: {season}")
episode = raw.get("episode")
if isinstance(episode, list):
raise ProviderError(
f"Multi-episodes files are not yet supported (for {path})"
)
if not isinstance(episode, int):
raise ProviderError(f"Could not guess episode, found: {episode}")
await self.search_episode(title, year, season, episode, path)
else:
logger.warn("Unknown video file type: %s", raw["type"])
async def search_movie(self, title: str, year: Optional[int], path: str):
movie = await self._provider.search_movie(title, year)
movie.file_title = title
movie.path = path
logger.debug("Got movie: %s", movie)
movie_id = await self._client.post("movies", data=movie.to_kyoo())
if any(movie.collections):
ids = await asyncio.gather(
*(self.create_or_get_collection(x) for x in movie.collections)
)
await asyncio.gather(
*(self._client.link_collection(x, "movie", movie_id) for x in ids)
)
async def search_episode(
self,
title: str,
year: Optional[int],
season: Optional[int],
episode_nbr: int,
path: str,
):
episode = await self._provider.search_episode(
title,
season=season,
episode_nbr=episode_nbr if season is not None else None,
absolute=episode_nbr if season is None else None,
year=year,
)
episode.path = path
logger.debug("Got episode: %s", episode)
episode.show_id = await self.create_or_get_show(episode, title)
if episode.season_number is not None:
episode.season_id = await self.register_seasons(
episode.show, episode.show_id, episode.season_number
)
await self._client.post("episodes", data=episode.to_kyoo())
async def create_or_get_collection(self, collection: Collection) -> str:
@cache(ttl=timedelta(days=1), cache=self._collection_cache)
async def create_collection(provider_id: str):
# TODO: Check if a collection with the same metadata id exists already on kyoo.
new_collection = (
await self._provider.identify_collection(provider_id)
if not any(collection.translations.keys())
else collection
)
logger.debug("Got collection: %s", new_collection)
return await self._client.post("collection", data=new_collection.to_kyoo())
# The parameter is only used as a key for the cache.
provider_id = collection.external_id[self._provider.name].data_id
return await create_collection(provider_id)
async def create_or_get_show(self, episode: Episode, fallback_name: str) -> str:
@cache(ttl=timedelta(days=1), cache=self._show_cache)
async def create_show(_: str):
# TODO: Check if a show with the same metadata id exists already on kyoo.
show = (
await self._provider.identify_show(
episode.show.external_id[self._provider.name].data_id,
)
if isinstance(episode.show, PartialShow)
else episode.show
)
show.file_title = fallback_name
# TODO: collections
logger.debug("Got show: %s", episode)
ret = await self._client.post("show", data=show.to_kyoo())
async def create_season(season: Season, id: str):
try:
season.show_id = id
return await self._client.post("seasons", data=season.to_kyoo())
except Exception as e:
logger.exception("Unhandled error create a season", exc_info=e)
season_tasks = map(
lambda s: exec_as_cache(
self._season_cache,
make_key((ret, s.season_number)),
lambda: create_season(s, ret),
),
show.seasons,
)
await asyncio.gather(*season_tasks)
return ret
# The parameter is only used as a key for the cache.
provider_id = episode.show.external_id[self._provider.name].data_id
return await create_show(provider_id)
async def register_seasons(
self, show: Show | PartialShow, show_id: str, season_number: int
) -> str:
# We use an external season cache because we want to edit this cache programmatically
@cache(ttl=timedelta(days=1), cache=self._season_cache)
async def create_season(_: str, __: int):
season = await self._provider.identify_season(
show.external_id[self._provider.name].data_id, season_number
)
season.show_id = show_id
return await self._client.post("seasons", data=season.to_kyoo())
return await create_season(show_id, season_number)
async def refresh(
    self,
    kind: Literal["collection", "movie", "episode", "show", "season"],
    kyoo_id: str,
):
    """Re-fetch the metadata of an existing kyoo item and PUT it back.

    Returns True on success, False when the item has no external id for the
    configured provider (nothing to refresh from).
    """

    async def id_movie(movie: dict, id: dict):
        ret = await self._provider.identify_movie(id["dataId"])
        ret.path = movie["path"]
        return ret

    async def id_season(season: dict, id: dict):
        ret = await self._provider.identify_season(
            id["dataId"], season["seasonNumber"]
        )
        ret.show_id = season["showId"]
        return ret

    async def id_episode(episode: dict, id: dict):
        ret = await self._provider.identify_episode(
            id["showId"], id["season"], id["episode"], episode["absoluteNumber"]
        )
        # Keep the links & path of the existing item; only metadata changes.
        ret.show_id = episode["showId"]
        ret.season_id = episode["seasonId"]
        ret.path = episode["path"]
        return ret

    # Dispatch table: kind -> coroutine producing the refreshed value from
    # (current item as returned by kyoo, its provider external id dict).
    identify_table = {
        "collection": lambda _, id: self._provider.identify_collection(
            id["dataId"]
        ),
        "movie": id_movie,
        "show": lambda _, id: self._provider.identify_show(id["dataId"]),
        "season": id_season,
        "episode": id_episode,
    }

    current = await self._client.get(f"{kind}/{kyoo_id}")
    if self._provider.name not in current["externalId"]:
        logger.error(
            f"Could not refresh metadata of {kind}/{kyoo_id}. Missing provider id."
        )
        return False
    provider_id = current["externalId"][self._provider.name]
    new_value = await identify_table[kind](current, provider_id)
    await self._client.put(f"{kind}/{kyoo_id}", data=new_value.to_kyoo())
    return True

View File

@ -1,56 +0,0 @@
#!/usr/bin/env python3

if __name__ == "__main__":
    # When run directly (not as a package), make the project root importable.
    import sys
    from pathlib import Path

    sys.path.append(str(Path(f"{__file__}/../../..").resolve()))

from guessit.api import default_api
from typing import cast, List, Any
from rebulk import Rebulk

try:
    from . import rules
except:
    # NOTE(review): bare except — presumably only meant to catch ImportError
    # when run as a script; confirm and narrow.
    import rules

# Register kyoo's custom rebulk rules on top of guessit's defaults.
default_api.configure({})
rblk = cast(Rebulk, default_api.rebulk)
rblk.rules(rules)
def guessit(
    name: str,
    *,
    xem_titles: List[str] | None = None,
    extra_flags: dict[str, Any] | None = None,
):
    """Guess media metadata (title, season, episode…) from a file name.

    Args:
        name: The file name (or path) to parse.
        xem_titles: Known titles from TheXem, used as expected titles to help
            guessit split the name correctly. Defaults to no expected titles.
        extra_flags: Extra guessit options merged over the defaults.

    Fix: the previous signature used mutable default arguments (`=[]`, `={}`),
    a classic Python pitfall — replaced with `None` sentinels (backward
    compatible: explicit callers are unaffected).
    """
    return default_api.guessit(
        name,
        {
            "episode_prefer_number": True,
            "excludes": "language",
            "expected_title": xem_titles if xem_titles is not None else [],
        }
        | (extra_flags if extra_flags is not None else {}),
    )
# Only used to test locally
if __name__ == "__main__":
    import sys
    import json

    from providers.implementations.thexem import TheXemClient
    from guessit.jsonutils import GuessitEncoder
    from aiohttp import ClientSession
    import asyncio

    async def main():
        # Usage: guess.py <filename> [-a]  (-a prints guessit's advanced output)
        async with ClientSession() as client:
            xem = TheXemClient(client)

            advanced = any(x == "-a" for x in sys.argv)
            ret = guessit(
                sys.argv[1],
                xem_titles=await xem.get_expected_titles(),
                extra_flags={"advanced": advanced},
            )
            print(json.dumps(ret, cls=GuessitEncoder, indent=4))

    asyncio.run(main())

View File

@ -1,62 +0,0 @@
import asyncio
from typing import Union, Literal
from msgspec import Struct, json
from logging import getLogger
from aio_pika.abc import AbstractIncomingMessage
from providers.rabbit_base import RabbitBase
from matcher.matcher import Matcher
logger = getLogger(__name__)
class Message(Struct, tag_field="action", tag=str.lower):
    # Base class of all queue messages. The msgspec tag ("scan", "delete",
    # "refresh") is derived from the lowercased subclass name and stored in
    # the "action" field of the JSON payload.
    pass


class Scan(Message):
    # Request to (re)identify the file at `path`.
    path: str


class Delete(Message):
    # Request to remove the file at `path` from the library.
    path: str


class Refresh(Message):
    # Request to re-fetch metadata of an existing kyoo resource.
    kind: Literal["collection", "show", "movie", "season", "episode"]
    id: str


# A single decoder able to parse any of the message types above,
# discriminated by the "action" tag.
decoder = json.Decoder(Union[Scan, Delete, Refresh])
class Subscriber(RabbitBase):
async def listen(self, matcher: Matcher):
async def on_message(message: AbstractIncomingMessage):
try:
msg = decoder.decode(message.body)
ack = False
match msg:
case Scan(path):
ack = await matcher.identify(path)
case Delete(path):
ack = await matcher.delete(path)
case Refresh(kind, id):
ack = await matcher.refresh(kind, id)
case _:
logger.error(f"Invalid action: {msg.action}")
if ack:
logger.info("finished processing %s", msg)
await message.ack()
else:
logger.warn("failed to process %s", msg)
await message.reject()
except Exception as e:
logger.exception("Unhandled error", exc_info=e)
await message.reject()
# Allow up to 20 scan requests to run in parallel on the same listener.
# Since most work is calling API not doing that is a waste.
await self._channel.set_qos(prefetch_count=20)
await self._queue.consume(on_message)
await asyncio.Future()

View File

@ -0,0 +1,5 @@
-- Revert the scanner request-queue migration.
-- The table must go first: it references both enum types below.
drop table scanner.requests;

drop type scanner.request_kind;

drop type scanner.request_status;

View File

@ -0,0 +1,24 @@
-- What kind of media a queued identification request is about.
create type scanner.request_kind as enum(
	'episode',
	'movie'
);

-- Lifecycle of a request; there is no 'done' state because finished
-- requests are deleted from the table.
create type scanner.request_status as enum(
	'pending',
	'running',
	'failed'
);

create table scanner.requests(
	pk serial primary key,
	-- Public identifier exposed outside the database.
	id uuid not null default gen_random_uuid() unique,
	kind scanner.request_kind not null,
	title text not null,
	year integer,
	external_id jsonb not null default '{}'::jsonb,
	-- Video files waiting to be linked once the request completes.
	videos jsonb not null default '[]'::jsonb,
	status scanner.request_status not null default 'pending',
	started_at timestamptz,
	created_at timestamptz not null default now()::timestamptz,
	-- Deduplicate requests for the same (kind, title, year) triple.
	constraint unique_kty unique(kind, title, year)
);

30
scanner/old/__init__.py Normal file
View File

@ -0,0 +1,30 @@
async def main():
    """Entry point of the old scanner: wire every component and run them all.

    Starts the filesystem monitor, an initial full scan, the periodic
    refresher and the rabbitmq subscriber concurrently; none of these are
    expected to return.
    """
    import asyncio
    import os
    import logging

    from .monitor import monitor
    from .scanner import scan
    from .refresher import refresh
    from .publisher import Publisher
    from .subscriber import Subscriber
    from old.kyoo_client import KyooClient

    logging.basicConfig(level=logging.INFO)
    # watchfiles is chatty at INFO level.
    logging.getLogger("watchfiles").setLevel(logging.WARNING)

    async with (
        Publisher() as publisher,
        Subscriber() as subscriber,
        KyooClient() as client,
    ):
        path = os.environ.get("SCANNER_LIBRARY_ROOT", "/video")

        async def scan_all():
            # A full rescan also prunes entries whose files disappeared.
            await scan(path, publisher, client, remove_deleted=True)

        await asyncio.gather(
            monitor(path, publisher, client),
            scan_all(),
            refresh(publisher, client),
            subscriber.listen(scan_all),
        )

View File

@ -7,6 +7,7 @@ from typing import Optional, Any, Callable, OrderedDict
from langcodes import Language
from matcher.cache import cache
from scanner.models.staff import Role
from ..provider import Provider, ProviderError
from ..utils import normalize_lang
@ -75,6 +76,19 @@ class TVDB(Provider):
"martial-arts": None,
"awards-show": None,
}
self._roles_map = {
"Actor": Role.ACTOR,
"Creator": Role.OTHER,
"Crew": Role.CREW,
"Director": Role.DIRECTOR,
"Executive Producer": Role.OTHER,
"Guest Star": Role.OTHER,
"Host": Role.OTHER,
"Musical Guest": Role.MUSIC,
"Producer": Role.PRODUCER,
"Showrunner": Role.OTHER,
"Writer": Role.WRITTER,
}
@cache(ttl=timedelta(days=30))
async def login(self) -> str:

67
scanner/old/matcher.py Normal file
View File

@ -0,0 +1,67 @@
from datetime import timedelta
from typing import Literal, Optional
import asyncio
from logging import getLogger
from old.provider import Provider, ProviderError
from old.types.collection import Collection
from old.types.show import Show
from old.types.episode import Episode, PartialShow
from old.types.season import Season
from old.kyoo_client import KyooClient
from .parser.guess import guessit
from .cache import cache, exec_as_cache, make_key
logger = getLogger(__name__)
class Matcher:
    """Glue between a metadata Provider and the kyoo API client (old scanner)."""

    def __init__(self, client: KyooClient, provider: Provider) -> None:
        self._client = client
        self._provider = provider

    async def refresh(
        self,
        kind: Literal["collection", "movie", "episode", "show", "season"],
        kyoo_id: str,
    ):
        """Re-fetch metadata of an existing kyoo item and PUT it back.

        Returns True on success, False when the item has no external id for
        the configured provider.
        """

        async def id_movie(movie: dict, id: dict):
            ret = await self._provider.identify_movie(id["dataId"])
            ret.path = movie["path"]
            return ret

        async def id_season(season: dict, id: dict):
            ret = await self._provider.identify_season(
                id["dataId"], season["seasonNumber"]
            )
            ret.show_id = season["showId"]
            return ret

        async def id_episode(episode: dict, id: dict):
            ret = await self._provider.identify_episode(
                id["showId"], id["season"], id["episode"], episode["absoluteNumber"]
            )
            # Keep the links & path of the existing item; only metadata changes.
            ret.show_id = episode["showId"]
            ret.season_id = episode["seasonId"]
            ret.path = episode["path"]
            return ret

        # Dispatch table: kind -> coroutine producing the refreshed value.
        identify_table = {
            "collection": lambda _, id: self._provider.identify_collection(
                id["dataId"]
            ),
            "movie": id_movie,
            "show": lambda _, id: self._provider.identify_show(id["dataId"]),
            "season": id_season,
            "episode": id_episode,
        }

        current = await self._client.get(f"{kind}/{kyoo_id}")
        if self._provider.name not in current["externalId"]:
            logger.error(
                f"Could not refresh metadata of {kind}/{kyoo_id}. Missing provider id."
            )
            return False
        provider_id = current["externalId"][self._provider.name]
        new_value = await identify_table[kind](current, provider_id)
        await self._client.put(f"{kind}/{kyoo_id}", data=new_value.to_kyoo())
        return True

View File

@ -1,7 +1,7 @@
import asyncio
from logging import getLogger
from providers.kyoo_client import KyooClient
from old.kyoo_client import KyooClient
from scanner.publisher import Publisher

View File

@ -1,769 +0,0 @@
import asyncio
from aiohttp import ClientSession
from datetime import datetime, timedelta
from logging import getLogger
from typing import Awaitable, Callable, Dict, List, Optional, Any, TypeVar
from itertools import accumulate, zip_longest
from langcodes import Language
from providers.utils import ProviderError
from matcher.cache import cache
from ..provider import Provider
from ..types.movie import Movie, MovieTranslation, Status as MovieStatus
from ..types.season import Season, SeasonTranslation
from ..types.episode import Episode, EpisodeTranslation, PartialShow, EpisodeID
from ..types.studio import Studio
from ..types.genre import Genre
from ..types.metadataid import MetadataID
from ..types.show import Show, ShowTranslation, Status as ShowStatus
from ..types.collection import Collection, CollectionTranslation
logger = getLogger(__name__)
class TheMovieDatabase(Provider):
    """Metadata provider backed by the themoviedb.org (TMDb) v3 API."""

    # Public fallback key shipped with kyoo; deployments may override it.
    DEFAULT_API_KEY = "c9f328a01011b28f22483717395fc3fa"

    def __init__(
        self,
        languages: list[str],
        client: ClientSession,
        api_key: str,
    ) -> None:
        super().__init__()
        self._languages = [Language.get(l) for l in languages]
        self._client = client
        self.base = "https://api.themoviedb.org/3"
        self.api_key = api_key
        # TMDb numeric genre ids -> kyoo genres. A list value means one TMDb
        # genre maps to several kyoo genres (e.g. "Action & Adventure").
        self.genre_map = {
            28: Genre.ACTION,
            12: Genre.ADVENTURE,
            16: Genre.ANIMATION,
            35: Genre.COMEDY,
            80: Genre.CRIME,
            99: Genre.DOCUMENTARY,
            18: Genre.DRAMA,
            10751: Genre.FAMILY,
            14: Genre.FANTASY,
            36: Genre.HISTORY,
            27: Genre.HORROR,
            10402: Genre.MUSIC,
            9648: Genre.MYSTERY,
            10749: Genre.ROMANCE,
            878: Genre.SCIENCE_FICTION,
            53: Genre.THRILLER,
            10752: Genre.WAR,
            37: Genre.WESTERN,
            10759: [Genre.ACTION, Genre.ADVENTURE],
            10762: Genre.KIDS,
            10763: Genre.NEWS,
            10764: Genre.REALITY,
            10765: [Genre.SCIENCE_FICTION, Genre.FANTASY],
            10766: Genre.SOAP,
            10767: Genre.TALK,
            10768: [Genre.WAR, Genre.POLITICS],
        }
@property
def name(self) -> str:
    # Key used for this provider in external_id mappings.
    return "themoviedatabase"
def process_genres(self, genres) -> list[Genre]:
    """Map raw TMDb genre dicts to kyoo Genre values.

    Unknown TMDb ids are skipped; map entries holding a list of genres
    (composite TMDb genres) are flattened into the result.
    """
    result: list[Genre] = []
    for raw in genres:
        mapped = self.genre_map.get(raw["id"])
        if mapped is None:
            continue
        if isinstance(mapped, list):
            result.extend(mapped)
        else:
            result.append(mapped)
    return result
def get_languages(self, *args) -> list[Language]:
    """Return the configured languages followed by any extra ones given."""
    return [*self._languages, *args]
async def get(
    self,
    path: str,
    *,
    params: dict[str, Any] = {},
    not_found_fail: Optional[str] = None,
):
    """GET `path` on the TMDb API and return the parsed JSON body.

    None-valued params are dropped. If `not_found_fail` is set, a 404
    raises a ProviderError with that message instead of an HTTP error.
    """
    # Note: `params` is rebound (never mutated), so the mutable default is safe.
    params = {k: v for k, v in params.items() if v is not None}
    async with self._client.get(
        f"{self.base}/{path}", params={"api_key": self.api_key, **params}
    ) as r:
        if not_found_fail and r.status == 404:
            raise ProviderError(not_found_fail)
        r.raise_for_status()
        return await r.json()
# Generic item type handled by process_translations/merge_translations.
T = TypeVar("T")

def merge_translations(self, host, translations, *, languages: list[Language]):
    """Copy each per-language translation onto `host`, keyed by language tag.

    `translations` and `languages` are parallel lists (one fetched item per
    language); only the `.translations` mapping of each item is consumed.
    """
    host.translations = {
        k.to_tag(): v.translations[k.to_tag()]
        for k, v in zip(languages, translations)
    }
    return host
async def process_translations(
    self,
    for_language: Callable[[str], Awaitable[T]],
    languages: list[Language],
    post_merge: Callable[[T, list[T]], T] | None = None,
) -> T:
    """Fetch one item per language concurrently and merge the translations.

    The first language's result becomes the host object; `post_merge` may
    then fix up nested fields that need all per-language variants (e.g.
    show seasons).
    """
    tasks = map(lambda lng: for_language(lng), languages)
    items: list[Any] = await asyncio.gather(*tasks)
    item = self.merge_translations(items[0], items, languages=languages)
    if post_merge:
        item = post_merge(item, items)
    return item
def get_image(self, images: list[Dict[str, Any]]) -> list[str]:
    """Build full-size TMDb image URLs, skipping entries without a file path."""
    prefix = "https://image.tmdb.org/t/p/original"
    return [prefix + img["file_path"] for img in images if img["file_path"]]
def to_studio(self, company: dict[str, Any]) -> Studio:
    """Convert a TMDb production-company dict into a kyoo Studio.

    Fix: the previous check `"logo_path" in company` was also true when TMDb
    returned `"logo_path": None`, producing a broken ".../originalNone" logo
    URL. Use the value's truthiness instead.
    """
    logo = company.get("logo_path")
    return Studio(
        name=company["name"],
        logos=[f"https://image.tmdb.org/t/p/original{logo}"] if logo else [],
        external_id={
            self.name: MetadataID(
                company["id"], f"https://www.themoviedb.org/company/{company['id']}"
            )
        },
    )
def get_best_image(
    self, item: dict[str, Any], lng: Language, key: str
) -> list[dict]:
    """
    Retrieves the best available images for an item based on localization.

    Args:
        item (dict): A dictionary containing item information, including images and language details.
        lng (Language): The preferred language for the images.
        key (str): The key to access the images in the item dictionary. (e.g. "posters", "backdrops", "logos")

    Returns:
        list: A list of image URLs, prioritized by localization, original language, and any available image.
    """
    # Order images by size and vote average
    item["images"][key] = sorted(
        item["images"][key],
        key=lambda x: (x.get("vote_average", 0), x.get("width", 0)),
        reverse=True,
    )

    # Step 1: Try to get localized images
    localized_images = [
        image
        for image in item["images"][key]
        if image.get("iso_639_1") == lng.language
    ]

    # Step 2: If no localized images, try images in the original language
    if not localized_images:
        localized_images = [
            image
            for image in item["images"][key]
            if image.get("iso_639_1") == item.get("original_language")
        ]

    # Step 3: If still no images, use any available images
    if not localized_images:
        localized_images = item["images"][key]

    # Step 4: If there are no images at all, fallback to _path attribute.
    if not localized_images:
        localized_images = self._get_image_fallback(item, key)

    return self.get_image(localized_images)
def _get_image_fallback(self, item: dict[str, Any], key: str) -> list[dict]:
    """
    Fall back to the single `poster_path`/`backdrop_path` attribute when the
    images list is empty. Other keys (e.g. "logos") have no fallback.
    """
    attr_by_key = {"posters": "poster_path", "backdrops": "backdrop_path"}
    attr = attr_by_key.get(key)
    if attr is None:
        return []
    return [{"file_path": item.get(attr)}]
async def search_movie(self, name: str, year: Optional[int]) -> Movie:
    """Search TMDb for a movie by name (and optional year) and identify it.

    Raises ProviderError when the search returns no results.
    """
    search_results = (
        await self.get("search/movie", params={"query": name, "year": year})
    )["results"]
    if len(search_results) == 0:
        raise ProviderError(f"No result for a movie named: {name}")
    search = self.get_best_result(search_results, name, year)
    # Pass the original language along so identify can request its images too.
    original_language = Language.get(search["original_language"])
    return await self.identify_movie(
        search["id"], original_language=original_language
    )
async def identify_movie(
    self, movie_id: str, original_language: Optional[Language] = None
) -> Movie:
    """Fetch full movie metadata from TMDb, one request per configured language.

    The per-language results are merged into a single Movie whose
    `translations` dict is keyed by language tag.
    """
    languages = self.get_languages()

    async def for_language(lng: Language) -> Movie:
        movie = await self.get(
            f"movie/{movie_id}",
            params={
                "language": lng.to_tag(),
                "append_to_response": "alternative_titles,videos,credits,keywords,images",
                "include_image_language": f"{lng.language},null,{original_language.language if original_language else ''}",
            },
        )
        logger.debug("TMDb responded: %s", movie)

        ret = Movie(
            original_language=movie["original_language"],
            aliases=[x["title"] for x in movie["alternative_titles"]["titles"]],
            air_date=datetime.strptime(movie["release_date"], "%Y-%m-%d").date()
            if movie["release_date"]
            else None,
            status=MovieStatus.FINISHED
            if movie["status"] == "Released"
            else MovieStatus.PLANNED,
            # TMDb rates on a 0-10 scale; kyoo uses 0-100.
            rating=round(float(movie["vote_average"]) * 10),
            runtime=int(movie["runtime"]) if movie["runtime"] is not None else None,
            studios=[self.to_studio(x) for x in movie["production_companies"]],
            genres=self.process_genres(movie["genres"]),
            external_id=(
                {
                    self.name: MetadataID(
                        movie["id"],
                        f"https://www.themoviedb.org/movie/{movie['id']}",
                    )
                }
                | (
                    {
                        "imdb": MetadataID(
                            movie["imdb_id"],
                            f"https://www.imdb.com/title/{movie['imdb_id']}",
                        )
                    }
                    if movie["imdb_id"]
                    else {}
                )
            ),
            collections=[
                Collection(
                    external_id={
                        self.name: MetadataID(
                            movie["belongs_to_collection"]["id"],
                            f"https://www.themoviedb.org/collection/{movie['belongs_to_collection']['id']}",
                        )
                    },
                )
            ]
            if movie["belongs_to_collection"] is not None
            else [],
            # TODO: Add cast information
        )
        translation = MovieTranslation(
            name=movie["title"],
            tagline=movie["tagline"] if movie["tagline"] else None,
            tags=list(map(lambda x: x["name"], movie["keywords"]["keywords"])),
            overview=movie["overview"],
            posters=self.get_best_image(movie, lng, "posters"),
            logos=self.get_best_image(movie, lng, "logos"),
            thumbnails=self.get_best_image(movie, lng, "backdrops"),
            trailers=[
                f"https://www.youtube.com/watch?v={x['key']}"
                for x in movie["videos"]["results"]
                if x["type"] == "Trailer" and x["site"] == "YouTube"
            ],
        )
        ret.translations = {lng.to_tag(): translation}
        return ret

    ret = await self.process_translations(for_language, languages)
    # Make sure the original language always has a translation entry.
    if (
        ret.original_language is not None
        and ret.original_language not in ret.translations
    ):
        orig_language = Language.get(ret.original_language)
        ret.translations[orig_language.to_tag()] = (
            await for_language(orig_language)
        ).translations[orig_language.to_tag()]
    return ret
@cache(ttl=timedelta(days=1))
async def identify_show(
    self,
    show_id: str,
) -> Show:
    """Fetch full show metadata (including seasons) from TMDb.

    One request per configured language; results are merged into one Show
    whose translations (and each season's translations) are keyed by
    language tag. Cached for a day since seasons/episodes re-use it a lot.
    """
    languages = self.get_languages()

    async def for_language(lng: Language) -> Show:
        show = await self.get(
            f"tv/{show_id}",
            params={
                "language": lng.to_tag(),
                "append_to_response": "alternative_titles,videos,credits,keywords,images,external_ids",
                "include_image_language": f"{lng.language},null,en",
            },
        )
        logger.debug("TMDb responded: %s", show)

        ret = Show(
            original_language=show["original_language"],
            aliases=[x["title"] for x in show["alternative_titles"]["results"]],
            start_air=datetime.strptime(show["first_air_date"], "%Y-%m-%d").date()
            if show["first_air_date"]
            else None,
            end_air=datetime.strptime(show["last_air_date"], "%Y-%m-%d").date()
            if show["last_air_date"]
            else None,
            status=ShowStatus.FINISHED
            if show["status"] == "Released"
            else ShowStatus.AIRING
            if show["in_production"]
            else ShowStatus.FINISHED,
            # TMDb rates on a 0-10 scale; kyoo uses 0-100.
            rating=round(float(show["vote_average"]) * 10),
            studios=[self.to_studio(x) for x in show["production_companies"]],
            genres=self.process_genres(show["genres"]),
            external_id={
                self.name: MetadataID(
                    show["id"], f"https://www.themoviedb.org/tv/{show['id']}"
                ),
            }
            | (
                {
                    "imdb": MetadataID(
                        show["external_ids"]["imdb_id"],
                        f"https://www.imdb.com/title/{show['external_ids']['imdb_id']}",
                    )
                }
                if show["external_ids"]["imdb_id"]
                else {}
            )
            | (
                {"tvdb": MetadataID(show["external_ids"]["tvdb_id"], link=None)}
                if show["external_ids"]["tvdb_id"]
                else {}
            ),
            seasons=[
                self.to_season(x, language=lng, show_id=show["id"])
                for x in show["seasons"]
            ],
            # TODO: Add cast information
        )
        translation = ShowTranslation(
            name=show["name"],
            tagline=show["tagline"] if show["tagline"] else None,
            tags=list(map(lambda x: x["name"], show["keywords"]["results"])),
            overview=show["overview"],
            posters=self.get_best_image(show, lng, "posters"),
            logos=self.get_best_image(show, lng, "logos"),
            thumbnails=self.get_best_image(show, lng, "backdrops"),
            trailers=[
                f"https://www.youtube.com/watch?v={x['key']}"
                for x in show["videos"]["results"]
                if x["type"] == "Trailer" and x["site"] == "YouTube"
            ],
        )
        ret.translations = {lng.to_tag(): translation}
        return ret

    def merge_seasons_translations(item: Show, items: list[Show]) -> Show:
        # Each per-language Show carries its own season objects; merge the
        # season translations the same way the show's were merged.
        item.seasons = [
            self.merge_translations(
                season,
                [
                    next(
                        y
                        for y in x.seasons
                        if y.season_number == season.season_number
                    )
                    for x in items
                ],
                languages=languages,
            )
            for season in item.seasons
        ]
        return item

    ret = await self.process_translations(
        for_language, languages, merge_seasons_translations
    )
    # Make sure the original language always has a translation entry.
    if (
        ret.original_language is not None
        and ret.original_language not in ret.translations
    ):
        orig_language = Language.get(ret.original_language)
        ret.translations[orig_language.to_tag()] = (
            await for_language(orig_language)
        ).translations[orig_language.to_tag()]
    return ret
def to_season(
    self, season: dict[str, Any], *, language: Language, show_id: str
) -> Season:
    """Convert a TMDb season dict (as embedded in a show) into a kyoo Season,
    with a single translation for `language`."""
    return Season(
        season_number=season["season_number"],
        episodes_count=season["episode_count"],
        start_air=datetime.strptime(season["air_date"], "%Y-%m-%d").date()
        if season["air_date"]
        else None,
        end_air=None,
        external_id={
            self.name: MetadataID(
                show_id,
                f"https://www.themoviedb.org/tv/{show_id}/season/{season['season_number']}",
            )
        },
        translations={
            language.to_tag(): SeasonTranslation(
                name=season["name"],
                overview=season["overview"],
                posters=[
                    f"https://image.tmdb.org/t/p/original{season['poster_path']}"
                ]
                if season["poster_path"] is not None
                else [],
                thumbnails=[],
            )
        },
    )
async def identify_season(self, show_id: str, season: int) -> Season:
    """Return the season `season` of show `show_id`.

    We already get seasons info in identify_show, and chances are it is
    cached already, so just pick the right one out of it.
    """
    show = await self.identify_show(show_id)
    for candidate in show.seasons:
        if candidate.season_number == season:
            return candidate
    raise ProviderError(
        f"Could not find season {season} for show {show.to_kyoo()['name']}"
    )
@cache(ttl=timedelta(days=1))
async def search_show(self, name: str, year: Optional[int]) -> PartialShow:
    """Search TMDb for a tv show by name (and optional year).

    Returns only a PartialShow (name, language, external id) — callers use
    identify_show for full metadata. Raises ProviderError on no results.
    """
    search_results = (
        await self.get("search/tv", params={"query": name, "year": year})
    )["results"]
    if len(search_results) == 0:
        raise ProviderError(f"No result for a tv show named: {name}")
    search = self.get_best_result(search_results, name, year)
    show_id = search["id"]
    return PartialShow(
        name=search["name"],
        original_language=search["original_language"],
        external_id={
            self.name: MetadataID(
                show_id, f"https://www.themoviedb.org/tv/{show_id}"
            )
        },
    )
async def search_episode(
    self,
    name: str,
    season: Optional[int],
    episode_nbr: Optional[int],
    absolute: Optional[int],
    year: Optional[int],
) -> Episode:
    """Find an episode from a show name plus season/episode or absolute number.

    Missing numbering is derived from the other form (absolute <-> s/e)
    before delegating to identify_episode. Raises ProviderError when the
    numbering cannot be resolved.
    """
    show = await self.search_show(name, year)
    show_id = show.external_id[self.name].data_id

    # Derive (season, episode) from the absolute number if needed.
    if absolute is not None and (season is None or episode_nbr is None):
        (season, episode_nbr) = await self.get_episode_from_absolute(
            show_id, absolute
        )

    if season is None or episode_nbr is None:
        raise ProviderError(
            f"Could not guess season or episode number of the episode {show.name} {season}-{episode_nbr} ({absolute})",
        )

    # And the other way around: derive the absolute number if missing.
    if absolute is None:
        absolute = await self.get_absolute_number(show_id, season, episode_nbr)
    return await self.identify_episode(show_id, season, episode_nbr, absolute)
async def identify_episode(
    self, show_id: str, season: Optional[int], episode_nbr: int, absolute: int
) -> Episode:
    """Fetch full episode metadata from TMDb, one request per language.

    If the (season, episode) lookup fails, retry using the absolute number
    as the episode number (some shows are indexed that way on TMDb).

    Fix: the retry used a bare `except:`, which in async code also swallows
    `asyncio.CancelledError` (and `KeyboardInterrupt`); narrowed to
    `except Exception`.
    """

    async def for_language(lng: Language) -> Episode:
        try:
            episode = await self.get(
                f"tv/{show_id}/season/{season}/episode/{episode_nbr}",
                params={
                    "language": lng.to_tag(),
                },
            )
        except Exception:
            episode = await self.get(
                f"tv/{show_id}/season/{season}/episode/{absolute}",
                params={
                    "language": lng.to_tag(),
                },
                not_found_fail=f"Could not find episode {episode_nbr} of season {season} of serie {show_id} (absolute: {absolute})",
            )
        logger.debug("TMDb responded: %s", episode)

        ret = Episode(
            show=PartialShow(
                # We only have the id here; the name is resolved later.
                name=show_id,
                original_language=None,
                external_id={
                    self.name: MetadataID(
                        show_id, f"https://www.themoviedb.org/tv/{show_id}"
                    )
                },
            ),
            season_number=episode["season_number"],
            episode_number=episode["episode_number"],
            absolute_number=absolute,
            runtime=int(episode["runtime"])
            if episode["runtime"] is not None
            else None,
            release_date=datetime.strptime(episode["air_date"], "%Y-%m-%d").date()
            if episode["air_date"]
            else None,
            thumbnail=f"https://image.tmdb.org/t/p/original{episode['still_path']}"
            if "still_path" in episode and episode["still_path"] is not None
            else None,
            external_id={
                self.name: EpisodeID(
                    show_id,
                    episode["season_number"],
                    episode["episode_number"],
                    f"https://www.themoviedb.org/tv/{show_id}/season/{episode['season_number']}/episode/{episode['episode_number']}",
                ),
            },
        )
        translation = EpisodeTranslation(
            name=episode["name"],
            overview=episode["overview"],
        )
        ret.translations = {lng.to_tag(): translation}
        return ret

    return await self.process_translations(for_language, self.get_languages())
def get_best_result(
    self, search_results: List[Any], name: str, year: Optional[int]
) -> Any:
    """Pick the most plausible TMDb search entry for `name`/`year`.

    Priority: entries released in `year` (tmdb sometimes ignores the year
    parameter), then exact case-insensitive title matches (most popular
    first), otherwise popular/rated entries before obscure ones.
    """
    candidates = search_results

    # Keep only exact-year matches when they exist.
    if year:
        year_prefix = str(year)
        same_year = [
            entry
            for entry in search_results
            if ("first_air_date" in entry and entry["first_air_date"].startswith(year_prefix))
            or ("release_date" in entry and entry["release_date"].startswith(year_prefix))
        ]
        if same_year:
            candidates = same_year

    # Prefer exact title matches, ordered by popularity.
    folded_name = name.casefold()
    exact_matches = sorted(
        (
            entry
            for entry in candidates
            if ("name" in entry and entry["name"].casefold() == folded_name)
            or ("title" in entry and entry["title"].casefold() == folded_name)
        ),
        key=lambda entry: (entry["vote_count"], entry["popularity"]),
        reverse=True,
    )
    if exact_matches:
        candidates = exact_matches
    else:
        # sorted is stable and False < True, so this pushes totally
        # unpopular or unknown entries to the end of the list.
        candidates = sorted(
            candidates,
            key=lambda entry: entry["vote_count"] < 5 or entry["popularity"] < 5,
        )
    return candidates[0]
@cache(ttl=timedelta(days=1))
async def get_absolute_order(self, show_id: str):
    """
    TheMovieDb does not allow to fetch an episode by an absolute number but it
    supports groups where you can list episodes. One type is the absolute group
    where everything should be on one season; this method tries to find a
    complete absolute-ordered group and returns it (or None when unavailable).

    Fix: `logger.warn` is a deprecated alias of `logger.warning`.
    """
    show = await self.identify_show(show_id)
    try:
        groups = await self.get(f"tv/{show_id}/episode_groups")
        ep_count = max((x["episode_count"] for x in groups["results"]), default=0)
        if ep_count == 0:
            return None
        # Filter only absolute groups that contain at least 75% of all episodes
        # (to skip non-maintained absolute orderings).
        group_id = next(
            (
                x["id"]
                for x in groups["results"]
                if x["type"] == 2 and x["episode_count"] >= ep_count // 1.5
            ),
            None,
        )
        if group_id is None:
            return None
        group = await self.get(f"tv/episode_group/{group_id}")
        absgrp = [
            ep
            for grp in sorted(group["groups"], key=lambda x: x["order"])
            # Some shows include specials as the first absolute group (like TenSura)
            if grp["name"] != "Specials"
            for ep in sorted(grp["episodes"], key=lambda x: x["order"])
        ]
        # First episode number of each season as listed in the group.
        season_starts = [
            next(
                (
                    x["episode_number"]
                    for x in absgrp
                    if x["season_number"] == s.season_number
                ),
                1,
            )
            for s in show.seasons
        ]
        # Fill episodes missing from the group, assuming ascending order.
        complete_abs = absgrp + [
            {"season_number": s.season_number, "episode_number": e}
            for s in show.seasons
            # ignore specials not specified in the absgrp
            if s.season_number > 0
            for e in range(1, s.episodes_count + 1)
            if not any(
                x["season_number"] == s.season_number
                and (
                    x["episode_number"] == e
                    # take into account weird absolute (for example one piece, episodes are not reset to 1 when the season starts)
                    or x["episode_number"] == season_starts[s.season_number - 1] + e
                )
                for x in absgrp
            )
        ]
        if len(complete_abs) != len(absgrp):
            logger.warning(
                f"Incomplete absolute group for show {show_id}. Filling missing values by assuming season/episode order is ascending"
            )
        return complete_abs
    except Exception as e:
        logger.exception(
            "Could not retrieve absolute ordering information", exc_info=e
        )
        return None
async def get_episode_from_absolute(self, show_id: str, absolute: int):
    """Translate an absolute episode number into a (season, episode) pair.

    Uses TMDb's absolute episode group when available, otherwise assumes
    seasons play back-to-back in order. Returns (None, None) when the show
    has no usable seasons.
    """
    absgrp = await self.get_absolute_order(show_id)

    if absgrp is not None and len(absgrp) >= absolute:
        # Using absolute - 1 since the array is 0based (absolute episode 1 is at index 0)
        season = absgrp[absolute - 1]["season_number"]
        episode_nbr = absgrp[absolute - 1]["episode_number"]
        return (season, episode_nbr)

    # We assume that each season should be played in order with no special episodes.
    show = await self.identify_show(show_id)

    # Don't forget to ignore the special season (season_number 0)
    seasons_nbrs = [x.season_number for x in show.seasons if x.season_number != 0]
    seasons_eps = [x.episodes_count for x in show.seasons if x.season_number != 0]

    if not any(seasons_nbrs):
        return (None, None)

    # zip_longest(seasons_nbrs[1:], accumulate(seasons_eps)) return [(2, 12), (None, 24)] if the show has two seasons with 12 eps
    # we take the last group that has less total episodes than the absolute number.
    return next(
        (
            (snbr, absolute - ep_cnt)
            for snbr, ep_cnt in reversed(
                list(zip_longest(seasons_nbrs[1:], accumulate(seasons_eps)))
            )
            if ep_cnt < absolute
        ),
        # If the absolute episode number is lower than the 1st season number of episode, it is part of it.
        (seasons_nbrs[0], absolute),
    )
async def get_absolute_number(
    self, show_id: str, season: int, episode_nbr: int
) -> int:
    """Translate a (season, episode) pair into an absolute episode number.

    Uses TMDb's absolute episode group when available; otherwise sums the
    episode counts of the preceding seasons. Raises ProviderError when the
    number cannot be derived.
    """
    absgrp = await self.get_absolute_order(show_id)
    if absgrp is None:
        # We assume that each season should be played in order with no special episodes.
        show = await self.identify_show(show_id)
        return (
            sum(
                x.episodes_count
                for x in show.seasons
                if 0 < x.season_number < season
            )
            + episode_nbr
        )
    absolute = next(
        (
            # The + 1 is to go from 0based index to 1based absolute number
            i + 1
            for i, x in enumerate(absgrp)
            if x["episode_number"] == episode_nbr and x["season_number"] == season
        ),
        None,
    )
    if absolute is not None:
        return absolute
    # assume we use tmdb weird absolute by default (for example, One Piece S21E800, the first
    # episode of S21 is not reset to 0 but keeps increasing so it can be 800
    start = next(
        (x["episode_number"] for x in absgrp if x["season_number"] == season), None
    )
    if start is None or start <= episode_nbr:
        raise ProviderError(
            f"Could not guess absolute number of episode {show_id} s{season} e{episode_nbr}"
        )
    # add back the continuous number (imagine the user has one piece S21e31
    # but tmdb registered it as S21E831 since S21's first ep is 800
    return await self.get_absolute_number(show_id, season, episode_nbr + start)
async def identify_collection(self, provider_id: str) -> Collection:
    """Fetch full collection metadata from TMDb, one request per language."""
    languages = self.get_languages()

    async def for_language(lng: Language) -> Collection:
        collection = await self.get(
            f"collection/{provider_id}",
            params={
                "language": lng.to_tag(),
                "append_to_response": "images",
                "include_image_language": f"{lng.language},null,en",
            },
        )
        logger.debug("TMDb responded: %s", collection)

        ret = Collection(
            external_id={
                self.name: MetadataID(
                    collection["id"],
                    f"https://www.themoviedb.org/collection/{collection['id']}",
                )
            },
        )
        translation = CollectionTranslation(
            name=collection["name"],
            overview=collection["overview"],
            posters=self.get_best_image(collection, lng, "posters"),
            logos=[],
            thumbnails=self.get_best_image(collection, lng, "backdrops"),
        )
        ret.translations = {lng.to_tag(): translation}
        return ret

    return await self.process_translations(for_language, languages)

View File

@ -1,159 +0,0 @@
import os
import jsons
from aiohttp import ClientSession
from datetime import date
from logging import getLogger
from typing import List, Literal, Any, Optional
from urllib.parse import quote
from .utils import format_date
logger = getLogger(__name__)
class KyooClient:
    """Thin async HTTP client for the kyoo back-end API (old scanner).

    Must be used as an async context manager (creates/closes the aiohttp
    session). All requests authenticate with the `X-API-Key` header; on
    create/update endpoints a 409 (already exists) is treated as success.
    """

    def __init__(self) -> None:
        self._api_key = os.environ.get("KYOO_APIKEY")
        if not self._api_key:
            # Legacy fallback: use the first key of the comma-separated list.
            self._api_key = os.environ.get("KYOO_APIKEYS")
            if not self._api_key:
                print("Missing environment variable 'KYOO_APIKEY'.")
                exit(2)
            self._api_key = self._api_key.split(",")[0]
        self._url = os.environ.get("KYOO_URL", "http://back:5000")

    async def __aenter__(self):
        # Serialize dates consistently for the kyoo API.
        jsons.set_serializer(lambda x, **_: format_date(x), type[Optional[date | int]])
        self.client = ClientSession(
            headers={
                "User-Agent": "kyoo",
            },
            json_serialize=lambda *args, **kwargs: jsons.dumps(
                *args, key_transformer=jsons.KEY_TRANSFORMER_CAMELCASE, **kwargs
            ),
        )
        return self

    async def __aexit__(self, exc_type, exc_value, exc_tb):
        await self.client.close()

    async def get_registered_paths(self) -> List[str]:
        """Return every video path already registered on kyoo."""
        async with self.client.get(
            f"{self._url}/paths",
            headers={"X-API-Key": self._api_key},
        ) as r:
            r.raise_for_status()
            return await r.json()

    async def create_issue(self, path: str, issue: str, extra: dict | None = None):
        """Report a scanner issue for `path` (e.g. an unmatchable file)."""
        async with self.client.post(
            f"{self._url}/issues",
            json={
                "domain": "scanner",
                "cause": path,
                "reason": issue,
                "extra": extra if extra is not None else {},
            },
            headers={"X-API-Key": self._api_key},
        ) as r:
            if not r.ok:
                logger.error(f"Request error: {await r.text()}")
            r.raise_for_status()

    async def get_issues(self) -> List[str]:
        """Return the paths of every scanner-domain issue."""
        async with self.client.get(
            f"{self._url}/issues",
            params={"limit": 0},
            headers={"X-API-Key": self._api_key},
        ) as r:
            r.raise_for_status()
            ret = await r.json()
            return [x["cause"] for x in ret if x["domain"] == "scanner"]

    async def delete_issue(self, path: str):
        """Delete the scanner issue registered for `path`, if any."""
        async with self.client.delete(
            f'{self._url}/issues?filter=domain eq scanner and cause eq "{quote(path)}"',
            headers={"X-API-Key": self._api_key},
        ) as r:
            if not r.ok:
                logger.error(f"Request error: {await r.text()}")
            r.raise_for_status()

    async def link_collection(
        self, collection: str, type: Literal["movie"] | Literal["show"], id: str
    ):
        """Add the movie/show `id` to `collection`."""
        async with self.client.put(
            f"{self._url}/collections/{collection}/{type}/{id}",
            headers={"X-API-Key": self._api_key},
        ) as r:
            # Allow 409 and continue as if it worked.
            if not r.ok and r.status != 409:
                logger.error(f"Request error: {await r.text()}")
                r.raise_for_status()

    async def post(self, path: str, *, data: dict[str, Any]) -> str:
        """POST `data` to `path` and return the created resource's id."""
        logger.debug(
            "Sending %s: %s",
            path,
            jsons.dumps(
                data,
                key_transformer=jsons.KEY_TRANSFORMER_CAMELCASE,
                jdkwargs={"indent": 4},
            ),
        )
        async with self.client.post(
            f"{self._url}/{path}",
            json=data,
            headers={"X-API-Key": self._api_key},
        ) as r:
            # Allow 409 and continue as if it worked.
            if not r.ok and r.status != 409:
                logger.error(f"Request error: {await r.text()}")
                r.raise_for_status()
            ret = await r.json()
            return ret["id"]

    async def delete(
        self,
        path: str,
    ):
        """Recursively delete everything registered under the video `path`."""
        logger.info("Deleting %s", path)
        async with self.client.delete(
            f"{self._url}/paths?recursive=true&path={quote(path)}",
            headers={"X-API-Key": self._api_key},
        ) as r:
            if not r.ok:
                logger.error(f"Request error: {await r.text()}")
            r.raise_for_status()

    async def get(self, path: str):
        """GET `path` on the kyoo API and return the parsed JSON body."""
        async with self.client.get(
            f"{self._url}/{path}",
            headers={"X-API-Key": self._api_key},
        ) as r:
            if not r.ok:
                logger.error(f"Request error: {await r.text()}")
            r.raise_for_status()
            return await r.json()

    async def put(self, path: str, *, data: dict[str, Any]):
        """PUT `data` to `path`, replacing the existing resource."""
        logger.debug(
            "Sending %s: %s",
            path,
            jsons.dumps(
                data,
                key_transformer=jsons.KEY_TRANSFORMER_CAMELCASE,
                jdkwargs={"indent": 4},
            ),
        )
        async with self.client.put(
            f"{self._url}/{path}",
            json=data,
            headers={"X-API-Key": self._api_key},
        ) as r:
            # Allow 409 and continue as if it worked.
            if not r.ok and r.status != 409:
                logger.error(f"Request error: {await r.text()}")
                r.raise_for_status()

View File

@ -1,97 +0,0 @@
from logging import getLogger
import os
from aiohttp import ClientSession
from abc import abstractmethod, abstractproperty
from typing import Optional
from providers.utils import ProviderError
from .types.show import Show
from .types.season import Season
from .types.episode import Episode
from .types.movie import Movie
from .types.collection import Collection
logger = getLogger(__name__)
class Provider:
	"""Abstract interface implemented by every metadata provider."""

	@classmethod
	def get_default(cls, client: ClientSession):
		"""Build the provider chain from environment variables.

		TheMovieDatabase and TVDB are enabled unless their api key is set
		to "disabled"; the selected provider is wrapped in TheXem for
		episode-numbering fixups.
		"""
		languages = os.environ.get("LIBRARY_LANGUAGES")
		if not languages:
			print("Missing environment variable 'LIBRARY_LANGUAGES'.")
			exit(2)
		languages = languages.split(",")

		providers = []

		from providers.implementations.themoviedatabase import TheMovieDatabase

		tmdb = os.environ.get("THEMOVIEDB_APIKEY") or TheMovieDatabase.DEFAULT_API_KEY
		if tmdb != "disabled":
			tmdb = TheMovieDatabase(languages, client, tmdb)
			providers.append(tmdb)

		from providers.implementations.thetvdb import TVDB

		tvdb = os.environ.get("TVDB_APIKEY") or TVDB.DEFAULT_API_KEY
		if tvdb != "disabled":
			pin = os.environ.get("TVDB_PIN") or None
			tvdb = TVDB(client, tvdb, pin, languages)
			providers.append(tvdb)

		# Emptiness check, not any(): the list holds provider instances
		# and their individual truthiness is irrelevant here.
		if not providers:
			raise ProviderError(
				"No provider configured. You probably forgot to specify an API Key"
			)

		from providers.implementations.thexem import TheXem

		provider = next(iter(providers))
		logger.info("Starting with provider: %s", provider.name)
		return TheXem(client, provider)

	# abc.abstractproperty is deprecated since Python 3.3; stacking
	# @property over @abstractmethod is the supported equivalent.
	@property
	@abstractmethod
	def name(self) -> str:
		"""Display name of this provider."""
		raise NotImplementedError

	@abstractmethod
	async def search_movie(self, name: str, year: Optional[int]) -> Movie:
		"""Find the movie best matching `name` (and `year` when known)."""
		raise NotImplementedError

	@abstractmethod
	async def search_episode(
		self,
		name: str,
		season: Optional[int],
		episode_nbr: Optional[int],
		absolute: Optional[int],
		year: Optional[int],
	) -> Episode:
		"""Find an episode from its show name and season/episode numbers."""
		raise NotImplementedError

	@abstractmethod
	async def identify_movie(self, movie_id: str) -> Movie:
		"""Fetch full metadata for the movie with this provider id."""
		raise NotImplementedError

	@abstractmethod
	async def identify_show(self, show_id: str) -> Show:
		"""Fetch full metadata for the show with this provider id."""
		raise NotImplementedError

	@abstractmethod
	async def identify_season(self, show_id: str, season: int) -> Season:
		"""Fetch metadata for one season of the given show."""
		raise NotImplementedError

	@abstractmethod
	async def identify_episode(
		self, show_id: str, season: Optional[int], episode_nbr: int, absolute: int
	) -> Episode:
		"""Fetch metadata for one episode of the given show."""
		raise NotImplementedError

	@abstractmethod
	async def identify_collection(self, provider_id: str) -> Collection:
		"""Fetch metadata for the collection with this provider id."""
		raise NotImplementedError

	@abstractmethod
	async def get_expected_titles(self) -> list[str]:
		"""Titles to feed the filename-guessing heuristics (may be empty)."""
		return []

View File

@ -1,34 +0,0 @@
import os
from aio_pika import connect_robust
class RabbitBase:
	"""Async context manager owning the scanner's rabbitmq connection.

	On enter, it connects, opens a channel, and makes sure the scanner
	queue exists; on exit, the channel and the connection are closed.
	"""

	QUEUE = "scanner"

	async def __aenter__(self):
		# RABBITMQ_URL takes precedence; the host/port/login values are
		# only used when no url is given.
		self._con = await connect_robust(
			os.environ.get("RABBITMQ_URL"),
			host=os.environ.get("RABBITMQ_HOST", "rabbitmq"),
			port=int(os.environ.get("RABBITMQ_PORT", "5672")),
			login=os.environ.get("RABBITMQ_DEFAULT_USER", "guest"),
			password=os.environ.get("RABBITMQ_DEFAULT_PASS", "guest"),
		)
		# Open the channel outside the try: if this fails there is no
		# channel to clean up, and the old code would have crashed with
		# an AttributeError on self._channel in the except clause.
		self._channel = await self._con.channel()
		# Attempt to declare the queue passively in case it already exists.
		try:
			self._queue = await self._channel.declare_queue(self.QUEUE, passive=True)
		except Exception:
			# A failed passive declare makes the server close the channel,
			# so open a fresh one and actively declare the queue this time.
			await self._channel.close()
			self._channel = await self._con.channel()
			self._queue = await self._channel.declare_queue(self.QUEUE)
		return self

	async def __aexit__(self, exc_type, exc_value, exc_tb):
		await self._channel.close()
		await self._con.close()

View File

@ -1,35 +0,0 @@
from dataclasses import asdict, dataclass, field
from typing import Optional
from providers.utils import ProviderError, select_translation, select_image
from .metadataid import MetadataID
@dataclass
class CollectionTranslation:
	"""Localized presentation data of a collection for one language."""
	# Display name of the collection (the only mandatory field).
	name: str
	# Short description; None when the provider has none for this language.
	overview: Optional[str] = None
	# Candidate artwork urls, presumably in provider preference order —
	# TODO confirm against select_image's selection logic.
	posters: list[str] = field(default_factory=list)
	logos: list[str] = field(default_factory=list)
	thumbnails: list[str] = field(default_factory=list)
@dataclass
class Collection:
	"""A group of related items (e.g. a movie saga) with its translations."""

	# Ids of this collection on each metadata provider, keyed by provider name.
	external_id: dict[str, MetadataID]
	# Localized infos, keyed by language tag.
	translations: dict[str, CollectionTranslation] = field(default_factory=dict)

	def to_kyoo(self):
		"""Flatten this collection into the dict shape the kyoo api expects."""
		translation = select_translation(self)
		if translation is None:
			raise ProviderError(
				"Could not find translations for the collection. Aborting"
			)
		ret = asdict(self)
		ret.update(asdict(translation))
		ret["poster"] = select_image(self, "posters")
		ret["thumbnail"] = select_image(self, "thumbnails")
		ret["logo"] = select_image(self, "logos")
		return ret

Some files were not shown because too many files have changed in this diff Show More