Compare commits
main...feat/prism (4 commits)

Commits: 388e132fca, 43e0a59f93, da34bd714e, 77da10e3ab
@@ -6,14 +6,6 @@ body:
     attributes:
       value: |
         Please use this form to request new feature for Immich
-
-  - type: checkboxes
-    attributes:
-      label: I have searched the existing feature requests to make sure this is not a duplicate request.
-      options:
-        - label: "Yes"
-          required: true
-
   - type: textarea
     id: feature
     attributes:
.github/workflows/cli.yml (2 changes, vendored)

@@ -58,7 +58,7 @@ jobs:
        uses: docker/setup-qemu-action@v3.0.0

      - name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@v3.3.0
+       uses: docker/setup-buildx-action@v3.2.0

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
.github/workflows/docker.yml (2 changes, vendored)

@@ -66,7 +66,7 @@ jobs:
        uses: docker/setup-qemu-action@v3.0.0

      - name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@v3.3.0
+       uses: docker/setup-buildx-action@v3.2.0

      - name: Login to Docker Hub
        # Only push to Docker Hub when making a release
@@ -1,4 +0,0 @@
-/.github/ @bo0tzz
-/docker/ @bo0tzz
-/server/ @danieldietzler
-/e2e/ @danieldietzler
@@ -131,10 +131,6 @@ If you feel like this is the right cause and the app is something you are seeing

 ## Star History

-<a href="https://star-history.com/#immich-app/immich&Date">
-  <picture>
-    <source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
-    <source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
-    <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
-  </picture>
+<a href="https://star-history.com/#immich-app/immich">
+  <img src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" alt="Star History Chart" width="100%" />
 </a>
@@ -21,7 +21,6 @@ module.exports = {
     'unicorn/prefer-module': 'off',
     'unicorn/prevent-abbreviations': 'off',
     'unicorn/no-process-exit': 'off',
-    'unicorn/import-style': 'off',
     curly: 2,
     'prettier/prettier': 0,
   },
@@ -1,4 +1,4 @@
-FROM node:20-alpine3.19@sha256:7e227295e96f5b00aa79555ae166f50610940d888fc2e321cf36304cbd17d7d6 as core
+FROM node:20-alpine3.19@sha256:bf77dc26e48ea95fca9d1aceb5acfa69d2e546b765ec2abfb502975f1a2d4def as core

 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
cli/package-lock.json (456 changes, generated)

The hunks shown for this generated lockfile are dependency version differences between the two branches, along with the matching `resolved` URLs, `integrity` hashes, `engines` ranges, and transitive dependency ranges. The version pairs visible in this compare:

| Package | main | feat/prism |
|---|---|---|
| @immich/cli (lockfile header and root package) | 2.2.0 | 2.1.0 |
| ../open-api/typescript-sdk (@immich/sdk) | 1.101.0 | 1.99.0 |
| eslint-plugin-unicorn (devDependency range / resolved) | ^52.0.0 / 52.0.0 | ^51.0.0 / 51.0.1 |
| esbuild and all @esbuild/* platform packages (aix-ppc64, android-arm, android-arm64, android-x64, darwin-arm64, darwin-x64, freebsd-arm64, freebsd-x64, linux-arm, linux-arm64, linux-ia32, linux-loong64, linux-mips64el, linux-ppc64, linux-riscv64, linux-s390x, linux-x64, netbsd-x64, openbsd-x64, sunos-x64, win32-arm64, win32-ia32, win32-x64) | 0.20.2 | 0.19.12 |
| @types/node | 20.11.30 | 20.11.27 |
| @typescript-eslint/* (eslint-plugin, parser, scope-manager, type-utils, types, typescript-estree, utils, visitor-keys) | 7.4.0 | 7.2.0 |
| @vitest/coverage-v8 | 1.4.0 | 1.3.1 |
| @vitest/expect, runner, snapshot, spy, utils | 1.4.0 | 1.3.1 |
| vitest, vite-node (and @vitest/browser, @vitest/ui peer dependencies) | 1.4.0 | 1.3.1 |
| istanbul-lib-source-maps (via @vitest/coverage-v8) | 5.0.4 | 4.0.1 (adds a source-map ^0.6.1 dependency and a node_modules/source-map 0.6.1 entry) |
| glob | 10.3.12 | 10.3.10 |
| path-scurry | 1.10.2 | 1.10.1 |
| postcss | 8.4.38 | 8.4.35 |
| source-map-js | 1.2.0 | 1.0.2 |
| typescript | 5.4.3 | 5.4.2 |
| vite | 5.2.7 | 5.1.6 |

The @typescript-eslint packages also differ in their engines.node range (^18.18.0 || >=20.0.0 on main, ^16.0.0 || >=18.0.0 on feat/prism), and transitive ranges move accordingly: glob's jackspeak ^2.3.6 vs ^2.3.5, minipass ^7.0.4 vs ^5.0.0 || ^6.0.2 || ^7.0.0, and path-scurry ^1.10.2 vs ^1.10.1; path-scurry's lru-cache ^10.2.0 vs ^9.1.1 || ^10.0.0; postcss's source-map-js ^1.2.0 vs ^1.0.2; vite's esbuild ^0.20.1 vs ^0.19.3, postcss ^8.4.38 vs ^8.4.35, and rollup ^4.13.0 vs ^4.2.0.
cli/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.0",
+  "version": "2.1.0",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -28,7 +28,7 @@
   "eslint": "^8.56.0",
   "eslint-config-prettier": "^9.1.0",
   "eslint-plugin-prettier": "^5.1.3",
-  "eslint-plugin-unicorn": "^52.0.0",
+  "eslint-plugin-unicorn": "^51.0.0",
   "glob": "^10.3.1",
   "mock-fs": "^5.2.0",
   "prettier": "^3.2.5",
@@ -1,7 +1,5 @@
import {
  Action,
  AssetBulkUploadCheckResult,
  AssetFileUploadResponseDto,
  addAssetsToAlbum,
  checkBulkUpload,
  createAlbum,
@@ -10,235 +8,162 @@ import {
  getSupportedMediaTypes,
} from '@immich/sdk';
import byteSize from 'byte-size';
import { Presets, SingleBar } from 'cli-progress';
import { chunk } from 'lodash-es';
import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises';
import cliProgress from 'cli-progress';
import { chunk, zip } from 'lodash-es';
import { createHash } from 'node:crypto';
import fs, { createReadStream } from 'node:fs';
import { access, constants, stat, unlink } from 'node:fs/promises';
import os from 'node:os';
import path, { basename } from 'node:path';
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
import { basename } from 'node:path';
import { CrawlService } from 'src/services/crawl.service';
import { BaseOptions, authenticate } from 'src/utils';

const s = (count: number) => (count === 1 ? '' : 's');
const zipDefined = zip as <T, U>(a: T[], b: U[]) => [T, U][];

// TODO figure out why `id` is missing
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
type Asset = { id: string; filepath: string };
enum CheckResponseStatus {
  ACCEPT = 'accept',
  REJECT = 'reject',
  DUPLICATE = 'duplicate',
}

interface UploadOptionsDto {
  recursive?: boolean;
  exclusionPatterns?: string[];
  dryRun?: boolean;
  skipHash?: boolean;
  delete?: boolean;
  album?: boolean;
class Asset {
  readonly path: string;

  id?: string;
  deviceAssetId?: string;
  fileCreatedAt?: Date;
  fileModifiedAt?: Date;
  sidecarPath?: string;
  fileSize?: number;
  albumName?: string;
  includeHidden?: boolean;
  concurrency: number;
}

class UploadFile extends File {
  constructor(
    private filepath: string,
    private _size: number,
  ) {
    super([], basename(filepath));
  constructor(path: string) {
    this.path = path;
  }

  get size() {
    return this._size;
  async prepare() {
    const stats = await stat(this.path);
    this.deviceAssetId = `${basename(this.path)}-${stats.size}`.replaceAll(/\s+/g, '');
    this.fileCreatedAt = stats.mtime;
    this.fileModifiedAt = stats.mtime;
    this.fileSize = stats.size;
    this.albumName = this.extractAlbumName();
  }

  stream() {
    return createReadStream(this.filepath) as any;
  async getUploadFormData(): Promise<FormData> {
    if (!this.deviceAssetId) {
      throw new Error('Device asset id not set');
    }
  }

export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
  await authenticate(baseOptions);

  const scanFiles = await scan(paths, options);
  if (scanFiles.length === 0) {
    console.log('No files found, exiting');
    return;
    if (!this.fileCreatedAt) {
      throw new Error('File created at not set');
    }
    if (!this.fileModifiedAt) {
      throw new Error('File modified at not set');
    }

  const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
  const newAssets = await uploadFiles(newFiles, options);
  await updateAlbums([...newAssets, ...duplicates], options);
  await deleteFiles(newFiles, options);
};

const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
  const { image, video } = await getSupportedMediaTypes();

  console.log('Crawling for assets...');
  const files = await crawl({
    pathsToCrawl,
    recursive: options.recursive,
    exclusionPatterns: options.exclusionPatterns,
    includeHidden: options.includeHidden,
    extensions: [...image, ...video],
  });

  return files;
};

const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
  if (skipHash) {
    console.log('Skipping hash check, assuming all files are new');
    return { newFiles: files, duplicates: [] };
  }

  const progressBar = new SingleBar(
    { format: 'Checking files | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
    Presets.shades_classic,
  );

  progressBar.start(files.length, 0);

  const newFiles: string[] = [];
  const duplicates: Asset[] = [];

    // TODO: doesn't xmp replace the file extension? Will need investigation
    const sideCarPath = `${this.path}.xmp`;
    let sidecarData: Blob | undefined = undefined;
  try {
    // TODO refactor into a queue
    for (const items of chunk(files, concurrency)) {
      const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
      const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });

      for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
        if (action === Action.Accept) {
          newFiles.push(filepath);
        } else {
          // rejects are always duplicates
          duplicates.push({ id: assetId as string, filepath });
        }
        progressBar.increment();
      }
    }
  } finally {
    progressBar.stop();
  }

  console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);

  return { newFiles, duplicates };
};

const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
  if (files.length === 0) {
    console.log('All assets were already uploaded, nothing to do.');
    return [];
  }

  // Compute total size first
  let totalSize = 0;
  const statsMap = new Map<string, Stats>();
  for (const filepath of files) {
    const stats = await stat(filepath);
    statsMap.set(filepath, stats);
    totalSize += stats.size;
  }

  if (dryRun) {
    console.log(`Would have uploaded ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
    return [];
  }

  const uploadProgress = new SingleBar(
    { format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
    Presets.shades_classic,
  );
  uploadProgress.start(totalSize, 0);
  uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });

  let duplicateCount = 0;
  let duplicateSize = 0;
  let successCount = 0;
  let successSize = 0;

  const newAssets: Asset[] = [];

  try {
    for (const items of chunk(files, concurrency)) {
      await Promise.all(
        items.map(async (filepath) => {
          const stats = statsMap.get(filepath) as Stats;
          const response = await uploadFile(filepath, stats);

          newAssets.push({ id: response.id, filepath });

          if (response.duplicate) {
            duplicateCount++;
            duplicateSize += stats.size ?? 0;
          } else {
            successCount++;
            successSize += stats.size ?? 0;
          }

          uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });

          return response;
        }),
      );
    }
  } finally {
    uploadProgress.stop();
  }

  console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
  if (duplicateCount > 0) {
    console.log(`Skipped ${duplicateCount} duplicate asset${s(duplicateCount)} (${byteSize(duplicateSize)})`);
  }
  return newAssets;
};

const uploadFile = async (input: string, stats: Stats): Promise<AssetFileUploadResponseDto> => {
  const { baseUrl, headers } = defaults;

  const assetPath = path.parse(input);
  const noExtension = path.join(assetPath.dir, assetPath.name);

  const sidecarsFiles = await Promise.all(
    // XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
    [`${noExtension}.xmp`, `${input}.xmp`].map(async (sidecarPath) => {
      try {
        const stats = await stat(sidecarPath);
        return new UploadFile(sidecarPath, stats.size);
      } catch {
        return false;
      }
    }),
  );

  const sidecarData = sidecarsFiles.find((file): file is UploadFile => file !== false);
      await access(sideCarPath, constants.R_OK);
      sidecarData = new File([await fs.openAsBlob(sideCarPath)], basename(sideCarPath));
    } catch {}

    const data: any = {
      assetData: new File([await fs.openAsBlob(this.path)], basename(this.path)),
      deviceAssetId: this.deviceAssetId,
      deviceId: 'CLI',
      fileCreatedAt: this.fileCreatedAt.toISOString(),
      fileModifiedAt: this.fileModifiedAt.toISOString(),
      isFavorite: String(false),
    };
  const formData = new FormData();
  formData.append('deviceAssetId', `${basename(input)}-${stats.size}`.replaceAll(/\s+/g, ''));
  formData.append('deviceId', 'CLI');
  formData.append('fileCreatedAt', stats.mtime.toISOString());
  formData.append('fileModifiedAt', stats.mtime.toISOString());
  formData.append('fileSize', String(stats.size));
  formData.append('isFavorite', 'false');
  formData.append('assetData', new UploadFile(input, stats.size));

    for (const property in data) {
      formData.append(property, data[property]);
    }

  if (sidecarData) {
    formData.append('sidecarData', sidecarData);
  }

  const response = await fetch(`${baseUrl}/asset/upload`, {
    method: 'post',
    redirect: 'error',
    headers: headers as Record<string, string>,
    body: formData,
  });
  if (response.status !== 200 && response.status !== 201) {
    throw new Error(await response.text());
    return formData;
  }

  return response.json();
};
  async delete(): Promise<void> {
    return unlink(this.path);
  }

  public async hash(): Promise<string> {
    const sha1 = (filePath: string) => {
      const hash = createHash('sha1');
      return new Promise<string>((resolve, reject) => {
        const rs = createReadStream(filePath);
        rs.on('error', reject);
        rs.on('data', (chunk) => hash.update(chunk));
        rs.on('end', () => resolve(hash.digest('hex')));
      });
    };

    return await sha1(this.path);
  }

  private extractAlbumName(): string | undefined {
    return os.platform() === 'win32' ? this.path.split('\\').at(-2) : this.path.split('/').at(-2);
  }
}

class UploadOptionsDto {
  recursive? = false;
  exclusionPatterns?: string[] = [];
  dryRun? = false;
  skipHash? = false;
  delete? = false;
  album? = false;
  albumName? = '';
  includeHidden? = false;
  concurrency? = 4;
}

export const upload = (paths: string[], baseOptions: BaseOptions, uploadOptions: UploadOptionsDto) =>
  new UploadCommand().run(paths, baseOptions, uploadOptions);

// TODO refactor this
class UploadCommand {
  public async run(paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto): Promise<void> {
    await authenticate(baseOptions);

    console.log('Crawling for assets...');
    const files = await this.getFiles(paths, options);

    if (files.length === 0) {
      console.log('No assets found, exiting');
      return;
    }

    const assetsToCheck = files.map((path) => new Asset(path));

    const { newAssets, duplicateAssets } = await this.checkAssets(assetsToCheck, options.concurrency ?? 4);

    const totalSizeUploaded = await this.upload(newAssets, options);
    const messageStart = options.dryRun ? 'Would have' : 'Successfully';
    if (newAssets.length === 0) {
      console.log('All assets were already uploaded, nothing to do.');
    } else {
      console.log(
        `${messageStart} uploaded ${newAssets.length} asset${newAssets.length === 1 ? '' : 's'} (${byteSize(totalSizeUploaded)})`,
      );
    }

    if (options.album || options.albumName) {
      const { createdAlbumCount, updatedAssetCount } = await this.updateAlbums(
        [...newAssets, ...duplicateAssets],
        options,
      );
      console.log(`${messageStart} created ${createdAlbumCount} new album${createdAlbumCount === 1 ? '' : 's'}`);
      console.log(`${messageStart} updated ${updatedAssetCount} asset${updatedAssetCount === 1 ? '' : 's'}`);
    }

const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<void> => {
  if (!options.delete) {
    return;
  }
@@ -250,75 +175,169 @@ const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<

  console.log('Deleting assets that have been uploaded...');

  const deletionProgress = new SingleBar(
    { format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
    Presets.shades_classic,
  );
  deletionProgress.start(files.length, 0);
    await this.deleteAssets(newAssets, options);
  }

  public async checkAssets(
    assetsToCheck: Asset[],
    concurrency: number,
  ): Promise<{ newAssets: Asset[]; duplicateAssets: Asset[]; rejectedAssets: Asset[] }> {
    for (const assets of chunk(assetsToCheck, concurrency)) {
      await Promise.all(assets.map((asset: Asset) => asset.prepare()));
    }

    const checkProgress = new cliProgress.SingleBar(
      { format: 'Checking assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
      cliProgress.Presets.shades_classic,
    );
    checkProgress.start(assetsToCheck.length, 0);

    const newAssets = [];
    const duplicateAssets = [];
    const rejectedAssets = [];
  try {
    for (const assetBatch of chunk(files, options.concurrency)) {
      await Promise.all(assetBatch.map((input: string) => unlink(input)));
      deletionProgress.update(assetBatch.length);
      for (const assets of chunk(assetsToCheck, concurrency)) {
        const checkedAssets = await this.getStatus(assets);
        for (const checked of checkedAssets) {
          if (checked.status === CheckResponseStatus.ACCEPT) {
            newAssets.push(checked.asset);
          } else if (checked.status === CheckResponseStatus.DUPLICATE) {
            duplicateAssets.push(checked.asset);
          } else {
            rejectedAssets.push(checked.asset);
          }
          checkProgress.increment();
        }
      }
  } finally {
    deletionProgress.stop();
  }
};

const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
  if (!options.album && !options.albumName) {
    return;
  }
  const { dryRun, concurrency } = options;

  const albums = await getAllAlbums({});
  const existingAlbums = new Map(albums.map((album) => [album.albumName, album.id]));
  const newAlbums: Set<string> = new Set();
  for (const { filepath } of assets) {
    const albumName = getAlbumName(filepath, options);
    if (albumName && !existingAlbums.has(albumName)) {
      newAlbums.add(albumName);
    }
      checkProgress.stop();
  }

  if (dryRun) {
    // TODO print asset counts for new albums
    console.log(`Would have created ${newAlbums.size} new album${s(newAlbums.size)}`);
    console.log(`Would have updated ${assets.length} asset${s(assets.length)}`);
    return;
    return { newAssets, duplicateAssets, rejectedAssets };
|
||||
}
|
||||
|
||||
const progressBar = new SingleBar(
|
||||
{ format: 'Creating albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} albums' },
|
||||
Presets.shades_classic,
|
||||
public async upload(assetsToUpload: Asset[], options: UploadOptionsDto): Promise<number> {
|
||||
let totalSize = 0;
|
||||
|
||||
// Compute total size first
|
||||
for (const asset of assetsToUpload) {
|
||||
totalSize += asset.fileSize ?? 0;
|
||||
}
|
||||
|
||||
if (options.dryRun) {
|
||||
return totalSize;
|
||||
}
|
||||
|
||||
const uploadProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
progressBar.start(newAlbums.size, 0);
|
||||
uploadProgress.start(totalSize, 0);
|
||||
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
|
||||
|
||||
let totalSizeUploaded = 0;
|
||||
try {
|
||||
for (const albumNames of chunk([...newAlbums], concurrency)) {
|
||||
const items = await Promise.all(
|
||||
albumNames.map((albumName: string) => createAlbum({ createAlbumDto: { albumName } })),
|
||||
);
|
||||
for (const { id, albumName } of items) {
|
||||
existingAlbums.set(albumName, id);
|
||||
for (const assets of chunk(assetsToUpload, options.concurrency)) {
|
||||
const ids = await this.uploadAssets(assets);
|
||||
for (const [asset, id] of zipDefined(assets, ids)) {
|
||||
asset.id = id;
|
||||
if (asset.fileSize) {
|
||||
totalSizeUploaded += asset.fileSize ?? 0;
|
||||
} else {
|
||||
console.log(`Could not determine file size for ${asset.path}`);
|
||||
}
|
||||
progressBar.increment(albumNames.length);
|
||||
}
|
||||
uploadProgress.update(totalSizeUploaded, { value_formatted: byteSize(totalSizeUploaded) });
|
||||
}
|
||||
} finally {
|
||||
progressBar.stop();
|
||||
uploadProgress.stop();
|
||||
}
|
||||
|
||||
console.log(`Successfully created ${newAlbums.size} new album${s(newAlbums.size)}`);
|
||||
console.log(`Successfully updated ${assets.length} asset${s(assets.length)}`);
|
||||
return totalSizeUploaded;
|
||||
}
|
||||
|
||||
public async getFiles(paths: string[], options: UploadOptionsDto): Promise<string[]> {
|
||||
const inputFiles: string[] = [];
|
||||
for (const pathArgument of paths) {
|
||||
const fileStat = await fs.promises.lstat(pathArgument);
|
||||
if (fileStat.isFile()) {
|
||||
inputFiles.push(pathArgument);
|
||||
}
|
||||
}
|
||||
|
||||
const files: string[] = await this.crawl(paths, options);
|
||||
files.push(...inputFiles);
|
||||
return files;
|
||||
}
|
||||
|
||||
public async getAlbums(): Promise<Map<string, string>> {
|
||||
const existingAlbums = await getAllAlbums({});
|
||||
|
||||
const albumMapping = new Map<string, string>();
|
||||
for (const album of existingAlbums) {
|
||||
albumMapping.set(album.albumName, album.id);
|
||||
}
|
||||
|
||||
return albumMapping;
|
||||
}
|
||||
|
||||
public async updateAlbums(
|
||||
assets: Asset[],
|
||||
options: UploadOptionsDto,
|
||||
): Promise<{ createdAlbumCount: number; updatedAssetCount: number }> {
|
||||
if (options.albumName) {
|
||||
for (const asset of assets) {
|
||||
asset.albumName = options.albumName;
|
||||
}
|
||||
}
|
||||
|
||||
const existingAlbums = await this.getAlbums();
|
||||
const assetsToUpdate = assets.filter(
|
||||
(asset): asset is Asset & { albumName: string; id: string } => !!(asset.albumName && asset.id),
|
||||
);
|
||||
|
||||
const newAlbumsSet: Set<string> = new Set();
|
||||
for (const asset of assetsToUpdate) {
|
||||
if (!existingAlbums.has(asset.albumName)) {
|
||||
newAlbumsSet.add(asset.albumName);
|
||||
}
|
||||
}
|
||||
|
||||
const newAlbums = [...newAlbumsSet];
|
||||
|
||||
if (options.dryRun) {
|
||||
return { createdAlbumCount: newAlbums.length, updatedAssetCount: assetsToUpdate.length };
|
||||
}
|
||||
|
||||
const albumCreationProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: 'Creating albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} albums',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
albumCreationProgress.start(newAlbums.length, 0);
|
||||
|
||||
try {
|
||||
for (const albumNames of chunk(newAlbums, options.concurrency)) {
|
||||
const newAlbumIds = await Promise.all(
|
||||
albumNames.map((albumName: string) => createAlbum({ createAlbumDto: { albumName } }).then((r) => r.id)),
|
||||
);
|
||||
|
||||
for (const [albumName, albumId] of zipDefined(albumNames, newAlbumIds)) {
|
||||
existingAlbums.set(albumName, albumId);
|
||||
}
|
||||
|
||||
albumCreationProgress.increment(albumNames.length);
|
||||
}
|
||||
} finally {
|
||||
albumCreationProgress.stop();
|
||||
}
|
||||
|
||||
const albumToAssets = new Map<string, string[]>();
|
||||
for (const asset of assets) {
|
||||
const albumName = getAlbumName(asset.filepath, options);
|
||||
if (!albumName) {
|
||||
continue;
|
||||
}
|
||||
const albumId = existingAlbums.get(albumName);
|
||||
for (const asset of assetsToUpdate) {
|
||||
const albumId = existingAlbums.get(asset.albumName);
|
||||
if (albumId) {
|
||||
if (!albumToAssets.has(albumId)) {
|
||||
albumToAssets.set(albumId, []);
|
||||
@ -327,15 +346,17 @@ const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
|
||||
}
|
||||
}
|
||||
|
||||
const albumUpdateProgress = new SingleBar(
|
||||
{ format: 'Adding assets to albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||
Presets.shades_classic,
|
||||
const albumUpdateProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: 'Adding assets to albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
albumUpdateProgress.start(assets.length, 0);
|
||||
albumUpdateProgress.start(assetsToUpdate.length, 0);
|
||||
|
||||
try {
|
||||
for (const [albumId, assets] of albumToAssets.entries()) {
|
||||
for (const assetBatch of chunk(assets, Math.min(1000 * concurrency, 65_000))) {
|
||||
for (const assetBatch of chunk(assets, Math.min(1000 * (options.concurrency ?? 4), 65_000))) {
|
||||
await addAssetsToAlbum({ id: albumId, bulkIdsDto: { ids: assetBatch } });
|
||||
albumUpdateProgress.increment(assetBatch.length);
|
||||
}
|
||||
@ -343,9 +364,89 @@ const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
|
||||
} finally {
|
||||
albumUpdateProgress.stop();
|
||||
}
|
||||
};
|
||||
|
||||
const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
|
||||
const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
|
||||
return options.albumName ?? folderName;
|
||||
};
|
||||
return { createdAlbumCount: newAlbums.length, updatedAssetCount: assetsToUpdate.length };
|
||||
}
|
||||
|
||||
public async deleteAssets(assets: Asset[], options: UploadOptionsDto): Promise<void> {
|
||||
const deletionProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
deletionProgress.start(assets.length, 0);
|
||||
|
||||
try {
|
||||
for (const assetBatch of chunk(assets, options.concurrency)) {
|
||||
await Promise.all(assetBatch.map((asset: Asset) => asset.delete()));
|
||||
deletionProgress.update(assetBatch.length);
|
||||
}
|
||||
} finally {
|
||||
deletionProgress.stop();
|
||||
}
|
||||
}
|
||||
|
||||
private async getStatus(assets: Asset[]): Promise<{ asset: Asset; status: CheckResponseStatus }[]> {
|
||||
const checkResponse = await this.checkHashes(assets);
|
||||
|
||||
const responses = [];
|
||||
for (const [check, asset] of zipDefined(checkResponse, assets)) {
|
||||
if (check.assetId) {
|
||||
asset.id = check.assetId;
|
||||
}
|
||||
|
||||
if (check.action === 'accept') {
|
||||
responses.push({ asset, status: CheckResponseStatus.ACCEPT });
|
||||
} else if (check.reason === 'duplicate') {
|
||||
responses.push({ asset, status: CheckResponseStatus.DUPLICATE });
|
||||
} else {
|
||||
responses.push({ asset, status: CheckResponseStatus.REJECT });
|
||||
}
|
||||
}
|
||||
|
||||
return responses;
|
||||
}
|
||||
|
||||
private async checkHashes(assetsToCheck: Asset[]): Promise<AssetBulkUploadCheckResult[]> {
|
||||
const checksums = await Promise.all(assetsToCheck.map((asset) => asset.hash()));
|
||||
const assetBulkUploadCheckDto = {
|
||||
assets: zipDefined(assetsToCheck, checksums).map(([asset, checksum]) => ({ id: asset.path, checksum })),
|
||||
};
|
||||
const checkResponse = await checkBulkUpload({ assetBulkUploadCheckDto });
|
||||
return checkResponse.results;
|
||||
}
|
||||
|
||||
private async uploadAssets(assets: Asset[]): Promise<string[]> {
|
||||
const fileRequests = await Promise.all(assets.map((asset) => asset.getUploadFormData()));
|
||||
const results = await Promise.all(fileRequests.map((request) => this.uploadAsset(request)));
|
||||
return results.map((response) => response.id);
|
||||
}
|
||||
|
||||
private async crawl(paths: string[], options: UploadOptionsDto): Promise<string[]> {
|
||||
const formatResponse = await getSupportedMediaTypes();
|
||||
const crawlService = new CrawlService(formatResponse.image, formatResponse.video);
|
||||
|
||||
return crawlService.crawl({
|
||||
pathsToCrawl: paths,
|
||||
recursive: options.recursive,
|
||||
exclusionPatterns: options.exclusionPatterns,
|
||||
includeHidden: options.includeHidden,
|
||||
});
|
||||
}
|
||||
|
||||
private async uploadAsset(data: FormData): Promise<{ id: string }> {
|
||||
const { baseUrl, headers } = defaults;
|
||||
|
||||
const response = await fetch(`${baseUrl}/asset/upload`, {
|
||||
method: 'post',
|
||||
redirect: 'error',
|
||||
headers: headers as Record<string, string>,
|
||||
body: data,
|
||||
});
|
||||
if (response.status !== 200 && response.status !== 201) {
|
||||
throw new Error(await response.text());
|
||||
}
|
||||
return response.json();
|
||||
}
|
||||
}
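For orientation, a minimal sketch of how the exported `upload` entry point is invoked; the module path and argument values here are illustrative assumptions, not part of the diff:

```typescript
// Hypothetical call site; option names mirror UploadOptionsDto above.
import { upload } from 'src/commands/asset';

await upload(
  ['/photos', '/videos'],                           // paths to crawl (illustrative)
  { configDirectory: '~/.config/immich/' },         // BaseOptions; url/key fall back to the saved auth file
  { recursive: true, concurrency: 4, album: true }, // UploadOptionsDto
);
```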
|
||||
|
@ -3,12 +3,12 @@ import { existsSync } from 'node:fs';
|
||||
import { mkdir, unlink } from 'node:fs/promises';
|
||||
import { BaseOptions, connect, getAuthFilePath, logError, withError, writeAuthFile } from 'src/utils';
|
||||
|
||||
export const login = async (url: string, key: string, options: BaseOptions) => {
|
||||
console.log(`Logging in to ${url}`);
|
||||
export const login = async (instanceUrl: string, apiKey: string, options: BaseOptions) => {
|
||||
console.log(`Logging in to ${instanceUrl}`);
|
||||
|
||||
const { configDirectory: configDir } = options;
|
||||
|
||||
await connect(url, key);
|
||||
await connect(instanceUrl, apiKey);
|
||||
|
||||
const [error, userInfo] = await withError(getMyUserInfo());
|
||||
if (error) {
|
||||
@ -27,7 +27,7 @@ export const login = async (url: string, key: string, options: BaseOptions) => {
|
||||
}
|
||||
}
|
||||
|
||||
await writeAuthFile(configDir, { url, key });
|
||||
await writeAuthFile(configDir, { instanceUrl, apiKey });
|
||||
|
||||
console.log(`Wrote auth info to ${getAuthFilePath(configDir)}`);
|
||||
};
|
||||
|
@ -1,24 +1,15 @@
|
||||
import { getAssetStatistics, getMyUserInfo, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
|
||||
import { getAssetStatistics, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
|
||||
import { BaseOptions, authenticate } from 'src/utils';
|
||||
|
||||
export const serverInfo = async (options: BaseOptions) => {
|
||||
const { url } = await authenticate(options);
|
||||
await authenticate(options);
|
||||
|
||||
const [versionInfo, mediaTypes, stats, userInfo] = await Promise.all([
|
||||
getServerVersion(),
|
||||
getSupportedMediaTypes(),
|
||||
getAssetStatistics({}),
|
||||
getMyUserInfo(),
|
||||
]);
|
||||
const versionInfo = await getServerVersion();
|
||||
const mediaTypes = await getSupportedMediaTypes();
|
||||
const stats = await getAssetStatistics({});
|
||||
|
||||
console.log(`Server Info (via ${userInfo.email})`);
|
||||
console.log(` Url: ${url}`);
|
||||
console.log(` Version: ${versionInfo.major}.${versionInfo.minor}.${versionInfo.patch}`);
|
||||
console.log(` Formats:`);
|
||||
console.log(` Images: ${mediaTypes.image.map((extension) => extension.replace('.', ''))}`);
|
||||
console.log(` Videos: ${mediaTypes.video.map((extension) => extension.replace('.', ''))}`);
|
||||
console.log(` Statistics:`);
|
||||
console.log(` Images: ${stats.images}`);
|
||||
console.log(` Videos: ${stats.videos}`);
|
||||
console.log(` Total: ${stats.total}`);
|
||||
console.log(`Server Version: ${versionInfo.major}.${versionInfo.minor}.${versionInfo.patch}`);
|
||||
console.log(`Image Types: ${mediaTypes.image.map((extension) => extension.replace('.', ''))}`);
|
||||
console.log(`Video Types: ${mediaTypes.video.map((extension) => extension.replace('.', ''))}`);
|
||||
console.log(`Statistics:\n Images: ${stats.images}\n Videos: ${stats.videos}\n Total: ${stats.total}`);
|
||||
};
|
||||
|
@ -19,7 +19,7 @@ const program = new Command()
|
||||
.default(defaultConfigDirectory),
|
||||
)
|
||||
.addOption(new Option('-u, --url [url]', 'Immich server URL').env('IMMICH_INSTANCE_URL'))
|
||||
.addOption(new Option('-k, --key [key]', 'Immich API key').env('IMMICH_API_KEY'));
|
||||
.addOption(new Option('-k, --key [apiKey]', 'Immich API key').env('IMMICH_API_KEY'));
|
||||
|
||||
program
|
||||
.command('login')
|
||||
|
@ -1,31 +1,14 @@
|
||||
import mockfs from 'mock-fs';
|
||||
import { CrawlOptions, crawl } from 'src/utils';
|
||||
import { CrawlOptions, CrawlService } from './crawl.service';
|
||||
|
||||
interface Test {
|
||||
test: string;
|
||||
options: Omit<CrawlOptions, 'extensions'>;
|
||||
options: CrawlOptions;
|
||||
files: Record<string, boolean>;
|
||||
}
|
||||
|
||||
const cwd = process.cwd();
|
||||
|
||||
const extensions = [
|
||||
'.jpg',
|
||||
'.jpeg',
|
||||
'.png',
|
||||
'.heif',
|
||||
'.heic',
|
||||
'.tif',
|
||||
'.nef',
|
||||
'.webp',
|
||||
'.tiff',
|
||||
'.dng',
|
||||
'.gif',
|
||||
'.mov',
|
||||
'.mp4',
|
||||
'.webm',
|
||||
];
|
||||
|
||||
const tests: Test[] = [
|
||||
{
|
||||
test: 'should return empty when crawling an empty path list',
|
||||
@ -268,7 +251,12 @@ const tests: Test[] = [
|
||||
},
|
||||
];
|
||||
|
||||
describe('crawl', () => {
|
||||
describe(CrawlService.name, () => {
|
||||
const sut = new CrawlService(
|
||||
['.jpg', '.jpeg', '.png', '.heif', '.heic', '.tif', '.nef', '.webp', '.tiff', '.dng', '.gif'],
|
||||
['.mov', '.mp4', '.webm'],
|
||||
);
|
||||
|
||||
afterEach(() => {
|
||||
mockfs.restore();
|
||||
});
|
||||
@ -278,7 +266,7 @@ describe('crawl', () => {
|
||||
it(test, async () => {
|
||||
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));
|
||||
|
||||
const actual = await crawl({ ...options, extensions });
|
||||
const actual = await sut.crawl(options);
|
||||
const expected = Object.entries(files)
|
||||
.filter((entry) => entry[1])
|
||||
.map(([file]) => file);
|
cli/src/services/crawl.service.ts (new file)
@ -0,0 +1,70 @@
|
||||
import { glob } from 'glob';
|
||||
import * as fs from 'node:fs';
|
||||
|
||||
export class CrawlOptions {
|
||||
pathsToCrawl!: string[];
|
||||
recursive? = false;
|
||||
includeHidden? = false;
|
||||
exclusionPatterns?: string[];
|
||||
}
|
||||
|
||||
export class CrawlService {
|
||||
private readonly extensions!: string[];
|
||||
|
||||
constructor(image: string[], video: string[]) {
|
||||
this.extensions = [...image, ...video].map((extension) => extension.replace('.', ''));
|
||||
}
|
||||
|
||||
async crawl(options: CrawlOptions): Promise<string[]> {
|
||||
const { recursive, pathsToCrawl, exclusionPatterns, includeHidden } = options;
|
||||
|
||||
if (!pathsToCrawl) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const patterns: string[] = [];
|
||||
const crawledFiles: string[] = [];
|
||||
|
||||
for await (const currentPath of pathsToCrawl) {
|
||||
try {
|
||||
const stats = await fs.promises.stat(currentPath);
|
||||
if (stats.isFile() || stats.isSymbolicLink()) {
|
||||
crawledFiles.push(currentPath);
|
||||
} else {
|
||||
patterns.push(currentPath);
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
patterns.push(currentPath);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let searchPattern: string;
|
||||
if (patterns.length === 1) {
|
||||
searchPattern = patterns[0];
|
||||
} else if (patterns.length === 0) {
|
||||
return crawledFiles;
|
||||
} else {
|
||||
searchPattern = '{' + patterns.join(',') + '}';
|
||||
}
|
||||
|
||||
if (recursive) {
|
||||
searchPattern = searchPattern + '/**/';
|
||||
}
|
||||
|
||||
searchPattern = `${searchPattern}/*.{${this.extensions.join(',')}}`;
|
||||
|
||||
const globbedFiles = await glob(searchPattern, {
|
||||
absolute: true,
|
||||
nocase: true,
|
||||
nodir: true,
|
||||
dot: includeHidden,
|
||||
ignore: exclusionPatterns,
|
||||
});
|
||||
|
||||
return [...crawledFiles, ...globbedFiles].sort();
|
||||
}
|
||||
}
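For context, a brief usage sketch of this service, mirroring how `UploadCommand.crawl()` above wires it up; the import path, example path, and exclusion pattern are illustrative assumptions:

```typescript
import { getSupportedMediaTypes } from '@immich/sdk';
import { CrawlService } from 'src/services/crawl.service';

const listAssets = async (): Promise<string[]> => {
  // Assumes the SDK defaults (base URL and API key) were already configured via connect()/authenticate().
  const { image, video } = await getSupportedMediaTypes();
  const crawler = new CrawlService(image, video);

  // Returns a sorted list of absolute paths to matching files.
  return crawler.crawl({
    pathsToCrawl: ['/photos'],          // illustrative path
    recursive: true,
    includeHidden: false,
    exclusionPatterns: ['**/cache/**'], // illustrative pattern
  });
};
```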
|
cli/src/utils.ts
@ -1,61 +1,54 @@
|
||||
import { defaults, getMyUserInfo, isHttpError } from '@immich/sdk';
|
||||
import { glob } from 'glob';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { createReadStream } from 'node:fs';
|
||||
import { readFile, stat, writeFile } from 'node:fs/promises';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { join } from 'node:path';
|
||||
import yaml from 'yaml';
|
||||
|
||||
export interface BaseOptions {
|
||||
configDirectory: string;
|
||||
key?: string;
|
||||
url?: string;
|
||||
apiKey?: string;
|
||||
instanceUrl?: string;
|
||||
}
|
||||
|
||||
export type AuthDto = { url: string; key: string };
|
||||
type OldAuthDto = { instanceUrl: string; apiKey: string };
|
||||
export interface AuthDto {
|
||||
instanceUrl: string;
|
||||
apiKey: string;
|
||||
}
|
||||
|
||||
export const authenticate = async (options: BaseOptions): Promise<AuthDto> => {
|
||||
const { configDirectory: configDir, url, key } = options;
|
||||
export const authenticate = async (options: BaseOptions): Promise<void> => {
|
||||
const { configDirectory: configDir, instanceUrl, apiKey } = options;
|
||||
|
||||
// provided in command
|
||||
if (url && key) {
|
||||
return connect(url, key);
|
||||
if (instanceUrl && apiKey) {
|
||||
await connect(instanceUrl, apiKey);
|
||||
return;
|
||||
}
|
||||
|
||||
// fallback to auth file
|
||||
// fallback to file
|
||||
const config = await readAuthFile(configDir);
|
||||
const auth = await connect(config.url, config.key);
|
||||
if (auth.url !== config.url) {
|
||||
await writeAuthFile(configDir, auth);
|
||||
}
|
||||
|
||||
return auth;
|
||||
await connect(config.instanceUrl, config.apiKey);
|
||||
};
|
||||
|
||||
export const connect = async (url: string, key: string) => {
|
||||
const wellKnownUrl = new URL('.well-known/immich', url);
|
||||
export const connect = async (instanceUrl: string, apiKey: string): Promise<void> => {
|
||||
const wellKnownUrl = new URL('.well-known/immich', instanceUrl);
|
||||
try {
|
||||
const wellKnown = await fetch(wellKnownUrl).then((response) => response.json());
|
||||
const endpoint = new URL(wellKnown.api.endpoint, url).toString();
|
||||
if (endpoint !== url) {
|
||||
const endpoint = new URL(wellKnown.api.endpoint, instanceUrl).toString();
|
||||
if (endpoint !== instanceUrl) {
|
||||
console.debug(`Discovered API at ${endpoint}`);
|
||||
}
|
||||
url = endpoint;
|
||||
instanceUrl = endpoint;
|
||||
} catch {
|
||||
// noop
|
||||
}
|
||||
|
||||
defaults.baseUrl = url;
|
||||
defaults.headers = { 'x-api-key': key };
|
||||
defaults.baseUrl = instanceUrl;
|
||||
defaults.headers = { 'x-api-key': apiKey };
|
||||
|
||||
const [error] = await withError(getMyUserInfo());
|
||||
if (isHttpError(error)) {
|
||||
logError(error, 'Failed to connect to server');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return { url, key };
|
||||
};
|
||||
|
||||
export const logError = (error: unknown, message: string) => {
|
||||
@ -73,12 +66,7 @@ export const readAuthFile = async (dir: string) => {
|
||||
try {
|
||||
const data = await readFile(getAuthFilePath(dir));
|
||||
// TODO add class-transform/validation
|
||||
const auth = yaml.parse(data.toString()) as AuthDto | OldAuthDto;
|
||||
const { instanceUrl, apiKey } = auth as OldAuthDto;
|
||||
if (instanceUrl && apiKey) {
|
||||
return { url: instanceUrl, key: apiKey };
|
||||
}
|
||||
return auth as AuthDto;
|
||||
return yaml.parse(data.toString()) as AuthDto;
|
||||
} catch (error: Error | any) {
|
||||
if (error.code === 'ENOENT' || error.code === 'ENOTDIR') {
|
||||
console.log('No auth file exists. Please login first.');
|
||||
@ -99,74 +87,3 @@ export const withError = async <T>(promise: Promise<T>): Promise<[Error, undefin
|
||||
return [error, undefined];
|
||||
}
|
||||
};
|
||||
|
||||
export interface CrawlOptions {
|
||||
pathsToCrawl: string[];
|
||||
recursive?: boolean;
|
||||
includeHidden?: boolean;
|
||||
exclusionPatterns?: string[];
|
||||
extensions: string[];
|
||||
}
|
||||
export const crawl = async (options: CrawlOptions): Promise<string[]> => {
|
||||
const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPatterns, includeHidden } = options;
|
||||
const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));
|
||||
|
||||
if (pathsToCrawl.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const patterns: string[] = [];
|
||||
const crawledFiles: string[] = [];
|
||||
|
||||
for await (const currentPath of pathsToCrawl) {
|
||||
try {
|
||||
const stats = await stat(currentPath);
|
||||
if (stats.isFile() || stats.isSymbolicLink()) {
|
||||
crawledFiles.push(currentPath);
|
||||
} else {
|
||||
patterns.push(currentPath);
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
patterns.push(currentPath);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let searchPattern: string;
|
||||
if (patterns.length === 1) {
|
||||
searchPattern = patterns[0];
|
||||
} else if (patterns.length === 0) {
|
||||
return crawledFiles;
|
||||
} else {
|
||||
searchPattern = '{' + patterns.join(',') + '}';
|
||||
}
|
||||
|
||||
if (recursive) {
|
||||
searchPattern = searchPattern + '/**/';
|
||||
}
|
||||
|
||||
searchPattern = `${searchPattern}/*.{${extensions.join(',')}}`;
|
||||
|
||||
const globbedFiles = await glob(searchPattern, {
|
||||
absolute: true,
|
||||
nocase: true,
|
||||
nodir: true,
|
||||
dot: includeHidden,
|
||||
ignore: exclusionPatterns,
|
||||
});
|
||||
|
||||
return [...crawledFiles, ...globbedFiles].sort();
|
||||
};
|
||||
|
||||
export const sha1 = (filepath: string) => {
|
||||
const hash = createHash('sha1');
|
||||
return new Promise<string>((resolve, reject) => {
|
||||
const rs = createReadStream(filepath);
|
||||
rs.on('error', reject);
|
||||
rs.on('data', (chunk) => hash.update(chunk));
|
||||
rs.on('end', () => resolve(hash.digest('hex')));
|
||||
});
|
||||
};
|
||||
|
(binary image changed: 1.8 MiB before, 1.7 MiB after)
@ -2,6 +2,8 @@
|
||||
# - https://immich.app/docs/developer/setup
|
||||
# - https://immich.app/docs/developer/troubleshooting
|
||||
|
||||
version: '3.8'
|
||||
|
||||
name: immich-dev
|
||||
|
||||
x-server-build: &server-common
|
||||
|
@ -1,3 +1,5 @@
|
||||
version: '3.8'
|
||||
|
||||
name: immich-prod
|
||||
|
||||
x-server-build: &server-common
|
||||
@ -76,7 +78,7 @@ services:
|
||||
container_name: immich_prometheus
|
||||
ports:
|
||||
- 9090:9090
|
||||
image: prom/prometheus@sha256:dec2018ae55885fed717f25c289b8c9cff0bf5fbb9e619fb49b6161ac493c016
|
||||
image: prom/prometheus@sha256:5ccad477d0057e62a7cd1981ffcc43785ac10c5a35522dc207466ff7e7ec845f
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
- prometheus-data:/prometheus
|
||||
@ -88,7 +90,7 @@ services:
|
||||
command: ['./run.sh', '-disable-reporting']
|
||||
ports:
|
||||
- 3000:3000
|
||||
image: grafana/grafana:10.4.1-ubuntu@sha256:65e0e7d0f0b001cb0478bce5093bff917677dc308dd27a0aa4b3ac38e4fd877c
|
||||
image: grafana/grafana:10.4.0-ubuntu@sha256:c1f582b7cc4c1b9805d187b5600ce7879550a12ef6d29571da133c3d3fc67a9c
|
||||
volumes:
|
||||
- grafana-data:/var/lib/grafana
|
||||
|
||||
|
@ -1,3 +1,5 @@
|
||||
version: '3.8'
|
||||
|
||||
#
|
||||
# WARNING: Make sure to use the docker-compose.yml of the current release:
|
||||
#
|
||||
@ -69,8 +71,9 @@ services:
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
volumes:
|
||||
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
model-cache:
|
||||
|
@ -14,6 +14,5 @@ DB_PASSWORD=postgres
|
||||
DB_HOSTNAME=immich_postgres
|
||||
DB_USERNAME=postgres
|
||||
DB_DATABASE_NAME=immich
|
||||
DB_DATA_LOCATION=./postgres
|
||||
|
||||
REDIS_HOSTNAME=immich_redis
|
||||
|
@ -10,8 +10,8 @@ Hello everyone, it is my pleasure to deliver the new release of Immich to you. T
|
||||
|
||||
Some notable features are:
|
||||
|
||||
- OAuth integration
|
||||
- LivePhoto support on iOS
|
||||
- [OAuth integration](#livephoto-ios-support-)
|
||||
- [LivePhoto support on iOS](#oauth-integration-)
|
||||
- User config system
|
||||
|
||||
<!--truncate-->
|
||||
|
@ -110,7 +110,7 @@ No. Our golden rule is that the original assets should always be untouched, so w
|
||||
|
||||
### How can I move all data (photos, persons, albums) from one user to another?
|
||||
|
||||
This is not officially supported, but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the `psql` command), or you can add for example an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file, so that you can use a web-interface.
|
||||
This is not officially supported, but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the psql command), or you can add for example an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file, so that you can use a web-interface.
|
||||
|
||||
:::warning
|
||||
This is an advanced operation. If you can't do it with the steps described here, this is not for you.
|
||||
@ -253,19 +253,8 @@ The initial backup is the most intensive due to the number of jobs running. The
|
||||
|
||||
### Can I limit the amount of CPU and RAM usage?
|
||||
|
||||
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows. To limit this, you can add the following to the `docker-compose.yml` block of any containers that you want to have limited resources.
|
||||
|
||||
```yaml
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
# Number of CPU threads
|
||||
cpus: '1.00'
|
||||
# Gigabytes of memory
|
||||
memory: '1G'
|
||||
```
|
||||
|
||||
For more details, you can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit).
|
||||
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows.
|
||||
You can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit) to learn how to limit this.
|
||||
|
||||
### How can I boost machine learning speed?
|
||||
|
||||
@ -299,25 +288,10 @@ Immich components are typically deployed using docker. To see logs for deployed
|
||||
### How can I run Immich as a non-root user?
|
||||
|
||||
You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.
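A minimal sketch of that override (the UID:GID shown here is a placeholder; use an unprivileged host user that owns your mounts):

```yaml
services:
  immich-server:
    # Placeholder UID:GID of an unprivileged host user
    user: 1000:1000
```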
|
||||
You may need to add mount points or docker volumes for the following internal container paths:
|
||||
|
||||
- `immich-machine-learning:/.config`
|
||||
- `immich-machine-learning:/.cache`
|
||||
- `redis:/data`
|
||||
You may need to add an additional volume to `immich-microservices` that mounts internally to `/usr/src/app/.reverse-geocoding-dump`.
|
||||
|
||||
The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION`.
|
||||
|
||||
For a further hardened system, you can add the following block to every container except for `immich_postgres`.
|
||||
|
||||
```yaml
|
||||
security_opt:
|
||||
# Prevent escalation of privileges after container is started
|
||||
- no-new-privileges:true
|
||||
cap_drop:
|
||||
# Prevent access to raw network traffic
|
||||
- NET_RAW
|
||||
```
|
||||
|
||||
### How can I **purge** data from Immich?
|
||||
|
||||
Data for Immich comes in two forms:
|
||||
@ -337,7 +311,7 @@ docker compose down -v
|
||||
|
||||
:::note Portainer
|
||||
If you use portainer, bring down the stack in portainer. Go into the volumes section
|
||||
and remove all the volumes related to immich then restart the stack.
|
||||
and remove all the volumes related to immich, then restart the stack.
|
||||
:::
|
||||
|
||||
After removing the containers and volumes, the **Files** should be removed from the `UPLOAD_LOCATION` to provide a clean start.
|
||||
|
@ -20,8 +20,8 @@ The recommended way to backup and restore the Immich database is to use the `pg_
|
||||
<Tabs>
|
||||
<TabItem value="Linux system based Backup" label="Linux system based Backup" default>
|
||||
|
||||
```bash title='Backup'
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
|
||||
```bash title='Bash'
|
||||
docker exec -t immich_postgres pg_dumpall -c -U postgres | gzip > "/path/to/backup/dump.sql.gz"
|
||||
```
|
||||
|
||||
```bash title='Restore'
|
||||
@ -30,7 +30,7 @@ docker compose pull # Update to latest version of Immich (if desired)
|
||||
docker compose create # Create Docker containers for Immich apps without running them.
|
||||
docker start immich_postgres # Start Postgres server
|
||||
sleep 10 # Wait for Postgres server to start up
|
||||
gunzip < "/path/to/backup/dump.sql.gz" | docker exec -i immich_postgres psql --username=postgres # Restore Backup
|
||||
gunzip < "/path/to/backup/dump.sql.gz" | docker exec -i immich_postgres psql -U postgres -d immich # Restore Backup
|
||||
docker compose up -d # Start remainder of Immich apps
|
||||
```
|
||||
|
||||
@ -38,7 +38,7 @@ docker compose up -d # Start remainder of Immich apps
|
||||
<TabItem value="Windows system based Backup" label="Windows system based Backup">
|
||||
|
||||
```powershell title='Backup'
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "\path\to\backup\dump.sql"
|
||||
docker exec -t immich_postgres pg_dumpall -c -U postgres > "\path\to\backup\dump.sql"
|
||||
```
|
||||
|
||||
```powershell title='Restore'
|
||||
@ -47,7 +47,7 @@ docker compose pull # Update to latest version of Immich (if desired)
|
||||
docker compose create # Create Docker containers for Immich apps without running them.
|
||||
docker start immich_postgres # Start Postgres server
|
||||
sleep 10 # Wait for Postgres server to start up
|
||||
gc "C:\path\to\backup\dump.sql" | docker exec -i immich_postgres psql --username=postgres # Restore Backup
|
||||
gc "C:\path\to\backup\dump.sql" | docker exec -i immich_postgres psql -U postgres -d immich # Restore Backup
|
||||
docker compose up -d # Start remainder of Immich apps
|
||||
```
|
||||
|
||||
@ -68,11 +68,10 @@ services:
|
||||
- .env
|
||||
environment:
|
||||
POSTGRES_HOST: database
|
||||
POSTGRES_CLUSTER: 'TRUE'
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
SCHEDULE: "@daily"
|
||||
POSTGRES_EXTRA_OPTS: '--clean --if-exists'
|
||||
BACKUP_DIR: /db_dumps
|
||||
volumes:
|
||||
- ./db_dumps:/db_dumps
|
||||
@ -83,7 +82,7 @@ services:
|
||||
Then you can restore with the same command but pointed at the latest dump.
|
||||
|
||||
```bash title='Automated Restore'
|
||||
gunzip < db_dumps/last/immich-latest.sql.gz | docker exec -i immich_postgres psql --username=postgres
|
||||
gunzip < db_dumps/last/immich-latest.sql.gz | docker exec -i immich_postgres psql -U postgres -d immich
|
||||
```
|
||||
|
||||
## Filesystem
|
||||
|
(binary images removed: 68 KiB, 54 KiB, 60 KiB)
@ -1,13 +1,9 @@
|
||||
# Jobs
|
||||
|
||||
The `immich-server` responds to API requests for data and files for the web and mobile app. To do this quickly and reliably, it offloads most other work to `immich-microservices` in the form of _jobs_. Simply put, a job is a request to process data in the background. Jobs are picked up automatically by microservices containers.
|
||||
Several Immich functionalities are implemented as jobs, which run in the background. To view the status of a job navigate to the Administration Screen, and then the `Jobs` page.
|
||||
|
||||
When a new asset is uploaded it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job navigate to the Administration -> Jobs page.
|
||||
|
||||
Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.
|
||||

|
||||
|
||||
:::info
|
||||
Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
|
||||
:::
|
||||
|
||||
<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />
|
||||
|
docs/docs/administration/password-login.md (new file)
@ -0,0 +1,32 @@
|
||||
# Password Login
|
||||
|
||||
An overview of password login and related settings for Immich.
|
||||
|
||||
## Enable/Disable
|
||||
|
||||
Immich supports password login, which is enabled by default. The preferred way to disable it is via the [Administration Page](#administration-page), although it can also be changed via a [Server Command](#server-command).
|
||||
|
||||
### Administration Page
|
||||
|
||||
To toggle the password login setting via the web, navigate to the Administration page, expand "Password Authentication", toggle the "Enabled" switch, and press "Save".
|
||||
|
||||

|
||||
|
||||
### Server Command
|
||||
|
||||
There are two [Server Commands](/docs/administration/server-commands.md) for password login:
|
||||
|
||||
1. `enable-password-login`
|
||||
2. `disable-password-login`
|
||||
|
||||
See [Server Commands](/docs/administration/server-commands.md) for more details about how to run them.
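As a rough sketch only, assuming the default `immich_server` container name and the `immich-admin` entrypoint described on that page (the exact entrypoint may differ between versions), running one of them could look like:

```bash
# Hypothetical invocation; adjust the container name and entrypoint to your deployment
docker exec -it immich_server immich-admin disable-password-login
```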
|
||||
|
||||
## Password Reset
|
||||
|
||||
### Admin
|
||||
|
||||
To reset the administrator password, use the `reset-admin-password` [Server Command](/docs/administration/server-commands.md).
|
||||
|
||||
### User
|
||||
|
||||
Immich does not currently support self-service password reset. However, an administrator can reset passwords for other users. See [User Management: Password Reset](/docs/administration/user-management.mdx#password-reset) for more information about how to do this.
|
@ -1,66 +0,0 @@
|
||||
# Pre-existing Postgres
|
||||
|
||||
While not officially recommended, it is possible to run Immich using a pre-existing Postgres server. To use this setup, you should have a baseline level of familiarity with Postgres and the Linux command line. If you do not have these, we recommend using the default setup with a dedicated Postgres container.
|
||||
|
||||
By default, Immich expects superuser permission on the Postgres database and requires certain extensions to be installed. This guide outlines the steps required to prepare a pre-existing Postgres server to be used by Immich.
|
||||
|
||||
:::tip
|
||||
Running with a pre-existing Postgres server can unlock powerful administrative features, including logical replication, data page checksums, and streaming write-ahead log backups using programs like pgBackRest or Barman.
|
||||
:::
|
||||
|
||||
## Prerequisites
|
||||
|
||||
You must install pgvecto.rs using their [instructions](https://docs.pgvecto.rs/getting-started/installation.html). After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`.
|
||||
|
||||
:::note
|
||||
Make sure the installed version of pgvecto.rs is compatible with your version of Immich. For example, if your Immich version uses the dedicated database image `tensorchord/pgvecto-rs:pg14-v0.2.1`, you must install pgvecto.rs `>= 0.2.1, < 0.3.0`.
|
||||
:::
|
||||
|
||||
## Specifying the connection URL
|
||||
|
||||
You can connect to your pre-existing Postgres server by setting the `DB_URL` environment variable in the `.env` file.
|
||||
|
||||
```
|
||||
DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename'
|
||||
|
||||
# require a SSL connection to Postgres
|
||||
# DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename?sslmode=require'
|
||||
|
||||
# require a SSL connection, but don't enforce checking the certificate name
|
||||
# DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename?sslmode=require&sslmode=no-verify'
|
||||
```
|
||||
|
||||
## With superuser permission
|
||||
|
||||
Typically Immich expects superuser permission in the database, which you can grant by running `ALTER USER <immichdbusername> WITH SUPERUSER;` at the `psql` console. If you prefer not to grant superuser permissions, follow the instructions in the next section.
|
||||
|
||||
## Without superuser permission
|
||||
|
||||
:::caution
|
||||
This method is recommended for **advanced users only** and often requires manual intervention when updating Immich.
|
||||
:::
|
||||
|
||||
Immich can run without superuser permissions by following the below instructions at the `psql` prompt to prepare the database.
|
||||
|
||||
```sql title="Set up Postgres for Immich"
|
||||
CREATE DATABASE <immichdatabasename>;
|
||||
\c <immichdatabasename>
|
||||
BEGIN;
|
||||
ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>;
|
||||
CREATE EXTENSION vectors;
|
||||
CREATE EXTENSION earthdistance CASCADE;
|
||||
ALTER DATABASE <immichdatabasename> SET search_path TO "$user", public, vectors;
|
||||
GRANT USAGE ON SCHEMA vectors TO <immichdbusername>;
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA vectors GRANT SELECT ON TABLES TO <immichdbusername>;
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
### Updating pgvecto.rs
|
||||
|
||||
When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`.
|
||||
|
||||
### Common errors
|
||||
|
||||
#### Permission denied for view
|
||||
|
||||
If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat to <immichdbusername>;`.
|
@ -1,27 +0,0 @@
|
||||
# Repair Page
|
||||
|
||||
The repair page is designed to give information to the system administrator about files that are not tracked, or offline paths.
|
||||
|
||||
## Natural State
|
||||
|
||||
In this situation, everything is in its place and there is no problem that the system administrator should be aware of.
|
||||
|
||||
<img src={require('./img/repair-page.png').default} title="server statistic" />
|
||||
|
||||
## Any Other Situation
|
||||
|
||||
:::note RAM Usage
|
||||
Several users report a situation where the page fails to load. To solve this problem, try allocating more RAM to Immich; if the problem continues, stop using the reverse proxy while loading the page.
|
||||
:::
|
||||
|
||||
In any other situation, there are 3 different options that can appear:
|
||||
|
||||
- MATCHES - These files are matched by their checksums.
|
||||
|
||||
- OFFLINE PATHS - These files are the result of manually deleting files in the upload library or a failed file move in the past (losing track of a file).
|
||||
|
||||
- UNTRACKED FILES - These files are not tracked by the application. They can be the result of failed moves, interrupted uploads, or left behind due to a bug.
|
||||
|
||||
In addition, you can download the information from a page, mark everything (in order to check hashing) and correct the problem if a match is found in the hashing.
|
||||
|
||||
<img src={require('./img/repair-page-1.png').default} title="server statistic" />
|
@ -1,41 +1,29 @@
|
||||
# Reverse Proxy
|
||||
|
||||
Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Real-IP`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow for big enough uploads. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.
|
||||
|
||||
:::note
|
||||
The Repair page can take a long time to load. To avoid server timeouts or errors, we recommend specifying a timeout of at least 10 minutes on your proxy server.
|
||||
:::
|
||||
Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Forwarded-Host`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow for big enough uploads. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.
|
||||
|
||||
### Nginx example config
|
||||
|
||||
Below is an example config for nginx. Make sure to set `public_url` to the front-facing URL of your instance, and `backend_url` to the path of the Immich server.
|
||||
Below is an example config for nginx. Make sure to include `client_max_body_size 50000M;` also in a `http` block in `/etc/nginx/nginx.conf`.
|
||||
|
||||
```nginx
|
||||
server {
|
||||
server_name <public_url>;
|
||||
server_name <snip>
|
||||
|
||||
# allow large file uploads
|
||||
client_max_body_size 50000M;
|
||||
|
||||
# Set headers
|
||||
location / {
|
||||
proxy_pass http://<snip>:2283;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# enable websockets: http://nginx.org/en/docs/http/websocket.html
|
||||
# http://nginx.org/en/docs/http/websocket.html
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_redirect off;
|
||||
|
||||
# set timeout
|
||||
proxy_read_timeout 600s;
|
||||
proxy_send_timeout 600s;
|
||||
send_timeout 600s;
|
||||
|
||||
location / {
|
||||
proxy_pass http://<backend_url>:2283;
|
||||
}
|
||||
}
|
||||
```
|
||||
@ -54,13 +42,15 @@ immich.example.org {
|
||||
|
||||
Below is an example config for Apache2 site configuration.
|
||||
|
||||
```ApacheConf
|
||||
```
|
||||
<VirtualHost *:80>
|
||||
ServerName <snip>
|
||||
ProxyRequests Off
|
||||
# set timeout in seconds
|
||||
ProxyPass / http://127.0.0.1:2283/ timeout=600 upgrade=websocket
|
||||
ProxyPassReverse / http://127.0.0.1:2283/
|
||||
ProxyPreserveHost On
|
||||
|
||||
</VirtualHost>
|
||||
```
|
||||
|
||||
**timeout:** is measured in seconds, and it is particularly useful when long operations are triggered (e.g. Repair), so the server doesn't return an error.
|
||||
|
@ -1,13 +0,0 @@
|
||||
# Server Stats
|
||||
|
||||
Server statistics to show the total number of videos, photos, and usage per user.
|
||||
|
||||
:::info
|
||||
If a storage quota has been defined for the user, the usage number will be displayed as a percentage of the total storage quota allocated to them.
|
||||
:::
|
||||
|
||||
:::info External library
|
||||
External library is not included in the storage quota.
|
||||
:::
|
||||
|
||||
<img src={require('./img/server-stats.png').default} title="server statistic" />
|
@ -1,173 +0,0 @@
|
||||
# System Settings
|
||||
|
||||
On the system settings page, the administrator can manage global settings for the Immich instance.
|
||||
|
||||
:::note
|
||||
Viewing and modifying the system settings is restricted to the Administrator.
|
||||
:::
|
||||
|
||||
:::tip
|
||||
You can always return to the default settings by clicking the `Reset to default` button.
|
||||
:::
|
||||
|
||||
## Job Settings
|
||||
|
||||
Using these settings, you can determine the amount of work that will run concurrently for each task in microservices. Some tasks can be set to higher values on computers with powerful hardware and storage with good I/O capabilities.
|
||||
|
||||
With higher concurrency, the host will work on more assets in parallel. This improves throughput, not latency: for example, it will make Smart Search jobs process more quickly, but it won't make searching faster.
|
||||
|
||||
Keep in mind that jobs like Smart Search, Face Detection, Facial Recognition, and Transcode Videos require a **lot** of processing power, so do not set their concurrency too high or you will likely overload the server.
|
||||
|
||||
:::info Facial Recognition Concurrency
|
||||
The Facial Recognition Concurrency value cannot be changed because
|
||||
[DBSCAN](https://www.youtube.com/watch?v=RDZUdRSDOok) is traditionally sequential, but there are parallel implementations of it out there. Our implementation isn't parallel.
|
||||
:::
|
||||
|
||||
## External Library
|
||||
|
||||
### Library watching (EXPERIMENTAL)
|
||||
|
||||
External libraries can automatically import changed files without a full rescan. It will import the file whenever the operating system reports a file change. If your photos are mounted over the network, this does not work.
|
||||
|
||||
### Periodic Scanning
|
||||
|
||||
You can define a custom interval for the trigger external library rescan under Administration -> Settings -> Library.
|
||||
You can set the scanning interval using the preset or cron format. For more information please refer to e.g. [Crontab Guru](https://crontab.guru/).
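For instance, an illustrative cron expression for a nightly rescan at 02:00 is:

```
0 2 * * *
```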
|
||||
|
||||
## Logging
|
||||
|
||||
By default, logs are recorded at the Log level. The administrator can choose a more or less verbose level as needed, or as requested by the Immich support team.
|
||||
|
||||
Here you can [learn about the different error levels](https://sematext.com/blog/logging-levels/).
|
||||
|
||||
## Machine Learning Settings
|
||||
|
||||
Through these settings, you can manage everything related to machine learning in Immich, from remote machine learning to the models and their parameters.
You can choose to disable a certain type of machine learning, for example smart search or facial recognition.
|
||||
|
||||
### Smart Search
|
||||
|
||||
The smart search settings control the [CLIP](https://openai.com/research/clip) model used for searching, which [can be changed](/docs/FAQ#can-i-use-a-custom-clip-model). Different models can give better results but may consume more processing power. When changing a model, you must re-run the Smart Search job on all images to fully apply the change.
|
||||
|
||||
:::info Internet connection
|
||||
Changing models requires a connection to the Internet to download the model.
|
||||
After downloading, there is no need for Immich to connect to the network, unless version checking has been enabled in the settings.
|
||||
:::
|
||||
|
||||
### Facial Recognition
|
||||
|
||||
Under these settings, you can change the facial recognition settings.
Editable settings:
|
||||
|
||||
- **Facial Recognition Model -** Models are listed in descending order of size. Larger models are slower and use more memory, but produce better results. Note that you must re-run the Face Detection job for all images upon changing a model.
|
||||
- **Min Detection Score -** Minimum confidence score for a face to be detected from 0-1. Lower values will detect more faces but may result in false positives.
|
||||
- **Max Recognition Distance -** Maximum distance between two faces to be considered the same person, ranging from 0-2. Lowering this can prevent labeling two people as the same person, while raising it can prevent labeling the same person as two different people. Note that it is easier to merge two people than to split one person in two, so err on the side of a lower threshold when possible.
|
||||
- **Min Recognized Faces -** The minimum number of recognized faces for a person to be created (AKA: Core face). Increasing this makes Facial Recognition more precise at the cost of increasing the chance that a face is not assigned to a person.
|
||||
|
||||
:::info
|
||||
When changing the values of Min Detection Score, Max Recognition Distance, or Min Recognized Faces, you will have to restart **only** the job FACIAL RECOGNITION - ALL.
|
||||
|
||||
If you replace the Facial Recognition Model, you will have to run the job FACE DETECTION - ALL.
|
||||
:::
|
||||
|
||||
:::tip identical twins
|
||||
If you have twins, you might want to lower the Max Recognition Distance value; decreasing this a **bit** can help Immich distinguish between them.
|
||||
:::
|
||||
|
||||
## Map & GPS Settings
|
||||
|
||||
### Map Settings
|
||||
|
||||
In these settings, you can change the appearance of the map in night and day modes according to your personal preference and the supported options.
The map style can be adjusted via [OpenMapTiles](https://openmaptiles.org/styles/), for example.
|
||||
|
||||
### Reverse Geocoding Settings
|
||||
|
||||
Immich supports [Reverse Geocoding](/docs/features/reverse-geocoding) using data from the [GeoNames](https://www.geonames.org/) geographical database.
|
||||
|
||||
## OAuth Authentication
|
||||
|
||||
Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
|
||||
|
||||
## Password Authentication
|
||||
|
||||
The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log in using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to log in using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
|
||||
|
||||
:::tip
|
||||
You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
|
||||
:::
|
||||
|
||||
## Server Settings
|
||||
|
||||
### External Domain
|
||||
|
||||
When set, this will override the domain name used when viewing and copying a shared link.
|
||||
|
||||
### Welcome Message
|
||||
|
||||
The administrator can set a custom message on the login screen (the message will be displayed to all users).
|
||||
|
||||
## Storage Template
|
||||
|
||||
Immich supports a custom [Storage Template](/docs/administration/storage-template). Learn more about this feature and its configuration [here](/docs/administration/storage-template).
|
||||
|
||||
## Theme Settings
|
||||
|
||||
You can write custom CSS that will get loaded in the web application for all users. This enables administrators to change fonts, colors, and other styles.
|
||||
|
||||
For example:
|
||||
|
||||
```css title='Custom CSS'
|
||||
p {
|
||||
color: green;
|
||||
}
|
||||
```
|
||||
|
||||
## Thumbnail Settings
|
||||
|
||||
By default, Immich creates three thumbnails for each asset:
Blurred (thumbhash), Small (webp), and Large (jpeg). Using these settings, you can change the quality of the thumbnail files that are created.
|
||||
|
||||
**Small thumbnail resolution**
|
||||
Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||
|
||||
**Large thumbnail resolution**
|
||||
Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||
|
||||
**Quality**
|
||||
Thumbnail quality from 1-100. Higher is better for quality but produces larger files.
|
||||
|
||||
**Prefer wide gamut**
|
||||
Use Display P3 for thumbnails. This better preserves the vibrance of images with wide color spaces, but images may appear different on older devices with an old browser version. sRGB images are kept as sRGB to avoid color shifts.
|
||||
|
||||
:::tip
|
||||
The resolution for Large thumbnails can be lowered from 1440p (the default) to 1080p or 720p to save storage space.
|
||||
:::
|
||||
|
||||
## Trash Settings
|
||||
|
||||
If the system administrator has enabled the trash for deleted files, deleted files remain in the trash until the deletion date: 30 days by default, or as defined by the system administrator.
|
||||
|
||||
The trash can be disabled; however, this is not recommended, as files deleted in the future will then be permanently deleted immediately.
|
||||
|
||||
:::tip Keyboard shortcut for permanent deletion
|
||||
You can select assets in the timeline and press Ctrl + Del to quickly and permanently delete them without sending them to the trash.
|
||||
:::
|
||||
|
||||
## User Settings
|
||||
|
||||
### Delete delay
|
||||
|
||||
The system administrator can delete users through the administration panel, either immediately or after a delay (7 days by default). This action permanently deletes a user's account and assets. The user deletion job runs at midnight to check for users that are ready for deletion. Changes to this setting will be evaluated at the next execution.
|
||||
|
||||
## Version Check
|
||||
|
||||
When this option is enabled, the `immich-server` container will periodically make requests to GitHub to check for new releases.
|
||||
|
||||
## Video Transcoding Settings
|
||||
|
||||
The system administrator can define parameters according to which video files will be converted to different formats (depending on the settings). The settings can be changed in depth; to learn more about the terminology used here, refer to the FFmpeg documentation for the [H.264](https://trac.ffmpeg.org/wiki/Encode/H.264), [HEVC](https://trac.ffmpeg.org/wiki/Encode/H.265) and [VP9](https://trac.ffmpeg.org/wiki/Encode/VP9) codecs.
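As a rough illustration of that terminology only (this is not the exact command Immich runs, and the preset and CRF values here are assumptions for the sake of example), an H.264 transcode in plain FFmpeg terms looks like this:

```bash title='Illustrative FFmpeg transcode'
# Illustration only: encode to H.264 with a speed preset and a constant rate factor (CRF),
# the same knobs exposed by the video transcoding settings.
ffmpeg -i input.mov -c:v libx264 -preset ultrafast -crf 23 -c:a aac output.mp4
```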
|
@ -1,12 +0,0 @@
|
||||
# Community Projects
|
||||
|
||||
This page lists community projects that are built around Immich, but not officially supported by the development team.
|
||||
|
||||
:::warning
|
||||
This list comes with no guarantees about security, performance, reliability, or accuracy. Use at your own risk.
|
||||
:::
|
||||
|
||||
import CommunityProjects from '../src/components/community-projects.tsx';
|
||||
import React from 'react';
|
||||
|
||||
<CommunityProjects />
|
@ -18,11 +18,12 @@ Thanks for being interested in contributing 😊
|
||||
|
||||
### Server and web app
|
||||
|
||||
This environment includes the services below. Additional details are available in each service's README.
|
||||
This environment includes the following services:
|
||||
|
||||
- Server - [`/server`](https://github.com/immich-app/immich/tree/main/server)
|
||||
- Web app - [`/web`](https://github.com/immich-app/immich/tree/main/web)
|
||||
- Machine learning - [`/machine-learning`](https://github.com/immich-app/immich/tree/main/machine-learning)
|
||||
- Core server - `/server/src/immich`
|
||||
- Machine learning - `/machine-learning`
|
||||
- Microservices - `/server/src/microservicess`
|
||||
- Web app - `/web`
|
||||
- Redis
|
||||
- PostgreSQL development database with exposed port `5432`, so you can use any database client to access it (for example `psql`, as sketched below)
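For example, a minimal sketch assuming the development compose file's default credentials (`postgres`/`postgres`, database name `immich`); adjust if your setup differs:

```bash title='Connect with psql'
# Credentials below are the assumed development defaults, not production values
PGPASSWORD=postgres psql --host=localhost --port=5432 --username=postgres immich
```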
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
# The Immich CLI
|
||||
|
||||
Immich has a command line interface (CLI) that allows you to perform certain actions from the command line.
|
||||
Immich has a CLI that allows you to perform certain actions from the command line. This CLI replaces the [legacy CLI](https://github.com/immich-app/CLI) that was previously available. The CLI is hosted in the [cli folder of the the main Immich github repository](https://github.com/immich-app/immich/tree/main/cli).
|
||||
|
||||
## Features
|
||||
|
||||
@ -55,17 +55,14 @@ Command line interface for Immich
|
||||
|
||||
Options:
|
||||
-V, --version output the version number
|
||||
-d, --config-directory <directory> Configuration directory where auth.yml will be stored (default: "~/.config/immich/", env:
|
||||
IMMICH_CONFIG_DIR)
|
||||
-u, --url [url] Immich server URL (env: IMMICH_INSTANCE_URL)
|
||||
-k, --key [key] Immich API key (env: IMMICH_API_KEY)
|
||||
-d, --config Configuration directory (env: IMMICH_CONFIG_DIR)
|
||||
-h, --help display help for command
|
||||
|
||||
Commands:
|
||||
login|login-key <url> <key> Login using an API key
|
||||
logout Remove stored credentials
|
||||
server-info Display server information
|
||||
upload [options] [paths...] Upload assets
|
||||
server-info Display server information
|
||||
login-key [instanceUrl] [apiKey] Login using an API key
|
||||
logout Remove stored credentials
|
||||
help [command] display help for command
|
||||
```
|
||||
|
||||
@ -74,7 +71,7 @@ Commands:
|
||||
The upload command supports the following options:
|
||||
|
||||
```
|
||||
Usage: immich upload [paths...] [options]
|
||||
Usage: immich upload [options] [paths...]
|
||||
|
||||
Upload assets
|
||||
|
||||
@ -83,13 +80,12 @@ Arguments:
|
||||
|
||||
Options:
|
||||
-r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE)
|
||||
-i, --ignore [paths...] Paths to ignore (default: [], env: IMMICH_IGNORE_PATHS)
|
||||
-i, --ignore [paths...] Paths to ignore (env: IMMICH_IGNORE_PATHS)
|
||||
-h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH)
|
||||
-H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN)
|
||||
-a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM)
|
||||
-A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME)
|
||||
-n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN)
|
||||
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
|
||||
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
|
||||
--help display help for command
|
||||
```
|
||||
@ -101,13 +97,13 @@ Note that the above options can read from environment variables as well.
|
||||
You begin by authenticating to your Immich server.
|
||||
|
||||
```bash
|
||||
immich login [url] [key]
|
||||
immich login-key [instanceUrl] [apiKey]
|
||||
```
|
||||
|
||||
For instance,
|
||||
|
||||
```bash
|
||||
immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG
|
||||
immich login-key http://192.168.1.216:2283/api HFEJ38DNSDUEG
|
||||
```
|
||||
|
||||
This will store your credentials in an `auth.yml` file in the configuration directory, which defaults to `~/.config/`. The directory can be set with the `-d` option or the `IMMICH_CONFIG_DIR` environment variable. Please keep the file secure, either by running the logout command after you are done, or by deleting it manually. For example:
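A sketch using the global options shown above (the directory path is only an example):

```bash
# Store auth.yml somewhere other than the default configuration directory
immich -d ~/.immich-cli login http://192.168.1.216:2283/api HFEJ38DNSDUEG

# Or the equivalent via the environment variable
IMMICH_CONFIG_DIR=~/.immich-cli immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG
```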
|
||||
|
BIN
docs/docs/features/img/search-ex-2.webp
Normal file
After Width: | Height: | Size: 162 KiB |
BIN
docs/docs/features/img/search-ex-3.webp
Normal file
After Width: | Height: | Size: 59 KiB |
@ -27,8 +27,8 @@ The metrics in immich are grouped into API (endpoint calls and response times),
|
||||
|
||||
Immich will not expose an endpoint for metrics by default. To enable this endpoint, you can add the `IMMICH_METRICS=true` environmental variable to your `.env` file. Note that only the server and microservices containers currently use this variable.
|
||||
|
||||
:::tip
|
||||
`IMMICH_METRICS` enables all metrics, but there are also [environmental variables](/docs/install/environment-variables.md#prometheus) to toggle specific metric groups. If you'd like to only expose certain kinds of metrics, you can set only those environmental variables to `true`. Explicitly setting the environmental variable for a metric group overrides `IMMICH_METRICS` for that group. For example, setting `IMMICH_METRICS=true` and `IMMICH_API_METRICS=false` will enable all metrics except API metrics.
|
||||
:::note
|
||||
`IMMICH_METRICS` is equivalent to enabling the following three environmental variables: `IMMICH_API_METRICS`, `IMMICH_HOST_METRICS`, and `IMMICH_IO_METRICS`. If you would like to only expose certain kinds of metrics, you can set only those environmental variables to `true`. Explicitly setting the environmental variable for a metric group overrides `IMMICH_METRICS` for that group.
|
||||
:::
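For example, a minimal `.env` sketch matching the description above:

```bash title='.env'
# Enable all metric groups, then explicitly opt out of API metrics
IMMICH_METRICS=true
IMMICH_API_METRICS=false
```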
|
||||
|
||||
The next step is to configure a new or existing Prometheus instance to scrape this endpoint. The following steps assume that you do not have an existing Prometheus instance, but the steps will be similar either way.
|
||||
|
@ -1,57 +1,17 @@
|
||||
# Partner Sharing
|
||||
|
||||
Immich allows you to share your library with other users. They can then view your library and download the assets.
|
||||
|
||||
You can manage one or multiple users to have access to your library from the [User Settings](docs/features/user-settings.md) page.
|
||||
|
||||
<img src={require('./img/partner-sharing-1.png').default} title='Partner Sharing 1' />
|
||||
|
||||
<img src={require('./img/partner-sharing-2.png').default} title='Partner Sharing 2' />
|
||||
|
||||
Accessing the shared library can be done from the Sharing page.
|
||||
|
||||
<img src={require('./img/partner-sharing-3.png').default} title='Partner Sharing 3' />
|
||||
|
||||
:::tip Sharing specific assets
|
||||
For sharing a specific set of assets, you can use the shared album feature of Immich.
|
||||
:::
|
||||
|
||||
Immich allows you to share your library with other users. They can then view your library and download the assets. You can manage Partner Sharing from the [User Settings](docs/features/user-settings.md) page on the web.
|
||||
|
||||
Partner Sharing includes:
|
||||
|
||||
- Access to all photos and videos that are not archived or trashed.
|
||||
- Access to all metadata, including GPS information.
|
||||
- Access to share assets via shared links, albums, etc.
|
||||
|
||||
:::info
|
||||
Partner sharing is one-way. To view your partner's assets, they must also share them with you.
|
||||
:::
|
||||
|
||||
## Sharing with a Partner
|
||||
|
||||
:::note Duplicates
|
||||
Partner sharing may result in displaying duplicate assets on the main timeline.
|
||||
:::
|
||||
|
||||
<img src={require('./img/partner-sharing-1.png').default} width="70%" title='Add Partner 1' />
|
||||
|
||||
<img src={require('./img/partner-sharing-2.png').default} width="70%" title='Add Partner 2' />
|
||||
|
||||
<img src={require('./img/partner-sharing-4.png').default} width="70%" title='Add Partner 4' />
|
||||
|
||||
## Viewing Partner Assets
|
||||
|
||||
Access partner assets via the Sharing page.
|
||||
|
||||
<img src={require('./img/partner-sharing-3.png').default} width="70%" title='Access to the Shared Library' />
|
||||
|
||||
## Timeline Integration
|
||||
|
||||
Partner shared photos can be displayed in the main timeline. This feature can be enabled on a per-partner basis and can be viewed and updated on both the web and mobile app.
|
||||
|
||||
### Web:
|
||||
|
||||
Account Settings -> Sharing -> Show in timeline
|
||||
|
||||
<img src={require('./img/partner-sharing-5.png').default} width="70%" title='Partner Sharing for the web interface' />
|
||||
|
||||
### Mobile App:
|
||||
|
||||
From the partner’s view, use the option in the top right corner of the app bar
|
||||
|
||||
<img src={require('./img/partner-sharing-6.png').default} width="30%" title='Partner Sharing for the mobile app' />
|
||||
|
||||
## Removing Access
|
||||
|
||||
To remove a partner, go to User -> Account Settings -> Sharing and click the X button.
|
||||
|
||||
<img src={require('./img/partner-sharing-7.png').default} width="70%" title='Remove Partner' />
|
||||
|
@ -8,7 +8,7 @@ During Exif Extraction, assets with latitudes and longitudes are reverse geocode
|
||||
|
||||
## Usage
|
||||
|
||||
Data from a reverse geocode is displayed in the image details, and used in [Smart Search](/docs/features/smart-search.md).
|
||||
Data from a reverse geocode is displayed in the image details, and used in [Search](/docs/features/search.md).
|
||||
|
||||
<img src={require('./img/reverse-geocoding-mobile1.png').default} width='33%' title='Reverse Geocoding' />
|
||||
<img src={require('./img/reverse-geocoding-mobile2.png').default} width='33%' title='Reverse Geocoding' />
|
||||
|
14
docs/docs/features/search.md
Normal file
@ -0,0 +1,14 @@
|
||||
# Search
|
||||
|
||||
Immich uses Postgres as its search database for both metadata and smart search.
|
||||
|
||||
Smart search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like CLIP to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
|
||||
Metadata search (prefixed with `m:`) searches by text only, without the use of a machine learning model.
|
||||
|
||||
Archived photos are not included in search results by default. To include them, add the query parameter `withArchived=true` to the URL.
|
||||
|
||||
Some search examples:
|
||||
<img src={require('./img/search-ex-2.webp').default} title='Search Example 1' />
|
||||
|
||||
<img src={require('./img/search-ex-3.webp').default} title='Search Example 2' />
|
@ -1,49 +0,0 @@
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
# Smart Search
|
||||
|
||||
Immich uses Postgres as its search database for both metadata and smart search.
|
||||
|
||||
Smart search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
|
||||
Archived photos are not included in search results by default. To include them, mark the checkbox in [advanced search filters](/docs/features/smart-search#advanced-search-filters).
|
||||
|
||||
:::tip Alternative CLIP Models
|
||||
More powerful models can be used for more accurate search results. For more information, see the related [FAQ](/docs/FAQ#can-i-use-a-custom-clip-model).
|
||||
:::
|
||||
|
||||
:::info
|
||||
Smart Search is currently limited to 5,000 results for a single search on the web.
|
||||
:::
|
||||
|
||||
## Advanced Search Filters
|
||||
|
||||
In addition, Immich offers advanced search functionality, allowing you to find specific content using customizable search filters. These filters include location, one or more faces, specific albums, and more. You can try out the search filters on the [Demo site](https://demo.immich.app).
|
||||
|
||||
Smart search features include:
|
||||
|
||||
- Search for one or more faces (with or without context search).
|
||||
- Search by Country or State or City or by all three.
|
||||
- Search by camera make and model.
|
||||
- Search by date range.
|
||||
- Search by file name.
|
||||
- Search by media types: image, video or all (**Note:** Image includes live images).
|
||||
- Search by condition: not in any album or archive or Favorite or all conditions.
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="Computer" label="Computer" default>
|
||||
|
||||
Some search examples:
|
||||
|
||||
<img src={require('./img/advanced-search-filters.webp').default} width="70%" title='Advanced search filters' />
|
||||
|
||||
<img src={require('./img/search-ex-1.png').default} width="70%" title='Search Example 1' />
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="Mobile" label="Mobile">
|
||||
|
||||
<img src={require('./img/moblie-smart-serach.webp').default} width="30%" title='Smart search on mobile' />
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
@ -1,42 +0,0 @@
|
||||
# Supported formats
|
||||
|
||||
Immich supports a number of image and video formats, the most common of which are outlined here.
|
||||
|
||||
:::note
|
||||
For the full list, you can refer to the [Immich source code](https://github.com/immich-app/immich/blob/main/server/src/utils/mime-types.ts).
|
||||
:::
|
||||
|
||||
## Image formats
|
||||
|
||||
| Format | Extension(s) | Supported? | Notes |
|
||||
| :-------- | :---------------------------- | :----------------: | :-------------- |
|
||||
| `AVIF` | `.avif` | :white_check_mark: | |
|
||||
| `BMP` | `.bmp` | :white_check_mark: | |
|
||||
| `GIF` | `.gif` | :white_check_mark: | |
|
||||
| `HEIC` | `.heic` | :white_check_mark: | |
|
||||
| `HEIF` | `.heif` | :white_check_mark: | |
|
||||
| `JPEG` | `.jpeg` `.jpg` `.jpe` `.insp` | :white_check_mark: | |
|
||||
| `JPEG XL` | `.jxl` | :white_check_mark: | |
|
||||
| `PNG` | `.png` | :white_check_mark: | |
|
||||
| `PSD` | `.psd` | :white_check_mark: | Adobe Photoshop |
|
||||
| `RAW` | `.raw` | :white_check_mark: | |
|
||||
| `RW2` | `.rw2` | :white_check_mark: | |
|
||||
| `SVG` | `.svg` | :white_check_mark: | |
|
||||
| `TIFF` | `.tif` `.tiff` | :white_check_mark: | |
|
||||
| `WEBP` | `.webp` | :white_check_mark: | |
|
||||
|
||||
## Video formats
|
||||
|
||||
| Format | Extension(s) | Supported? | Notes |
|
||||
| :---------- | :-------------------- | :----------------: | :---- |
|
||||
| `3GPP` | `.3gp` `.3gpp` | :white_check_mark: | |
|
||||
| `AVI` | `.avi` | :white_check_mark: | |
|
||||
| `FLV` | `.flv` | :white_check_mark: | |
|
||||
| `M4V` | `.m4v` | :white_check_mark: | |
|
||||
| `MATROSKA` | `.mkv` | :white_check_mark: | |
|
||||
| `MP2T` | `.mts` `.m2ts` | :white_check_mark: | |
|
||||
| `MP4` | `.mp4` `.insv` | :white_check_mark: | |
|
||||
| `MPEG` | `.mpg` `.mpe` `.mpeg` | :white_check_mark: | |
|
||||
| `QUICKTIME` | `.mov` | :white_check_mark: | |
|
||||
| `WEBM` | `.webm` | :white_check_mark: | |
|
||||
| `WMV` | `.wmv` | :white_check_mark: | |
|
@ -6,7 +6,7 @@ Immich can ingest XMP sidecars on file upload (via the CLI) as well as detect ne
|
||||
|
||||
XMP sidecars are external XML files that contain metadata related to media files. Many applications read and write these files either exclusively or in addition to the metadata written to image files. They can be a powerful tool for editing and storing metadata of a media file without modifying the media file itself. When Immich receives or detects an XMP sidecar for a media file, it will attempt to extract the metadata from both the sidecar as well as the media file. It will prioritize the metadata for fields in the sidecar but will fall back and use the metadata in the media file if necessary.
|
||||
|
||||
When importing files via the CLI bulk uploader or parsing photo metadata for external libraries, Immich will automatically detect XMP sidecar files that exist next to the original media file. Immich will look for files that have the same name as the photo, but with the `.xmp` file extension; the name can either include or omit the photo's file extension. For example, for a photo named `PXL_20230401_203352928.MP.jpg`, Immich will look for an XMP file named either `PXL_20230401_203352928.MP.jpg.xmp` or `PXL_20230401_203352928.MP.xmp`. If both `PXL_20230401_203352928.MP.jpg.xmp` and `PXL_20230401_203352928.MP.xmp` are present, Immich will prefer `PXL_20230401_203352928.MP.jpg.xmp`.
|
||||
When importing files via the CLI bulk uploader, Immich will automatically detect XMP sidecar files as files that exist next to the original media file and have the exact same name with an additional `.xmp` file extension (i.e., `PXL_20230401_203352928.MP.jpg` and `PXL_20230401_203352928.MP.jpg.xmp`).
|
||||
|
||||
There are two administrator jobs associated with sidecar files: `SYNC` and `DISCOVER`. The sync job will re-scan all media with existing sidecar files and queue them for a metadata refresh. This is useful when third-party applications are used to modify the metadata of media. The discover job will attempt to scan the filesystem for new sidecar files for all media that does not currently have a sidecar file associated with it.
|
||||
|
||||
|
130
docs/docs/guides/api-album-sync.md
Normal file
@ -0,0 +1,130 @@
|
||||
# API Album Sync (Python Script)
|
||||
|
||||
This is an example of a Python script for syncing an album to a local folder. This was used for a digital photo frame so that the displayed photos could be managed from the Immich web or mobile app UI.
|
||||
|
||||
The script is copied below in its current form. A repository is hosted [here](https://git.orenit.solutions/open/immichalbumpull).
|
||||
|
||||
:::danger
|
||||
This guide uses a generated API key. This key gives the same access to your immich instance as the user it is attached to, so be careful how the config file is stored and transferred.
|
||||
:::
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.7+
|
||||
- [requests library](https://pypi.org/project/requests/)
|
||||
|
||||
### Installing
|
||||
|
||||
Copy the contents of 'pull.py' (shown below) to your chosen location or clone the repository:
|
||||
|
||||
```bash
|
||||
git clone https://git.orenit.solutions/open/immichalbumpull
|
||||
```
|
||||
|
||||
Edit or create the 'config.ini' file in the same directory as the script with the necessary details:
|
||||
|
||||
```ini title='config.ini'
|
||||
[immich]
|
||||
# URL of target immich instance
|
||||
url = https://photo.example.com
|
||||
# API key from Account Settings -> API Keys
|
||||
apikey = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||
# Full local path to target directory
|
||||
destination = /home/photo/photos
|
||||
# immich album name
|
||||
album = Photoframe
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
Run the script directly:
|
||||
|
||||
```bash
|
||||
./pull.py
|
||||
```
|
||||
|
||||
Or from cron (every 5 minutes):
|
||||
|
||||
```bash
|
||||
*/5 * * * * /usr/bin/python /home/user/immichalbumpull/pull.py
|
||||
```
|
||||
|
||||
### Python Script
|
||||
|
||||
```python title='pull.py'
|
||||
#!/usr/bin/env python
|
||||
|
||||
import requests
|
||||
import configparser
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Read config file
|
||||
config = configparser.ConfigParser()
|
||||
config.read('config.ini')
|
||||
|
||||
url = config['immich']['url']
|
||||
apikey = config['immich']['apikey']
|
||||
photodir = config['immich']['destination']
|
||||
albumname = config['immich']['album']
|
||||
|
||||
headers = {
|
||||
'Accept': 'application/json',
|
||||
'x-api-key': apikey
|
||||
}
|
||||
|
||||
# Set up the directory for the downloaded images
|
||||
os.makedirs(photodir, exist_ok=True)
|
||||
|
||||
# Get the list of albums from the API
|
||||
response = requests.get(url + "/api/album", headers=headers)
|
||||
|
||||
# Parse the JSON response
|
||||
data = response.json()
|
||||
|
||||
# Find the chosen album id
|
||||
for item in data:
|
||||
if item['albumName'] == albumname:
|
||||
albumid = item['id']
|
||||
|
||||
# Get the list of photos from the API using the albumid
|
||||
response = requests.get(url + "/api/album/" + albumid, headers=headers)
|
||||
|
||||
# Parse the JSON response and extract the URLs of the images
|
||||
data = response.json()
|
||||
image_urls = data['assets']
|
||||
|
||||
# Download each image from the URL and save it to the directory
|
||||
headers = {
|
||||
'Accept': 'application/octet-stream',
|
||||
'x-api-key': apikey
|
||||
}
|
||||
|
||||
photolist = []
|
||||
|
||||
for id in image_urls:
|
||||
# Query asset info endpoint for correct extension
|
||||
assetinfourl = url + "/api/asset/" + str(id['id'])
|
||||
response = requests.get(assetinfourl, headers=headers)
|
||||
assetinfo = response.json()
|
||||
ext = os.path.splitext(assetinfo['originalFileName'])
|
||||
|
||||
asseturl = url + "/api/download/asset/" + str(id['id'])
|
||||
response = requests.post(asseturl, headers=headers, stream=True)
|
||||
|
||||
# Build current photo list for deletions below
|
||||
photo = os.path.basename(asseturl) + ext[1]
|
||||
photolist.append(photo)
|
||||
|
||||
photofullpath = photodir + '/' + os.path.basename(asseturl) + ext[1]
|
||||
# Only download file if it doesn't already exist
|
||||
if not os.path.exists(photofullpath):
|
||||
with open(photofullpath, 'wb') as f:
|
||||
for chunk in response.iter_content(1024):
|
||||
f.write(chunk)
|
||||
|
||||
# Delete old photos removed from album
|
||||
for filename in os.listdir(photodir):
|
||||
if filename not in photolist:
|
||||
os.unlink(os.path.join(photodir, filename))
|
||||
```
|
@ -45,7 +45,7 @@ SELECT * FROM "assets" JOIN "exif" ON "assets"."id" = "exif"."assetId" WHERE "ex
|
||||
```
|
||||
|
||||
```sql title="Without thumbnails"
|
||||
SELECT * FROM "assets" WHERE "assets"."previewPath" IS NULL OR "assets"."thumbnailPath" IS NULL;
|
||||
SELECT * FROM "assets" WHERE "assets"."resizePath" IS NULL OR "assets"."webpPath" IS NULL;
|
||||
```
|
||||
|
||||
```sql title="By type"
|
||||
|
@ -56,4 +56,4 @@ A remote reverse proxy like [Cloudflare](https://www.cloudflare.com/learning/cdn
|
||||
### Cons
|
||||
|
||||
- Complex configuration
|
||||
- Depending on your configuration, both the Immich web interface and API may be exposed to the internet. Immich is under very active development and the existence of severe security vulnerabilities cannot be ruled out.
|
||||
- Depending on your configuration, both the Immich web interface and API may be exposed to the internet. Immich is under very active developement and the existence of severe security vulnerabilities cannot be ruled out.
|
||||
|
@ -4,7 +4,7 @@ To alleviate [performance issues on low-memory systems](/docs/FAQ.mdx#why-is-imm
|
||||
|
||||
- Set the URL in Machine Learning Settings on the Admin Settings page to point to the designated ML system, e.g. `http://workstation:3003`.
|
||||
- Copy the following `docker-compose.yml` to your ML system.
|
||||
- Start the container by running `docker compose up -d`.
|
||||
- Start the container by running `docker-compose up -d` or `docker compose up -d` (depending on your Docker version).
|
||||
|
||||
:::note Info
|
||||
Starting with version v1.93.0, face detection and facial recognition were split into separate jobs. Face detection is done in the immich_machine_learning service, while facial recognition is done in the immich_microservices service.
|
||||
|
176
docs/docs/guides/remove-offline-files.md
Normal file
@ -0,0 +1,176 @@
|
||||
# Remove Offline Files [Community]
|
||||
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
:::note
|
||||
**Before running the script**, please make sure you have a [backup](/docs/administration/backup-and-restore) of your assets and database.
|
||||
:::
|
||||
|
||||
:::info
|
||||
**None** of the scripts can delete orphaned files from the external library.
|
||||
:::
|
||||
|
||||
This page is a guide to removing offline files that appear on the Repair page.
|
||||
|
||||
<Tabs>
|
||||
|
||||
<TabItem value="Python script (Best way)" label="Python script (Best way)">
|
||||
|
||||
This method retrieves a list of all files that are flagged as offline, then runs a script that uses the [Immich API](/docs/api/delete-assets) to remove them.
|
||||
|
||||
1. Create an API key under Admin User -> Account Settings -> API Keys -> New API Key -> Copy to clipboard.
|
||||
2. Copy and save the code to a file, e.g. `Immich Remove Offline Files.py`.
|
||||
3. Run the script and follow the instructions.
|
||||
|
||||
:::note
|
||||
You might need to run `pip install halo tabulate tqdm` if these dependencies are missing on your machine.
|
||||
:::
|
||||
|
||||
```bash title='Python'
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Note: you might need to run "pip install halo tabulate tqdm" if these dependencies are missing on your machine
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import requests
|
||||
|
||||
from datetime import datetime
|
||||
from halo import Halo
|
||||
from tabulate import tabulate
|
||||
from tqdm import tqdm
|
||||
from urllib.parse import urlparse
|
||||
|
||||
def parse_arguments():
|
||||
parser = argparse.ArgumentParser(description='Fetch file report and delete orphaned media assets from Immich.')
|
||||
parser.add_argument('--apikey', help='Immich API key for authentication')
|
||||
parser.add_argument('--immichaddress', help='Full address for Immich, including protocol and port')
|
||||
parser.add_argument('--no_prompt', action='store_true', help='Delete orphaned media assets without confirmation')
|
||||
args = parser.parse_args()
|
||||
return args
|
||||
|
||||
def filter_entities(response_json, entity_type):
|
||||
return [
|
||||
{'pathValue': entity['pathValue'], 'entityId': entity['entityId'], 'entityType': entity['entityType']}
|
||||
for entity in response_json.get('orphans', []) if entity.get('entityType') == entity_type
|
||||
]
|
||||
|
||||
def main():
|
||||
args = parse_arguments()
|
||||
try:
|
||||
if args.apikey:
|
||||
api_key = args.apikey
|
||||
else:
|
||||
api_key = input('Enter the Immich API key: ')
|
||||
|
||||
if args.immichaddress:
|
||||
immich_server = args.immichaddress
|
||||
else:
|
||||
immich_server = input('Enter the full web address for Immich, including protocol and port: ')
|
||||
immich_parsed_url = urlparse(immich_server)
|
||||
base_url = f'{immich_parsed_url.scheme}://{immich_parsed_url.netloc}'
|
||||
api_url = f'{base_url}/api'
|
||||
file_report_url = api_url + '/audit/file-report'
|
||||
headers = {'x-api-key': api_key}
|
||||
|
||||
print()
|
||||
spinner = Halo(text='Retrieving list of orphaned media assets...', spinner='dots')
|
||||
spinner.start()
|
||||
|
||||
try:
|
||||
response = requests.get(file_report_url, headers=headers)
|
||||
response.raise_for_status()
|
||||
spinner.succeed('Success!')
|
||||
except requests.exceptions.RequestException as e:
|
||||
spinner.fail(f'Failed to fetch assets: {str(e)}')
|
||||
|
||||
person_assets = filter_entities(response.json(), 'person')
|
||||
orphan_media_assets = filter_entities(response.json(), 'asset')
|
||||
|
||||
num_entries = len(orphan_media_assets)
|
||||
|
||||
if num_entries == 0:
|
||||
print('No orphaned media assets found; exiting.')
|
||||
return
|
||||
|
||||
else:
|
||||
if not args.no_prompt:
|
||||
table_data = []
|
||||
for asset in orphan_media_assets:
|
||||
table_data.append([asset['pathValue'], asset['entityId']])
|
||||
print(tabulate(table_data, headers=['Path Value', 'Entity ID'], tablefmt='pretty'))
|
||||
print()
|
||||
|
||||
if person_assets:
|
||||
print('Found orphaned person assets! Please run the "RECOGNIZE FACES > ALL" job in Immich after running this tool to correct this.')
|
||||
print()
|
||||
|
||||
if num_entries > 0:
|
||||
summary = f'There {"is" if num_entries == 1 else "are"} {num_entries} orphaned media asset{"s" if num_entries != 1 else ""}. Would you like to delete {"them" if num_entries != 1 else "it"} from Immich? (yes/no): '
|
||||
user_input = input(summary).lower()
|
||||
print()
|
||||
|
||||
if user_input not in ('y', 'yes'):
|
||||
print('Exiting without making any changes.')
|
||||
return
|
||||
|
||||
with tqdm(total=num_entries, desc="Deleting orphaned media assets", unit="asset") as progress_bar:
|
||||
for asset in orphan_media_assets:
|
||||
entity_id = asset['entityId']
|
||||
asset_url = f'{api_url}/asset'
|
||||
delete_payload = json.dumps({'force': True, 'ids': [entity_id]})
|
||||
headers = {'Content-Type': 'application/json', 'x-api-key': api_key}
|
||||
response = requests.delete(asset_url, headers=headers, data=delete_payload)
|
||||
response.raise_for_status()
|
||||
progress_bar.set_postfix_str(entity_id)
|
||||
progress_bar.update(1)
|
||||
print()
|
||||
print('Orphaned media assets deleted successfully!')
|
||||
except Exception as e:
|
||||
print()
|
||||
print(f"An error occurred: {str(e)}")
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
```
|
||||
|
||||
Thanks to [DooMRunneR](https://discord.com/channels/979116623879368755/1179655214870040596/1194308198413373482) and [Sircharlo](https://discord.com/channels/979116623879368755/1179655214870040596/1195038609812758639) for writing this script.
|
||||
|
||||
</TabItem>
|
||||
|
||||
<TabItem value="Bash and PowerShell script" label="Bash and PowerShell script" default>
|
||||
|
||||
This method downloads a JSON file containing a list of all files that are flagged as offline, then runs a script that uses the [Immich API](/docs/api/delete-assets) to remove them.
|
||||
|
||||
1. Create an API key under Admin User -> Account Settings -> API Keys -> New API Key -> Copy to clipboard.
|
||||
2. Download the JSON file under Administration -> repair -> Export.
|
||||
3. Replace `YOUR_IP_HERE` and `YOUR_API_KEY_HERE` with your actual IP address and API key in the script.
|
||||
4. Run the script in the same folder where the JSON file is located.
|
||||
|
||||
## Script for Linux based systems:
|
||||
|
||||
```bash title='Bash'
|
||||
awk -F\" '/entityId/ {print $4}' orphans.json | while read line; do curl --location --request DELETE 'http://YOUR_IP_HERE:2283/api/asset' --header 'Content- Type: application/json' --header 'x-api-key: YOUR_API_KEY_HERE' --data '{ "force": true, "ids": ["'"$line"'"]}';done
|
||||
```
|
||||
|
||||
## Script for the Windows system (run through PowerShell):
|
||||
|
||||
```powershell title='PowerShell'
|
||||
Get-Content orphans.json | Select-String -Pattern 'entityId' | ForEach-Object {
|
||||
$line = $_ -split '"' | Select-Object -Index 3
|
||||
$body = [pscustomobject]@{
|
||||
'ids' = @($line)
|
||||
'force' = (' true ' | ConvertFrom-Json)
|
||||
} | ConvertTo-Json -Depth 3
|
||||
Invoke-RestMethod -Uri 'http://YOUR_IP_HERE:2283/api/asset' -Method Delete -Headers @{
|
||||
'Content-Type' = 'application/json'
|
||||
'x-api-key' = 'YOUR_API_KEY_HERE'
|
||||
} -Body $body
|
||||
}
|
||||
```
|
||||
|
||||
Thanks to [DooMRunneR](https://discord.com/channels/979116623879368755/1179655214870040596/1194308198413373482) for writing this script.
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
@ -43,9 +43,9 @@ REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"
|
||||
### Local
|
||||
|
||||
# Backup Immich database
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
|
||||
docker exec -t immich_postgres pg_dumpall -c -U postgres > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
|
||||
# For deduplicating backup programs such as Borg or Restic, compressing the content can increase backup size by making it harder to deduplicate. If you are using a different program or still prefer to compress, you can use the following command instead:
|
||||
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
|
||||
# docker exec -t immich_postgres pg_dumpall -c -U postgres | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
|
||||
|
||||
### Append to local Borg repository
|
||||
borg create "$BACKUP_PATH/immich-borg::{now}" "$UPLOAD_LOCATION" --exclude "$UPLOAD_LOCATION"/thumbs/ --exclude "$UPLOAD_LOCATION"/encoded-video/
|
||||
|
@ -114,11 +114,9 @@ The default configuration looks like this:
|
||||
"hashVerificationEnabled": true,
|
||||
"template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
|
||||
},
|
||||
"image": {
|
||||
"thumbnailFormat": "webp",
|
||||
"thumbnailSize": 250,
|
||||
"previewFormat": "jpeg",
|
||||
"previewSize": 1440,
|
||||
"thumbnail": {
|
||||
"webpSize": 250,
|
||||
"jpegSize": 1440,
|
||||
"quality": 80,
|
||||
"colorspace": "p3"
|
||||
},
|
||||
|
@ -21,7 +21,7 @@ cd ./immich-app
|
||||
Download [`docker-compose.yml`][compose-file] and [`example.env`][env-file], either by running the following commands:
|
||||
|
||||
```bash title="Get docker-compose.yml file"
|
||||
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
wget https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
```
|
||||
|
||||
```bash title="Get .env file"
|
||||
@ -29,11 +29,11 @@ wget -O .env https://github.com/immich-app/immich/releases/latest/download/examp
|
||||
```
|
||||
|
||||
```bash title="(Optional) Get hwaccel.transcoding.yml file"
|
||||
wget -O hwaccel.transcoding.yml https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
|
||||
wget https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
|
||||
```
|
||||
|
||||
```bash title="(Optional) Get hwaccel.ml.yml file"
|
||||
wget -O hwaccel.ml.yml https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml
|
||||
wget https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml
|
||||
```
|
||||
|
||||
or by downloading from your browser and moving the files to the directory that you created.
|
||||
@ -65,7 +65,7 @@ From the directory you created in Step 1, (which should now contain your customi
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
:::info Docker version
|
||||
:::tip
|
||||
If you get an error `unknown shorthand flag: 'd' in -d`, you are probably running the wrong Docker version. (This happens, for example, with the docker.io package in Ubuntu 22.04.3 LTS.) You can correct the problem by `apt remove`ing Ubuntu's docker.io package and installing docker and docker-compose via [Docker's official repository](https://docs.docker.com/engine/install/ubuntu/#install-using-the-repository).
|
||||
|
||||
Note that the correct command really is `docker compose`, not `docker-compose`. If you try the latter on vanilla Ubuntu 22.04 it will fail in a different way:
|
||||
@ -82,16 +82,12 @@ See the previous paragraph about installing from the official docker repository.
|
||||
For more information on how to use the application, please refer to the [Post Installation](/docs/install/post-install.mdx) guide.
|
||||
:::
|
||||
|
||||
:::note GitHub Authentication
|
||||
Downloading container images might require you to authenticate to the GitHub Container Registry ([steps here](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry)).
|
||||
:::tip
|
||||
Note that downloading container images might require you to authenticate to the GitHub Container Registry ([steps here](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry)).
|
||||
:::
|
||||
|
||||
### Step 4 - Upgrading
|
||||
|
||||
:::danger Breaking Changes
|
||||
It is important to follow breaking updates to avoid problems. You can see versions that had breaking changes [here](https://github.com/immich-app/immich/discussions?discussions_q=label%3Abreaking-change+sort%3Adate_created).
|
||||
:::
|
||||
|
||||
If `IMMICH_VERSION` is set, it will need to be updated to the latest or desired version.
|
||||
|
||||
When a new version of Immich is [released](https://github.com/immich-app/immich/releases), the application can be upgraded with the following commands, run in the directory with the `docker-compose.yml` file:
|
||||
@ -101,7 +97,7 @@ docker compose pull && docker compose up -d
|
||||
```
|
||||
|
||||
:::caution Automatic Updates
|
||||
Immich is currently under heavy development, which means you can expect [breaking changes](https://github.com/immich-app/immich/discussions?discussions_q=label%3Abreaking-change+sort%3Adate_created) and bugs. Therefore, we recommend reading the release notes prior to updating and to take special care when using automated tools like [Watchtower][watchtower].
|
||||
Immich is currently under heavy development, which means you can expect breaking changes and bugs. Therefore, we recommend reading the release notes prior to updating and to take special care when using automated tools like [Watchtower][watchtower].
|
||||
:::
|
||||
|
||||
[compose-file]: https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
|
@ -18,8 +18,8 @@ If this should not work, try running `docker compose up -d --force-recreate`.
|
||||
## Docker Compose
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :---------------- | :-------------------- | :-------: | :-------------------------------------- |
|
||||
| `IMMICH_VERSION` | Image tags | `release` | server, microservices, machine learning |
|
||||
| :---------------- | :-------------------- | :-------: | :-------------------------------------------------- |
|
||||
| `IMMICH_VERSION` | Image tags | `release` | server, microservices, machine learning, web, proxy |
|
||||
| `UPLOAD_LOCATION` | Host Path for uploads | | server, microservices |
|
||||
|
||||
:::tip
|
||||
@ -31,25 +31,28 @@ These environment variables are used by the `docker-compose.yml` file and do **N
|
||||
## General
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :------------------------------ | :------------------------------------------- | :------------------: | :-------------------------------------- |
|
||||
| :------------------------------ | :------------------------------------------- | :------------------: | :------------------------------------------- |
|
||||
| `TZ` | Timezone | | microservices |
|
||||
| `NODE_ENV` | Environment (production, development) | `production` | server, microservices, machine learning |
|
||||
| `LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, microservices, machine learning |
|
||||
| `NODE_ENV` | Environment (production, development) | `production` | server, microservices, machine learning, web |
|
||||
| `LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, microservices |
|
||||
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload` | server, microservices |
|
||||
| `IMMICH_CONFIG_FILE` | Path to config file | | server, microservices |
|
||||
| `IMMICH_WEB_ROOT` | Path of root index.html | `/usr/src/app/www` | server |
|
||||
| `IMMICH_REVERSE_GEOCODING_ROOT` | Path of reverse geocoding dump directory | `/usr/src/resources` | microservices |
|
||||
|
||||
:::tip
|
||||
`TZ` should be set to a `TZ identifier` from [this list](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List). For example, `TZ="Etc/UTC"`.
|
||||
|
||||
`TZ` is only used by `exiftool`, which is present in the microservices container, as a fallback in case the timezone cannot be determined from the image metadata.
|
||||
`TZ` is only used by the `exiftool` as a fallback in case the timezone cannot be determined from the image metadata.
|
||||
|
||||
`exiftool` is only present in the microservices container.
|
||||
|
||||
:::
|
||||
|
||||
## Ports
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :---------------------- | :-------------------- | :-------: | :--------------- |
|
||||
| `PORT` | Web Port | `3000` | web |
|
||||
| `SERVER_PORT` | Server Port | `3001` | server |
|
||||
| `MICROSERVICES_PORT` | Microservices Port | `3002` | microservices |
|
||||
| `MACHINE_LEARNING_HOST` | Machine Learning Host | `0.0.0.0` | machine learning |
|
||||
@ -144,18 +147,6 @@ Other machine learning parameters can be tuned from the admin UI.
|
||||
|
||||
:::
|
||||
|
||||
## Prometheus
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :----------------------------- | :-------------------------------------------------------------------------------------------- | :-----: | :-------------------- |
|
||||
| `IMMICH_METRICS`<sup>\*1</sup> | Toggle all metrics (one of [`true`, `false`]) | | server, microservices |
|
||||
| `IMMICH_API_METRICS` | Toggle metrics for endpoints and response times (one of [`true`, `false`]) | | server, microservices |
|
||||
| `IMMICH_HOST_METRICS` | Toggle metrics for CPU and memory utilization for host and process (one of [`true`, `false`]) | | server, microservices |
|
||||
| `IMMICH_IO_METRICS` | Toggle metrics for database queries, image processing, etc. (one of [`true`, `false`]) | | server, microservices |
|
||||
| `IMMICH_JOB_METRICS` | Toggle metrics for jobs and queues (one of [`true`, `false`]) | | server, microservices |
|
||||
|
||||
\*1: Overridden for a metric group when its corresponding environmental variable is set.
|
||||
|
||||
## Docker Secrets
|
||||
|
||||
The following variables support the use of [Docker secrets](https://docs.docker.com/engine/swarm/secrets/) for additional security.
|
||||
@ -164,12 +155,11 @@ To use any of these, replace the regular environment variable with the equivalen
|
||||
the `_FILE` variable should be set to the path of a file containing the variable value.
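For example, a minimal sketch of this pattern (the secret path is an assumption; point the `_FILE` variable at wherever your secret file is mounted):

```bash title='.env'
# Instead of DB_PASSWORD=..., reference a file containing the password
DB_PASSWORD_FILE=/run/secrets/db_password
```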
|
||||
|
||||
| Regular Variable | Equivalent Docker Secrets '\_FILE' Variable |
|
||||
| :----------------- | :------------------------------------------ |
|
||||
| :----------------: | :-----------------------------------------: |
|
||||
| `DB_HOSTNAME` | `DB_HOSTNAME_FILE`<sup>\*1</sup> |
|
||||
| `DB_DATABASE_NAME` | `DB_DATABASE_NAME_FILE`<sup>\*1</sup> |
|
||||
| `DB_USERNAME` | `DB_USERNAME_FILE`<sup>\*1</sup> |
|
||||
| `DB_PASSWORD` | `DB_PASSWORD_FILE`<sup>\*1</sup> |
|
||||
| `DB_URL` | `DB_URL_FILE`<sup>\*1</sup> |
|
||||
| `REDIS_PASSWORD` | `REDIS_PASSWORD_FILE`<sup>\*2</sup> |
|
||||
|
||||
\*1: See the [official documentation](https://github.com/docker-library/docs/tree/master/postgres#docker-secrets) for
|
||||
|
@ -1,134 +0,0 @@
|
||||
---
|
||||
sidebar_position: 90
|
||||
---
|
||||
|
||||
# Podman deploy with quadlets
|
||||
|
||||
You can deploy Immich on Podman using quadlets.
|
||||
|
||||
Here are some sample rootless quadlet container files that can be placed in /etc/containers/systemd/users/${ID} where ID is the uid of whatever your rootless user is.
|
||||
|
||||
Please note you'll need :z or :Z for selinux enabled hosts.
|
||||
|
||||
immich-database.container
|
||||
```bash
|
||||
[Unit]
|
||||
Description=Immich Database
|
||||
Requires=immich-redis.service
|
||||
|
||||
[Container]
|
||||
AutoUpdate=registry
|
||||
EnvironmentFile=${location_of_env_file}
|
||||
Image=registry.hub.docker.com/tensorchord/pgvecto-rs:pg16-v0.2.1
|
||||
Label=registry
|
||||
Network=slirp4netns:port_handler=slirp4netns
|
||||
PublishPort=5432:5432
|
||||
Volume=${host_database_directory}:/var/lib/postgresql/data:z
|
||||
Volume=/etc/localtime:/etc/localtime:ro
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target default.target
|
||||
```
|
||||
|
||||
immich-microservices.container
|
||||
```bash
|
||||
[Unit]
|
||||
Description=Immich Microservices
|
||||
Requires=immich-redis.service immich-database.service
|
||||
|
||||
[Container]
|
||||
#AddDevice=/dev/dri:/dev/dri #Needed for HWA
|
||||
#AddDevice=nvidia.com/gpu=0 #Needed for nvidia HWA, after setting up container tools
|
||||
AutoUpdate=registry
|
||||
EnvironmentFile=${location_of_env_file}
|
||||
Image=ghcr.io/immich-app/immich-server:release
|
||||
Label=registry
|
||||
Network=slirp4netns:port_handler=slirp4netns
|
||||
PublishPort=3002:3002
|
||||
Volume=${host_upload_directory}:/usr/src/app/upload:z
|
||||
Volume=/etc/localtime:/etc/localtime:ro
|
||||
Exec=start.sh microservices
|
||||
#Unmask=/dev/dri:/dev/dri #May be needed if doing HWA
|
||||
#UserNS=keep-id #May be needed if doing HWA
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target default.target
|
||||
```
|
||||
|
||||
immich-ml.container
|
||||
```bash
|
||||
|
||||
[Unit]
|
||||
Description=Immich Machine Learning
|
||||
Requires=immich-redis.service immich-database.service
|
||||
|
||||
[Container]
|
||||
#AddDevice=/dev/dri:/dev/dri #Needed for HWA
|
||||
#AddDevice=nvidia.com/gpu=0 #Needed for nvidia HWA, after setting up container tools
|
||||
AutoUpdate=registry
|
||||
EnvironmentFile=${location_of_env_file}
|
||||
Image=ghcr.io/immich-app/immich-machine-learning:release
|
||||
Label=registry
|
||||
Network=slirp4netns:port_handler=slirp4netns
|
||||
PublishPort=3003:3003
|
||||
Volume=${cache_directory}:/cache:z
|
||||
Volume=/etc/localtime:/etc/localtime:ro
|
||||
#Unmask=/dev/dri:/dev/dri #May be needed for HWA
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target default.target
|
||||
```
|
||||
|
||||
immich-redis.container
|
||||
```bash
|
||||
[Unit]
|
||||
Description=Immich Redis
|
||||
|
||||
[Container]
|
||||
AutoUpdate=registry
|
||||
Image=registry.hub.docker.com/library/redis:6.2-alpine@sha256:51d6c56749a4243096327e3fb964a48ed92254357108449cb6e23999c37773c5
|
||||
Label=registry
|
||||
Network=slirp4netns:port_handler=slirp4netns
|
||||
PublishPort=6379:6379
|
||||
Timezone=America/Montreal
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target default.target
|
||||
```
|
||||
|
||||
immich-server.container
|
||||
```bash
|
||||
[Unit]
|
||||
Description=Immich Server
|
||||
Requires=immich-redis.service immich-database.service
|
||||
|
||||
[Container]
|
||||
AutoUpdate=registry
|
||||
EnvironmentFile=${location_of_env_file}
|
||||
Image=ghcr.io/immich-app/immich-server:release
|
||||
Label=registry
|
||||
Network=slirp4netns:port_handler=slirp4netns
|
||||
Exec=start.sh immich
|
||||
PublishPort=3000:3000
|
||||
PublishPort=3001:3001
|
||||
Volume=${host_upload_directory}:/usr/src/app/upload
|
||||
Volume=/etc/localtime:/etc/localtime:ro
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target default.target
|
||||
```
|
@ -1,5 +1,5 @@
|
||||
---
|
||||
sidebar_position: 100
|
||||
sidebar_position: 90
|
||||
---
|
||||
|
||||
import RegisterAdminUser from '/docs/partials/_register-admin.md';
|
||||
|
@ -11,10 +11,6 @@ Hardware and software requirements for Immich
|
||||
- [Docker](https://docs.docker.com/get-docker/)
|
||||
- [Docker Compose](https://docs.docker.com/compose/install/)
|
||||
|
||||
:::note
|
||||
Immich requires the command `docker compose` - the similarly named `docker-compose` is [deprecated](https://docs.docker.com/compose/migrate/) and is no longer compatible with Immich.
|
||||
:::
|
||||
|
||||
:::info Podman
|
||||
You can also use Podman to run the application. However, additional configuration might be required.
|
||||
:::
|
||||
|
@ -17,11 +17,12 @@ curl -o- https://raw.githubusercontent.com/immich-app/immich/main/install.sh | b
|
||||
The script will perform the following actions:
|
||||
|
||||
1. Download [docker-compose.yml](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml), and the [.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file from the main branch of the [repository](https://github.com/immich-app/immich).
|
||||
2. Start the containers.
|
||||
2. Populate the `.env` file with necessary information based on the current directory path.
|
||||
3. Start the containers.
|
||||
|
||||
The web application will be available at `http://<machine-ip-address>:2283`, and the server URL for the mobile app will be `http://<machine-ip-address>:2283/api`
|
||||
|
||||
The directory which is used to store the library files is `./immich-app` relative to the current directory.
|
||||
The directory which is used to store the library files is `./immich-data` relative to the current directory.
|
||||
|
||||
:::tip
|
||||
For common next steps, see [Post Install Steps](/docs/install/post-install.mdx).
|
||||
|
@ -27,7 +27,7 @@ For more information about setting up the community image see [here](https://git
|
||||
|
||||
:::info
|
||||
|
||||
- Guide was written using Unraid v6.12.10
|
||||
- Guide was written using Unraid v6.11.1
|
||||
- Requires you to have installed the plugin: [Docker Compose Manager](https://forums.unraid.net/topic/114415-plugin-docker-compose-manager/)
|
||||
- An Unraid share created for your images
|
||||
- There has been a [report](https://forums.unraid.net/topic/130006-errortraps-traps-node27707-trap-invalid-opcode-ip14fcfc8d03c0-sp7fff32889dd8-more/#comment-1189395) of this not working if your Unraid server doesn't support AVX _(e.g. using a T610)_
|
||||
@ -46,7 +46,7 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
|
||||
/>
|
||||
|
||||
3. Select the cog ⚙️ next to Immich then click "**Edit Stack**"
|
||||
4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default.
|
||||
4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor
|
||||
<details >
|
||||
<summary>Using an existing Postgres container? Click me! Otherwise proceed to step 5.</summary>
|
||||
<ul>
|
||||
@ -98,7 +98,7 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
|
||||
|
||||
> Note: This can take several minutes depending on your Internet speed and Unraid hardware
|
||||
|
||||
9. Once on the Docker page you will see several Immich containers, one of them will be labelled `immich_server` and will have a port mapping. Visit the `IP:PORT` displayed in your web browser and you should see the Immich admin setup page.
|
||||
9. Once on the Docker page you will see several Immich containers, one of them will be labelled `immich_web` and will have a port mapping. Visit the `IP:PORT` displayed in your web browser and you should see the Immich admin setup page.
|
||||
|
||||
<img
|
||||
src={require('./img/unraid06.webp').default}
|
||||
@ -107,8 +107,8 @@ alt="Go to Docker Tab and visit the address listed next to immich-web"
|
||||
/>
|
||||
|
||||
<details >
|
||||
<summary>Using the FolderView plugin for organizing your Docker containers? Click me! Otherwise you're complete!</summary>
|
||||
<p>If you are using the FolderView plugin go the Docker tab and select "<b>New Folder</b>".<br />Label it <i>"Immich"</i> and use this URL as the logo: https://raw.githubusercontent.com/immich-app/immich/main/design/immich-logo.png<br/>Then simply select all the Immich related containers before clicking "<b>Submit</b>"</p>
|
||||
<summary>Using the Unraid Docker Folders plugin? Click me! Otherwise you're complete!</summary>
|
||||
<p>If you are using the Docker Folders plugin go the Docker tab and select "<b>New Folder</b>".<br />Label it <i>"Immich"</i> and use the logo from the <a href="https://immich.app/">Immich homepage</a> <i>(right click the logo, "Save As", and reupload to Unraid)</i><br />Then simply select all the Immich related containers before clicking "<b>Submit</b>"</p>
|
||||
<img
|
||||
src={require('./img/unraid07.webp').default}
|
||||
width="80%"
|
||||
|
@ -1,7 +1,7 @@
|
||||
Immich allows the admin user to set the uploaded filename pattern, both at the directory and the filename level.
|
||||
|
||||
:::note new version
|
||||
On new machines running version 1.92.0, the storage template engine is off by default ([more info](https://github.com/immich-app/immich/releases/tag/v1.92.0#:~:text=the%20partner%E2%80%99s%20assets.-,Hardening%20storage%20template,-We%20have%20further)).
|
||||
On new machines running version 1.92.0 storage template engine is off by default, for [more info](https://github.com/immich-app/immich/releases#:~:text=the%20partner%E2%80%99s%20assets.-,Hardening%20storage%20template,-We%20have%20further).
|
||||
:::
|
||||
|
||||
:::tip
|
||||
|
48
docs/package-lock.json
generated
@ -3429,9 +3429,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@tsconfig/docusaurus": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/docusaurus/-/docusaurus-2.0.3.tgz",
|
||||
"integrity": "sha512-3l1L5PzWVa7l0691TjnsZ0yOIEwG9DziSqu5IPZPlI5Dowi7z42cEym8Y35GHbgHvPcBfNxfrbxm7Cncn4nByQ==",
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/docusaurus/-/docusaurus-2.0.2.tgz",
|
||||
"integrity": "sha512-12HWfYmgUl4M2o76/TFufGtI68wl2k/b8qPrIrG7ci9YJLrpAtadpy897Bz5v29Mlkr7a1Hq4KHdQTKtU+2rhQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/acorn": {
|
||||
@ -4264,9 +4264,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/autoprefixer": {
|
||||
"version": "10.4.19",
|
||||
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz",
|
||||
"integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==",
|
||||
"version": "10.4.18",
|
||||
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.18.tgz",
|
||||
"integrity": "sha512-1DKbDfsr6KUElM6wg+0zRNkB/Q7WcKYAaK+pzXn+Xqmszm/5Xa9coeNdtP88Vi+dPzZnMjhge8GIV49ZQkDa+g==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
@ -4283,7 +4283,7 @@
|
||||
],
|
||||
"dependencies": {
|
||||
"browserslist": "^4.23.0",
|
||||
"caniuse-lite": "^1.0.30001599",
|
||||
"caniuse-lite": "^1.0.30001591",
|
||||
"fraction.js": "^4.3.7",
|
||||
"normalize-range": "^0.1.2",
|
||||
"picocolors": "^1.0.0",
|
||||
@ -4728,9 +4728,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/caniuse-lite": {
|
||||
"version": "1.0.30001600",
|
||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001600.tgz",
|
||||
"integrity": "sha512-+2S9/2JFhYmYaDpZvo0lKkfvuKIglrx68MwOBqMGHhQsNkLjB5xtc/TGoEPs+MxjSyN/72qer2g97nzR641mOQ==",
|
||||
"version": "1.0.30001597",
|
||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001597.tgz",
|
||||
"integrity": "sha512-7LjJvmQU6Sj7bL0j5b5WY/3n7utXUJvAe1lxhsHDbLmwX9mdL86Yjtr+5SRCyf8qME4M7pU2hswj0FpyBVCv9w==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
@ -12691,9 +12691,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/postcss": {
|
||||
"version": "8.4.38",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz",
|
||||
"integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==",
|
||||
"version": "8.4.35",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz",
|
||||
"integrity": "sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
@ -12711,7 +12711,7 @@
|
||||
"dependencies": {
|
||||
"nanoid": "^3.3.7",
|
||||
"picocolors": "^1.0.0",
|
||||
"source-map-js": "^1.2.0"
|
||||
"source-map-js": "^1.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^10 || ^12 || >=14"
|
||||
@ -15295,9 +15295,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/source-map-js": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz",
|
||||
"integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==",
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz",
|
||||
"integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
@ -15781,9 +15781,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/tailwindcss": {
|
||||
"version": "3.4.3",
|
||||
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.3.tgz",
|
||||
"integrity": "sha512-U7sxQk/n397Bmx4JHbJx/iSOOv5G+II3f1kpLpY2QeUv5DcPdcTsYLlusZfq1NthHS1c1cZoyFmmkex1rzke0A==",
|
||||
"version": "3.4.1",
|
||||
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.1.tgz",
|
||||
"integrity": "sha512-qAYmXRfk3ENzuPBakNK0SRrUDipP8NQnEY6772uDhflcQz5EhRdD7JNZxyrFHVQNCwULPBn6FNPp9brpO7ctcA==",
|
||||
"dependencies": {
|
||||
"@alloc/quick-lru": "^5.2.0",
|
||||
"arg": "^5.0.2",
|
||||
@ -15793,7 +15793,7 @@
|
||||
"fast-glob": "^3.3.0",
|
||||
"glob-parent": "^6.0.2",
|
||||
"is-glob": "^4.0.3",
|
||||
"jiti": "^1.21.0",
|
||||
"jiti": "^1.19.1",
|
||||
"lilconfig": "^2.1.0",
|
||||
"micromatch": "^4.0.5",
|
||||
"normalize-path": "^3.0.0",
|
||||
@ -16141,9 +16141,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.4.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz",
|
||||
"integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==",
|
||||
"version": "5.4.2",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.2.tgz",
|
||||
"integrity": "sha512-+2/g0Fds1ERlP6JsakQQDXjZdZMM+rqpamFZJEKh4kwTIn3iDkgKtby0CeNd5ATNZ4Ry1ax15TMx0W2V+miizQ==",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
|
@ -1,66 +0,0 @@
|
||||
import Link from '@docusaurus/Link';
|
||||
import React from 'react';
|
||||
|
||||
interface CommunityProjectProps {
|
||||
title: string;
|
||||
description: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
const projects: CommunityProjectProps[] = [
|
||||
{
|
||||
title: 'immich-go',
|
||||
description: `An alternative to the immich-CLI command that doesn't depend on nodejs installation. It tries its best for importing google photos takeout archives.`,
|
||||
url: 'https://github.com/simulot/immich-go',
|
||||
},
|
||||
{
|
||||
title: 'ImmichFrame',
|
||||
description: 'Run an Immich slideshow in a photo frame.',
|
||||
url: 'https://github.com/3rob3/ImmichFrame',
|
||||
},
|
||||
{
|
||||
title: 'API Album Sync',
|
||||
description: 'A python script to sync folders as albums.',
|
||||
url: 'https://git.orenit.solutions/open/immichalbumpull',
|
||||
},
|
||||
{
|
||||
title: 'Remove offline files',
|
||||
description: 'A python script to remove offline files.',
|
||||
url: 'https://gist.github.com/Thoroslives/ca5d8e1efd15111febc1e7b34ac72668',
|
||||
},
|
||||
];
|
||||
|
||||
function CommunityProject({ title, description, url }: CommunityProjectProps): JSX.Element {
|
||||
return (
|
||||
<section className="flex flex-col gap-4 justify-between dark:bg-immich-dark-gray bg-immich-gray dark:border-0 border-gray-200 border border-solid rounded-2xl p-4">
|
||||
<div className="flex flex-col gap-2">
|
||||
<p className="m-0 items-start flex gap-2">
|
||||
<span>{title}</span>
|
||||
</p>
|
||||
|
||||
<p className="m-0 text-sm text-gray-600 dark:text-gray-300">{description}</p>
|
||||
<p className="m-0 text-sm text-gray-600 dark:text-gray-300">
|
||||
<a href={url}>{url}</a>
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex">
|
||||
<Link
|
||||
className="px-4 py-2 bg-immich-primary/10 dark:bg-gray-300 rounded-full hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
|
||||
to={url}
|
||||
>
|
||||
View Project
|
||||
</Link>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
|
||||
export default function CommunityProjects(): JSX.Element {
|
||||
return (
|
||||
<div className="grid grid-cols-1 xl:grid-cols-2 gap-4">
|
||||
{projects.map((project) => (
|
||||
<CommunityProject {...project} />
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
docs/static/_redirects (vendored; 4 lines changed)
@ -24,7 +24,3 @@
/docs/features/user-management /docs/administration/user-management 301
/docs/developer/contributing /docs/developer/pr-checklist 301
/docs/guides/machine-learning /docs/guides/remote-machine-learning 301
/docs/administration/password-login /docs/administration/system-settings 301
/docs/features/search /docs/features/smart-search 301
/docs/guides/api-album-sync /docs/community-projects 301
/docs/guides/remove-offline-files /docs/community-projects 301
@ -19,7 +19,6 @@ module.exports = {
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-floating-promises': 'error',
'unicorn/prefer-module': 'off',
'unicorn/import-style': 'off',
curly: 2,
'prettier/prettier': 0,
'unicorn/prevent-abbreviations': 'off',
e2e/package-lock.json (generated; 136 lines changed)
@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "immich-e2e",
|
||||
"version": "1.101.0",
|
||||
"version": "1.99.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "immich-e2e",
|
||||
"version": "1.101.0",
|
||||
"version": "1.99.0",
|
||||
"license": "GNU Affero General Public License version 3",
|
||||
"devDependencies": {
|
||||
"@immich/cli": "file:../cli",
|
||||
@ -23,7 +23,7 @@
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^52.0.0",
|
||||
"eslint-plugin-unicorn": "^51.0.1",
|
||||
"exiftool-vendored": "^24.5.0",
|
||||
"luxon": "^3.4.4",
|
||||
"pg": "^8.11.3",
|
||||
@ -38,7 +38,7 @@
|
||||
},
|
||||
"../cli": {
|
||||
"name": "@immich/cli",
|
||||
"version": "2.2.0",
|
||||
"version": "2.1.0",
|
||||
"dev": true,
|
||||
"license": "GNU Affero General Public License version 3",
|
||||
"dependencies": {
|
||||
@ -63,7 +63,7 @@
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^52.0.0",
|
||||
"eslint-plugin-unicorn": "^51.0.0",
|
||||
"glob": "^10.3.1",
|
||||
"mock-fs": "^5.2.0",
|
||||
"prettier": "^3.2.5",
|
||||
@ -80,7 +80,7 @@
|
||||
},
|
||||
"../open-api/typescript-sdk": {
|
||||
"name": "@immich/sdk",
|
||||
"version": "1.101.0",
|
||||
"version": "1.99.0",
|
||||
"dev": true,
|
||||
"license": "GNU Affero General Public License version 3",
|
||||
"dependencies": {
|
||||
@ -1158,9 +1158,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.11.30",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
|
||||
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
|
||||
"version": "20.11.28",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.28.tgz",
|
||||
"integrity": "sha512-M/GPWVS2wLkSkNHVeLkrF2fD5Lx5UC4PxA0uZcKc6QqbIQUJyW1jVjueJYi1z8n0I5PxYrtpnPnWglE+y9A0KA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~5.26.4"
|
||||
@ -1173,9 +1173,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/pg": {
|
||||
"version": "8.11.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.4.tgz",
|
||||
"integrity": "sha512-yw3Bwbda6vO+NvI1Ue/YKOwtl31AYvvd/e73O3V4ZkNzuGpTDndLSyc0dQRB2xrQqDePd20pEGIfqSp/GH3pRw==",
|
||||
"version": "8.11.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.2.tgz",
|
||||
"integrity": "sha512-G2Mjygf2jFMU/9hCaTYxJrwdObdcnuQde1gndooZSOHsNSaCehAuwc7EIuSA34Do8Jx2yZ19KtvW8P0j4EuUXw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
@ -1277,16 +1277,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.4.0.tgz",
|
||||
"integrity": "sha512-yHMQ/oFaM7HZdVrVm/M2WHaNPgyuJH4WelkSVEWSSsir34kxW2kDJCxlXRhhGWEsMN0WAW/vLpKfKVcm8k+MPw==",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.2.0.tgz",
|
||||
"integrity": "sha512-mdekAHOqS9UjlmyF/LSs6AIEvfceV749GFxoBAjwAv0nkevfKHWQFDMcBZWUiIC5ft6ePWivXoS36aKQ0Cy3sw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/regexpp": "^4.5.1",
|
||||
"@typescript-eslint/scope-manager": "7.4.0",
|
||||
"@typescript-eslint/type-utils": "7.4.0",
|
||||
"@typescript-eslint/utils": "7.4.0",
|
||||
"@typescript-eslint/visitor-keys": "7.4.0",
|
||||
"@typescript-eslint/scope-manager": "7.2.0",
|
||||
"@typescript-eslint/type-utils": "7.2.0",
|
||||
"@typescript-eslint/utils": "7.2.0",
|
||||
"@typescript-eslint/visitor-keys": "7.2.0",
|
||||
"debug": "^4.3.4",
|
||||
"graphemer": "^1.4.0",
|
||||
"ignore": "^5.2.4",
|
||||
@ -1295,7 +1295,7 @@
|
||||
"ts-api-utils": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || >=20.0.0"
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@ -1312,19 +1312,19 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.4.0.tgz",
|
||||
"integrity": "sha512-ZvKHxHLusweEUVwrGRXXUVzFgnWhigo4JurEj0dGF1tbcGh6buL+ejDdjxOQxv6ytcY1uhun1p2sm8iWStlgLQ==",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.2.0.tgz",
|
||||
"integrity": "sha512-5FKsVcHTk6TafQKQbuIVkXq58Fnbkd2wDL4LB7AURN7RUOu1utVP+G8+6u3ZhEroW3DF6hyo3ZEXxgKgp4KeCg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": "7.4.0",
|
||||
"@typescript-eslint/types": "7.4.0",
|
||||
"@typescript-eslint/typescript-estree": "7.4.0",
|
||||
"@typescript-eslint/visitor-keys": "7.4.0",
|
||||
"@typescript-eslint/scope-manager": "7.2.0",
|
||||
"@typescript-eslint/types": "7.2.0",
|
||||
"@typescript-eslint/typescript-estree": "7.2.0",
|
||||
"@typescript-eslint/visitor-keys": "7.2.0",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || >=20.0.0"
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@ -1340,16 +1340,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.4.0.tgz",
|
||||
"integrity": "sha512-68VqENG5HK27ypafqLVs8qO+RkNc7TezCduYrx8YJpXq2QGZ30vmNZGJJJC48+MVn4G2dCV8m5ZTVnzRexTVtw==",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.2.0.tgz",
|
||||
"integrity": "sha512-Qh976RbQM/fYtjx9hs4XkayYujB/aPwglw2choHmf3zBjB4qOywWSdt9+KLRdHubGcoSwBnXUH2sR3hkyaERRg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "7.4.0",
|
||||
"@typescript-eslint/visitor-keys": "7.4.0"
|
||||
"@typescript-eslint/types": "7.2.0",
|
||||
"@typescript-eslint/visitor-keys": "7.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || >=20.0.0"
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@ -1357,18 +1357,18 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.4.0.tgz",
|
||||
"integrity": "sha512-247ETeHgr9WTRMqHbbQdzwzhuyaJ8dPTuyuUEMANqzMRB1rj/9qFIuIXK7l0FX9i9FXbHeBQl/4uz6mYuCE7Aw==",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.2.0.tgz",
|
||||
"integrity": "sha512-xHi51adBHo9O9330J8GQYQwrKBqbIPJGZZVQTHHmy200hvkLZFWJIFtAG/7IYTWUyun6DE6w5InDReePJYJlJA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/typescript-estree": "7.4.0",
|
||||
"@typescript-eslint/utils": "7.4.0",
|
||||
"@typescript-eslint/typescript-estree": "7.2.0",
|
||||
"@typescript-eslint/utils": "7.2.0",
|
||||
"debug": "^4.3.4",
|
||||
"ts-api-utils": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || >=20.0.0"
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@ -1384,12 +1384,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/types": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.4.0.tgz",
|
||||
"integrity": "sha512-mjQopsbffzJskos5B4HmbsadSJQWaRK0UxqQ7GuNA9Ga4bEKeiO6b2DnB6cM6bpc8lemaPseh0H9B/wyg+J7rw==",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.2.0.tgz",
|
||||
"integrity": "sha512-XFtUHPI/abFhm4cbCDc5Ykc8npOKBSJePY3a3s+lwumt7XWJuzP5cZcfZ610MIPHjQjNsOLlYK8ASPaNG8UiyA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^18.18.0 || >=20.0.0"
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@ -1397,13 +1397,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.4.0.tgz",
|
||||
"integrity": "sha512-A99j5AYoME/UBQ1ucEbbMEmGkN7SE0BvZFreSnTd1luq7yulcHdyGamZKizU7canpGDWGJ+Q6ZA9SyQobipePg==",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.2.0.tgz",
|
||||
"integrity": "sha512-cyxS5WQQCoBwSakpMrvMXuMDEbhOo9bNHHrNcEWis6XHx6KF518tkF1wBvKIn/tpq5ZpUYK7Bdklu8qY0MsFIA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "7.4.0",
|
||||
"@typescript-eslint/visitor-keys": "7.4.0",
|
||||
"@typescript-eslint/types": "7.2.0",
|
||||
"@typescript-eslint/visitor-keys": "7.2.0",
|
||||
"debug": "^4.3.4",
|
||||
"globby": "^11.1.0",
|
||||
"is-glob": "^4.0.3",
|
||||
@ -1412,7 +1412,7 @@
|
||||
"ts-api-utils": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || >=20.0.0"
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@ -1449,21 +1449,21 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/utils": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.4.0.tgz",
|
||||
"integrity": "sha512-NQt9QLM4Tt8qrlBVY9lkMYzfYtNz8/6qwZg8pI3cMGlPnj6mOpRxxAm7BMJN9K0AiY+1BwJ5lVC650YJqYOuNg==",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.2.0.tgz",
|
||||
"integrity": "sha512-YfHpnMAGb1Eekpm3XRK8hcMwGLGsnT6L+7b2XyRv6ouDuJU1tZir1GS2i0+VXRatMwSI1/UfcyPe53ADkU+IuA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.4.0",
|
||||
"@types/json-schema": "^7.0.12",
|
||||
"@types/semver": "^7.5.0",
|
||||
"@typescript-eslint/scope-manager": "7.4.0",
|
||||
"@typescript-eslint/types": "7.4.0",
|
||||
"@typescript-eslint/typescript-estree": "7.4.0",
|
||||
"@typescript-eslint/scope-manager": "7.2.0",
|
||||
"@typescript-eslint/types": "7.2.0",
|
||||
"@typescript-eslint/typescript-estree": "7.2.0",
|
||||
"semver": "^7.5.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || >=20.0.0"
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@ -1474,16 +1474,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.4.0.tgz",
|
||||
"integrity": "sha512-0zkC7YM0iX5Y41homUUeW1CHtZR01K3ybjM1l6QczoMuay0XKtrb93kv95AxUGwdjGr64nNqnOCwmEl616N8CA==",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.2.0.tgz",
|
||||
"integrity": "sha512-c6EIQRHhcpl6+tO8EMR+kjkkV+ugUNXOmeASA1rlzkd8EPIriavpWoiEz1HR/VLhbVIdhqnV6E7JZm00cBDx2A==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "7.4.0",
|
||||
"@typescript-eslint/types": "7.2.0",
|
||||
"eslint-visitor-keys": "^3.4.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || >=20.0.0"
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@ -2343,9 +2343,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-plugin-unicorn": {
|
||||
"version": "52.0.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-52.0.0.tgz",
|
||||
"integrity": "sha512-1Yzm7/m+0R4djH0tjDjfVei/ju2w3AzUGjG6q8JnuNIL5xIwsflyCooW5sfBvQp2pMYQFSWWCFONsjCax1EHng==",
|
||||
"version": "51.0.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-51.0.1.tgz",
|
||||
"integrity": "sha512-MuR/+9VuB0fydoI0nIn2RDA5WISRn4AsJyNSaNKLVwie9/ONvQhxOBbkfSICBPnzKrB77Fh6CZZXjgTt/4Latw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@babel/helper-validator-identifier": "^7.22.20",
|
||||
@ -4790,9 +4790,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.4.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz",
|
||||
"integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==",
|
||||
"version": "5.4.2",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.2.tgz",
|
||||
"integrity": "sha512-+2/g0Fds1ERlP6JsakQQDXjZdZMM+rqpamFZJEKh4kwTIn3iDkgKtby0CeNd5ATNZ4Ry1ax15TMx0W2V+miizQ==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
|
@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.101.0",
"version": "1.99.0",
"description": "",
"main": "index.js",
"type": "module",
@ -33,7 +33,7 @@
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^52.0.0",
"eslint-plugin-unicorn": "^51.0.1",
"exiftool-vendored": "^24.5.0",
"luxon": "^3.4.4",
"pg": "^8.11.3",
@ -148,7 +148,7 @@ describe('/activity', () => {
});

it('should filter by userId', async () => {
const reaction = await createActivity({ albumId: album.id, type: ReactionType.Like });
const [reaction] = await Promise.all([createActivity({ albumId: album.id, type: ReactionType.Like })]);

const response1 = await request(app)
.get('/activity')
@ -250,7 +250,8 @@ describe('/activity', () => {
});

it('should return a 200 for a duplicate like on the album', async () => {
const reaction = await createActivity({ albumId: album.id, type: ReactionType.Like });
const [reaction] = await Promise.all([createActivity({ albumId: album.id, type: ReactionType.Like })]);

const { status, body } = await request(app)
.post('/activity')
.set('Authorization', `Bearer ${admin.accessToken}`)
@ -260,11 +261,13 @@ describe('/activity', () => {
});

it('should not confuse an album like with an asset like', async () => {
const reaction = await createActivity({
const [reaction] = await Promise.all([
createActivity({
albumId: album.id,
assetId: asset.id,
type: ReactionType.Like,
});
}),
]);
const { status, body } = await request(app)
.post('/activity')
.set('Authorization', `Bearer ${admin.accessToken}`)
@ -311,11 +314,13 @@ describe('/activity', () => {
});

it('should return a 200 for a duplicate like on an asset', async () => {
const reaction = await createActivity({
const [reaction] = await Promise.all([
createActivity({
albumId: album.id,
assetId: asset.id,
type: ReactionType.Like,
});
}),
]);

const { status, body } = await request(app)
.post('/activity')
@ -5,6 +5,7 @@ import {
|
||||
LibraryResponseDto,
|
||||
LoginResponseDto,
|
||||
SharedLinkType,
|
||||
TimeBucketSize,
|
||||
getAllLibraries,
|
||||
getAssetInfo,
|
||||
updateAssets,
|
||||
@ -111,7 +112,7 @@ describe('/asset', () => {
|
||||
utils.createAsset(user1.accessToken),
|
||||
]);
|
||||
|
||||
user2Assets = [await utils.createAsset(user2.accessToken)];
|
||||
user2Assets = await Promise.all([utils.createAsset(user2.accessToken)]);
|
||||
|
||||
await Promise.all([
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-01-01').toISOString() }),
|
||||
@ -941,6 +942,146 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /asset/time-buckets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/asset/time-buckets').query({ size: TimeBucketSize.Month });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should get time buckets by month', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/asset/time-buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ size: TimeBucketSize.Month });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ count: 3, timeBucket: '1970-02-01T00:00:00.000Z' },
|
||||
{ count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not allow access for unrelated shared links', async () => {
|
||||
const sharedLink = await utils.createSharedLink(user1.accessToken, {
|
||||
type: SharedLinkType.Individual,
|
||||
assetIds: user1Assets.map(({ id }) => id),
|
||||
});
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.get('/asset/time-buckets')
|
||||
.query({ key: sharedLink.key, size: TimeBucketSize.Month });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should get time buckets by day', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/asset/time-buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ size: TimeBucketSize.Day });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual([
|
||||
{ count: 2, timeBucket: '1970-02-11T00:00:00.000Z' },
|
||||
{ count: 1, timeBucket: '1970-02-10T00:00:00.000Z' },
|
||||
{ count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /asset/time-bucket', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/asset/time-bucket').query({
|
||||
size: TimeBucketSize.Month,
|
||||
timeBucket: '1900-01-01T00:00:00.000Z',
|
||||
});
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should handle 5 digit years', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/asset/time-bucket')
|
||||
.query({ size: TimeBucketSize.Month, timeBucket: '+012345-01-01T00:00:00.000Z' })
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual([]);
|
||||
});
|
||||
|
||||
// TODO enable date string validation while still accepting 5 digit years
|
||||
// it('should fail if time bucket is invalid', async () => {
|
||||
// const { status, body } = await request(app)
|
||||
// .get('/asset/time-bucket')
|
||||
// .set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
// .query({ size: TimeBucketSize.Month, timeBucket: 'foo' });
|
||||
|
||||
// expect(status).toBe(400);
|
||||
// expect(body).toEqual(errorDto.badRequest);
|
||||
// });
|
||||
|
||||
it('should return time bucket', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/asset/time-bucket')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ size: TimeBucketSize.Month, timeBucket: '1970-02-10T00:00:00.000Z' });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and archived', async () => {
|
||||
const req1 = await request(app)
|
||||
.get('/asset/time-buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ size: TimeBucketSize.Month, withPartners: true, isArchived: true });
|
||||
|
||||
expect(req1.status).toBe(400);
|
||||
expect(req1.body).toEqual(errorDto.badRequest());
|
||||
|
||||
const req2 = await request(app)
|
||||
.get('/asset/time-buckets')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.query({ size: TimeBucketSize.Month, withPartners: true, isArchived: undefined });
|
||||
|
||||
expect(req2.status).toBe(400);
|
||||
expect(req2.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and favorite', async () => {
|
||||
const req1 = await request(app)
|
||||
.get('/asset/time-buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: true });
|
||||
|
||||
expect(req1.status).toBe(400);
|
||||
expect(req1.body).toEqual(errorDto.badRequest());
|
||||
|
||||
const req2 = await request(app)
|
||||
.get('/asset/time-buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: false });
|
||||
|
||||
expect(req2.status).toBe(400);
|
||||
expect(req2.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and trash', async () => {
|
||||
const req = await request(app)
|
||||
.get('/asset/time-buckets')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.query({ size: TimeBucketSize.Month, withPartners: true, isTrashed: true });
|
||||
|
||||
expect(req.status).toBe(400);
|
||||
expect(req.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /asset', () => {
|
||||
it('should return stack data', async () => {
|
||||
const { status, body } = await request(app).get('/asset').set('Authorization', `Bearer ${stackUser.accessToken}`);
|
||||
|
@ -1,376 +0,0 @@
|
||||
import {
|
||||
AssetFileUploadResponseDto,
|
||||
LoginResponseDto,
|
||||
MemoryResponseDto,
|
||||
MemoryType,
|
||||
createMemory,
|
||||
getMemory,
|
||||
} from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
describe('/memories', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let user: LoginResponseDto;
|
||||
let adminAsset: AssetFileUploadResponseDto;
|
||||
let userAsset1: AssetFileUploadResponseDto;
|
||||
let userAsset2: AssetFileUploadResponseDto;
|
||||
let userMemory: MemoryResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
|
||||
admin = await utils.adminSetup();
|
||||
user = await utils.userSetup(admin.accessToken, createUserDto.user1);
|
||||
[adminAsset, userAsset1, userAsset2] = await Promise.all([
|
||||
utils.createAsset(admin.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
]);
|
||||
userMemory = await createMemory(
|
||||
{
|
||||
memoryCreateDto: {
|
||||
type: MemoryType.OnThisDay,
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
data: { year: 2021 },
|
||||
assetIds: [],
|
||||
},
|
||||
},
|
||||
{ headers: asBearerAuth(user.accessToken) },
|
||||
);
|
||||
});
|
||||
|
||||
describe('GET /memories', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/memories');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /memories', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/memories');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should validate data when type is on this day', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: {},
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
});
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(
|
||||
errorDto.badRequest(['data.year must be a positive number', 'data.year must be an integer number']),
|
||||
);
|
||||
});
|
||||
|
||||
it('should create a new memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
deletedAt: null,
|
||||
seenAt: null,
|
||||
isSaved: false,
|
||||
memoryAt: expect.any(String),
|
||||
ownerId: user.userId,
|
||||
assets: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a new memory (with assets)', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
assetIds: [userAsset1.id, userAsset2.id],
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
id: expect.any(String),
|
||||
assets: expect.arrayContaining([
|
||||
expect.objectContaining({ id: userAsset1.id }),
|
||||
expect.objectContaining({ id: userAsset2.id }),
|
||||
]),
|
||||
});
|
||||
expect(body.assets).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should create a new memory and ignore assets the user does not have access to', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
assetIds: [userAsset1.id, adminAsset.id],
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
id: expect.any(String),
|
||||
assets: [expect.objectContaining({ id: userAsset1.id })],
|
||||
});
|
||||
expect(body.assets).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/memories/${uuidDto.invalid}`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/memories/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/memories/${userMemory.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should get the memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/memories/${userMemory.id}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toMatchObject({ id: userMemory.id });
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/memories/${uuidDto.invalid}`).send({ isSaved: true });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${uuidDto.invalid}`)
|
||||
.send({ isSaved: true })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}`)
|
||||
.send({ isSaved: true })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should update the memory', async () => {
|
||||
const before = await getMemory({ id: userMemory.id }, { headers: asBearerAuth(user.accessToken) });
|
||||
expect(before.isSaved).toBe(false);
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}`)
|
||||
.send({ isSaved: true })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toMatchObject({
|
||||
id: userMemory.id,
|
||||
isSaved: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /memories/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${uuidDto.invalid}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should require a valid asset id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [uuidDto.invalid] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require asset access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [adminAsset.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toHaveLength(1);
|
||||
expect(body[0]).toEqual({
|
||||
id: adminAsset.id,
|
||||
success: false,
|
||||
error: 'no_permission',
|
||||
});
|
||||
});
|
||||
|
||||
it('should add assets to the memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toHaveLength(1);
|
||||
expect(body[0]).toEqual({ id: userAsset1.id, success: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /memories/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${uuidDto.invalid}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should require a valid asset id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [uuidDto.invalid] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
|
||||
});
|
||||
|
||||
it('should only remove assets in the memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [adminAsset.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toHaveLength(1);
|
||||
expect(body[0]).toEqual({
|
||||
id: adminAsset.id,
|
||||
success: false,
|
||||
error: 'not_found',
|
||||
});
|
||||
});
|
||||
|
||||
it('should remove assets from the memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toHaveLength(1);
|
||||
expect(body[0]).toEqual({ id: userAsset1.id, success: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).delete(`/memories/${uuidDto.invalid}`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should delete the memory', async () => {
|
||||
const { status } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}`)
|
||||
.send({ isSaved: true })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(204);
|
||||
});
|
||||
});
|
||||
});
|
@ -1,4 +1,4 @@
|
||||
import { AssetFileUploadResponseDto, LoginResponseDto, deleteAssets, updateAsset } from '@immich/sdk';
|
||||
import { AssetFileUploadResponseDto, LoginResponseDto, deleteAssets } from '@immich/sdk';
|
||||
import { DateTime } from 'luxon';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { join } from 'node:path';
|
||||
@ -7,6 +7,7 @@ import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, testAssetDir, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
const today = DateTime.now();
|
||||
|
||||
describe('/search', () => {
|
||||
@ -18,7 +19,7 @@ describe('/search', () => {
|
||||
let assetCyclamen: AssetFileUploadResponseDto;
|
||||
let assetNotocactus: AssetFileUploadResponseDto;
|
||||
let assetSilver: AssetFileUploadResponseDto;
|
||||
let assetDensity: AssetFileUploadResponseDto;
|
||||
// let assetDensity: AssetFileUploadResponseDto;
|
||||
// let assetPhiladelphia: AssetFileUploadResponseDto;
|
||||
// let assetOrychophragmus: AssetFileUploadResponseDto;
|
||||
// let assetRidge: AssetFileUploadResponseDto;
|
||||
@ -78,37 +79,6 @@ describe('/search', () => {
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
}
|
||||
|
||||
// note: the coordinates here are not the actual coordinates of the images and are random for most of them
|
||||
const cities = [
|
||||
{ latitude: 48.853_41, longitude: 2.3488 }, // paris
|
||||
{ latitude: 63.0695, longitude: -151.0074 }, // denali
|
||||
{ latitude: 52.524_37, longitude: 13.410_53 }, // berlin
|
||||
{ latitude: 1.314_663_1, longitude: 103.845_409_3 }, // singapore
|
||||
{ latitude: 41.013_84, longitude: 28.949_66 }, // istanbul
|
||||
{ latitude: 5.556_02, longitude: -0.1969 }, // accra
|
||||
{ latitude: 37.544_270_6, longitude: -4.727_752_8 }, // andalusia
|
||||
{ latitude: 23.133_02, longitude: -82.383_04 }, // havana
|
||||
{ latitude: 41.694_11, longitude: 44.833_68 }, // tbilisi
|
||||
{ latitude: 31.222_22, longitude: 121.458_06 }, // shanghai
|
||||
{ latitude: 47.040_57, longitude: 9.068_04 }, // glarus
|
||||
{ latitude: 38.9711, longitude: -109.7137 }, // thompson springs
|
||||
{ latitude: 40.714_27, longitude: -74.005_97 }, // new york
|
||||
{ latitude: 32.771_52, longitude: -89.116_73 }, // philadelphia
|
||||
{ latitude: 31.634_16, longitude: -7.999_94 }, // marrakesh
|
||||
{ latitude: 38.523_735_4, longitude: -78.488_619_4 }, // tanners ridge
|
||||
{ latitude: 59.938_63, longitude: 30.314_13 }, // st. petersburg
|
||||
{ latitude: 35.6895, longitude: 139.691_71 }, // tokyo
|
||||
];
|
||||
|
||||
const updates = assets.map((asset, i) =>
|
||||
updateAsset({ id: asset.id, updateAssetDto: cities[i] }, { headers: asBearerAuth(admin.accessToken) }),
|
||||
);
|
||||
|
||||
await Promise.all(updates);
|
||||
for (const asset of assets) {
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpdate', id: asset.id });
|
||||
}
|
||||
|
||||
[
|
||||
assetFalcon,
|
||||
assetDenali,
|
||||
@ -122,7 +92,7 @@ describe('/search', () => {
|
||||
assetOneJpg5,
|
||||
assetGlarus,
|
||||
assetSprings,
|
||||
assetDensity,
|
||||
// assetDensity,
|
||||
// assetPhiladelphia,
|
||||
// assetOrychophragmus,
|
||||
// assetRidge,
|
||||
@ -133,10 +103,10 @@ describe('/search', () => {
|
||||
assetLast = assets.at(-1) as AssetFileUploadResponseDto;
|
||||
|
||||
await deleteAssets({ assetBulkDeleteDto: { ids: [assetSilver.id] } }, { headers: asBearerAuth(admin.accessToken) });
|
||||
}, 30_000);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
utils.disconnectWebsocket(websocket);
|
||||
await utils.disconnectWebsocket(websocket);
|
||||
});
|
||||
|
||||
describe('POST /search/metadata', () => {
|
||||
@ -328,15 +298,15 @@ describe('/search', () => {
|
||||
},
|
||||
{
|
||||
should: 'should search by city',
|
||||
deferred: () => ({ dto: { city: 'Accra' }, assets: [assetHeic] }),
|
||||
deferred: () => ({ dto: { city: 'Ralston' }, assets: [assetHeic] }),
|
||||
},
|
||||
{
|
||||
should: 'should search by state',
|
||||
deferred: () => ({ dto: { state: 'New York' }, assets: [assetDensity] }),
|
||||
deferred: () => ({ dto: { state: 'Douglas County, Nebraska' }, assets: [assetHeic] }),
|
||||
},
|
||||
{
|
||||
should: 'should search by country',
|
||||
deferred: () => ({ dto: { country: 'France' }, assets: [assetFalcon] }),
|
||||
deferred: () => ({ dto: { country: 'United States of America' }, assets: [assetHeic] }),
|
||||
},
|
||||
{
|
||||
should: 'should search by make',
|
||||
@ -400,44 +370,13 @@ describe('/search', () => {
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should get relevant places', async () => {
|
||||
const name = 'Paris';
|
||||
|
||||
it('should get places', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/search/places?name=${name}`)
|
||||
.get('/search/places?name=Paris')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(Array.isArray(body)).toBe(true);
|
||||
if (Array.isArray(body)) {
|
||||
expect(body.length).toBeGreaterThan(10);
|
||||
expect(body[0].name).toEqual(name);
|
||||
expect(body[0].admin2name).toEqual(name);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /search/cities', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/search/cities');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should get all cities', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/cities')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(Array.isArray(body)).toBe(true);
|
||||
if (Array.isArray(body)) {
|
||||
expect(body.length).toBeGreaterThan(10);
|
||||
const assetsWithCity = body.filter((asset) => !!asset.exifInfo?.city);
|
||||
expect(assetsWithCity.length).toEqual(body.length);
|
||||
const cities = new Set(assetsWithCity.map((asset) => asset.exifInfo.city));
|
||||
expect(cities.size).toEqual(body.length);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@ -452,21 +391,7 @@ describe('/search', () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/suggestions?type=country')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([
|
||||
'Cuba',
|
||||
'France',
|
||||
'Georgia',
|
||||
'Germany',
|
||||
'Ghana',
|
||||
'Japan',
|
||||
'Morocco',
|
||||
"People's Republic of China",
|
||||
'Russian Federation',
|
||||
'Singapore',
|
||||
'Spain',
|
||||
'Switzerland',
|
||||
'United States of America',
|
||||
]);
|
||||
expect(body).toEqual(['United States of America']);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
@ -474,23 +399,7 @@ describe('/search', () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/suggestions?type=state')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([
|
||||
'Accra, Greater Accra',
|
||||
'Berlin',
|
||||
'Glarus, Glarus',
|
||||
'Havana',
|
||||
'Marrakech, Marrakesh-Safi',
|
||||
'Mesa County, Colorado',
|
||||
'Neshoba County, Mississippi',
|
||||
'New York',
|
||||
'Page County, Virginia',
|
||||
'Paris, Île-de-France',
|
||||
'Province of Córdoba, Andalusia',
|
||||
'Shanghai Municipality, Shanghai',
|
||||
'St.-Petersburg',
|
||||
'Tbilisi',
|
||||
'Tokyo',
|
||||
]);
|
||||
expect(body).toEqual(['Douglas County, Nebraska', 'Mesa County, Colorado']);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
@ -498,24 +407,7 @@ describe('/search', () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/suggestions?type=city')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([
|
||||
'Accra',
|
||||
'Berlin',
|
||||
'Glarus',
|
||||
'Havana',
|
||||
'Marrakesh',
|
||||
'Montalbán de Córdoba',
|
||||
'New York City',
|
||||
'Palisade',
|
||||
'Paris',
|
||||
'Philadelphia',
|
||||
'Saint Petersburg',
|
||||
'Shanghai',
|
||||
'Singapore',
|
||||
'Stanley',
|
||||
'Tbilisi',
|
||||
'Tokyo',
|
||||
]);
|
||||
expect(body).toEqual(['Palisade', 'Ralston']);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
|
@ -1,4 +1,4 @@
import { AssetFileUploadResponseDto, LoginResponseDto, SharedLinkType, getConfig } from '@immich/sdk';
import { LoginResponseDto, getConfig } from '@immich/sdk';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
@ -10,14 +10,11 @@ const getSystemConfig = (accessToken: string) => getConfig({ headers: asBearerAu
describe('/system-config', () => {
let admin: LoginResponseDto;
let nonAdmin: LoginResponseDto;
let asset: AssetFileUploadResponseDto;

beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);

asset = await utils.createAsset(admin.accessToken);
});

describe('GET /system-config/map/style.json', () => {
@ -27,19 +24,6 @@ describe('/system-config', () => {
expect(body).toEqual(errorDto.unauthorized);
});

it('should allow shared link access', async () => {
const sharedLink = await utils.createSharedLink(admin.accessToken, {
type: SharedLinkType.Individual,
assetIds: [asset.id],
});
const { status, body } = await request(app)
.get(`/system-config/map/style.json?key=${sharedLink.key}`)
.query({ theme: 'dark' });

expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});

it('should throw an error if a theme is not light or dark', async () => {
for (const theme of ['dark1', true, 123, '', null, undefined]) {
const { status, body } = await request(app)
@ -1,193 +0,0 @@
import { AssetFileUploadResponseDto, LoginResponseDto, SharedLinkType, TimeBucketSize } from '@immich/sdk';
import { DateTime } from 'luxon';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';

// TODO this should probably be a test util function
const today = DateTime.fromObject({
  year: 2023,
  month: 11,
  day: 3,
}) as DateTime<true>;
const yesterday = today.minus({ days: 1 });

describe('/timeline', () => {
  let admin: LoginResponseDto;
  let user: LoginResponseDto;
  let timeBucketUser: LoginResponseDto;

  let userAssets: AssetFileUploadResponseDto[];

  beforeAll(async () => {
    await utils.resetDatabase();
    admin = await utils.adminSetup({ onboarding: false });
    [user, timeBucketUser] = await Promise.all([
      utils.userSetup(admin.accessToken, createUserDto.create('1')),
      utils.userSetup(admin.accessToken, createUserDto.create('time-bucket')),
    ]);

    userAssets = await Promise.all([
      utils.createAsset(user.accessToken),
      utils.createAsset(user.accessToken),
      utils.createAsset(user.accessToken, {
        isFavorite: true,
        isReadOnly: true,
        fileCreatedAt: yesterday.toISO(),
        fileModifiedAt: yesterday.toISO(),
        assetData: { filename: 'example.mp4' },
      }),
      utils.createAsset(user.accessToken),
      utils.createAsset(user.accessToken),
    ]);

    await Promise.all([
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-01-01').toISOString() }),
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-10').toISOString() }),
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
    ]);
  });

  describe('GET /timeline/buckets', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get('/timeline/buckets').query({ size: TimeBucketSize.Month });
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should get time buckets by month', async () => {
      const { status, body } = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ size: TimeBucketSize.Month });

      expect(status).toBe(200);
      expect(body).toEqual(
        expect.arrayContaining([
          { count: 3, timeBucket: '1970-02-01T00:00:00.000Z' },
          { count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
        ]),
      );
    });

    it('should not allow access for unrelated shared links', async () => {
      const sharedLink = await utils.createSharedLink(user.accessToken, {
        type: SharedLinkType.Individual,
        assetIds: userAssets.map(({ id }) => id),
      });

      const { status, body } = await request(app)
        .get('/timeline/buckets')
        .query({ key: sharedLink.key, size: TimeBucketSize.Month });

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should get time buckets by day', async () => {
      const { status, body } = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ size: TimeBucketSize.Day });

      expect(status).toBe(200);
      expect(body).toEqual([
        { count: 2, timeBucket: '1970-02-11T00:00:00.000Z' },
        { count: 1, timeBucket: '1970-02-10T00:00:00.000Z' },
        { count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
      ]);
    });

    it('should return error if time bucket is requested with partners asset and archived', async () => {
      const req1 = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ size: TimeBucketSize.Month, withPartners: true, isArchived: true });

      expect(req1.status).toBe(400);
      expect(req1.body).toEqual(errorDto.badRequest());

      const req2 = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${user.accessToken}`)
        .query({ size: TimeBucketSize.Month, withPartners: true, isArchived: undefined });

      expect(req2.status).toBe(400);
      expect(req2.body).toEqual(errorDto.badRequest());
    });

    it('should return error if time bucket is requested with partners asset and favorite', async () => {
      const req1 = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: true });

      expect(req1.status).toBe(400);
      expect(req1.body).toEqual(errorDto.badRequest());

      const req2 = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: false });

      expect(req2.status).toBe(400);
      expect(req2.body).toEqual(errorDto.badRequest());
    });

    it('should return error if time bucket is requested with partners asset and trash', async () => {
      const req = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${user.accessToken}`)
        .query({ size: TimeBucketSize.Month, withPartners: true, isTrashed: true });

      expect(req.status).toBe(400);
      expect(req.body).toEqual(errorDto.badRequest());
    });
  });

  describe('GET /timeline/bucket', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get('/timeline/bucket').query({
        size: TimeBucketSize.Month,
        timeBucket: '1900-01-01T00:00:00.000Z',
      });

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should handle 5 digit years', async () => {
      const { status, body } = await request(app)
        .get('/timeline/bucket')
        .query({ size: TimeBucketSize.Month, timeBucket: '+012345-01-01T00:00:00.000Z' })
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`);

      expect(status).toBe(200);
      expect(body).toEqual([]);
    });

    // TODO enable date string validation while still accepting 5 digit years
    // it('should fail if time bucket is invalid', async () => {
    //   const { status, body } = await request(app)
    //     .get('/timeline/bucket')
    //     .set('Authorization', `Bearer ${user.accessToken}`)
    //     .query({ size: TimeBucketSize.Month, timeBucket: 'foo' });

    //   expect(status).toBe(400);
    //   expect(body).toEqual(errorDto.badRequest);
    // });

    it('should return time bucket', async () => {
      const { status, body } = await request(app)
        .get('/timeline/bucket')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ size: TimeBucketSize.Month, timeBucket: '1970-02-10T00:00:00.000Z' });

      expect(status).toBe(200);
      expect(body).toEqual([]);
    });
  });
});
@ -2,25 +2,25 @@ import { stat } from 'node:fs/promises';
import { app, immichCli, utils } from 'src/utils';
import { beforeEach, describe, expect, it } from 'vitest';

describe(`immich login`, () => {
describe(`immich login-key`, () => {
  beforeEach(async () => {
    await utils.resetDatabase();
  });

  it('should require a url', async () => {
    const { stderr, exitCode } = await immichCli(['login']);
    const { stderr, exitCode } = await immichCli(['login-key']);
    expect(stderr).toBe("error: missing required argument 'url'");
    expect(exitCode).toBe(1);
  });

  it('should require a key', async () => {
    const { stderr, exitCode } = await immichCli(['login', app]);
    const { stderr, exitCode } = await immichCli(['login-key', app]);
    expect(stderr).toBe("error: missing required argument 'key'");
    expect(exitCode).toBe(1);
  });

  it('should require a valid key', async () => {
    const { stderr, exitCode } = await immichCli(['login', app, 'immich-is-so-cool']);
    const { stderr, exitCode } = await immichCli(['login-key', app, 'immich-is-so-cool']);
    expect(stderr).toContain('Failed to connect to server');
    expect(stderr).toContain('Invalid API key');
    expect(stderr).toContain('401');
@ -30,7 +30,7 @@ describe(`immich login`, () => {
  it('should login and save auth.yml with 600', async () => {
    const admin = await utils.adminSetup();
    const key = await utils.createApiKey(admin.accessToken);
    const { stdout, stderr, exitCode } = await immichCli(['login', app, `${key.secret}`]);
    const { stdout, stderr, exitCode } = await immichCli(['login-key', app, `${key.secret}`]);
    expect(stdout.split('\n')).toEqual([
      'Logging in to http://127.0.0.1:2283/api',
      'Logged in as admin@immich.cloud',
@ -47,7 +47,7 @@ describe(`immich login`, () => {
  it('should login without /api in the url', async () => {
    const admin = await utils.adminSetup();
    const key = await utils.createApiKey(admin.accessToken);
    const { stdout, stderr, exitCode } = await immichCli(['login', app.replaceAll('/api', ''), `${key.secret}`]);
    const { stdout, stderr, exitCode } = await immichCli(['login-key', app.replaceAll('/api', ''), `${key.secret}`]);
    expect(stdout.split('\n')).toEqual([
      'Logging in to http://127.0.0.1:2283',
      'Discovered API at http://127.0.0.1:2283/api',
@ -4,20 +4,16 @@ import { beforeAll, describe, expect, it } from 'vitest';
describe(`immich server-info`, () => {
  beforeAll(async () => {
    await utils.resetDatabase();
    const admin = await utils.adminSetup();
    await utils.cliLogin(admin.accessToken);
    await utils.cliLogin();
  });

  it('should return the server info', async () => {
    const { stderr, stdout, exitCode } = await immichCli(['server-info']);
    expect(stdout.split('\n')).toEqual([
      expect.stringContaining('Server Info (via admin@immich.cloud'),
      ' Url: http://127.0.0.1:2283/api',
      expect.stringContaining('Version:'),
      ' Formats:',
      expect.stringContaining('Images:'),
      expect.stringContaining('Videos:'),
      ' Statistics:',
      expect.stringContaining('Server Version:'),
      expect.stringContaining('Image Types:'),
      expect.stringContaining('Video Types:'),
      'Statistics:',
      ' Images: 0',
      ' Videos: 0',
      ' Total: 0',
@ -1,76 +1,26 @@
import { LoginResponseDto, getAllAlbums, getAllAssets } from '@immich/sdk';
import { readFileSync } from 'node:fs';
import { getAllAlbums, getAllAssets } from '@immich/sdk';
import { mkdir, readdir, rm, symlink } from 'node:fs/promises';
import { asKeyAuth, immichCli, testAssetDir, utils } from 'src/utils';
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';

describe(`immich upload`, () => {
  let admin: LoginResponseDto;
  let key: string;

  beforeAll(async () => {
    await utils.resetDatabase();

    admin = await utils.adminSetup();
    key = await utils.cliLogin(admin.accessToken);
    key = await utils.cliLogin();
  });

  beforeEach(async () => {
    await utils.resetDatabase(['assets', 'albums']);
  });

  describe(`immich upload /path/to/file.jpg`, () => {
    it('should upload a single file', async () => {
      const { stderr, stdout, exitCode } = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
      expect(stderr).toBe('');
      expect(stdout.split('\n')).toEqual(
        expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
      );
      expect(exitCode).toBe(0);

      const assets = await getAllAssets({}, { headers: asKeyAuth(key) });
      expect(assets.length).toBe(1);
    });

    it('should skip a duplicate file', async () => {
      const first = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
      expect(first.stderr).toBe('');
      expect(first.stdout.split('\n')).toEqual(
        expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
      );
      expect(first.exitCode).toBe(0);

      const assets = await getAllAssets({}, { headers: asKeyAuth(key) });
      expect(assets.length).toBe(1);

      const second = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
      expect(second.stderr).toBe('');
      expect(second.stdout.split('\n')).toEqual(
        expect.arrayContaining([
          expect.stringContaining('Found 0 new files and 1 duplicate'),
          expect.stringContaining('All assets were already uploaded, nothing to do'),
        ]),
      );
      expect(first.exitCode).toBe(0);
    });

    it('should skip files that do not exist', async () => {
      const { stderr, stdout, exitCode } = await immichCli(['upload', `/path/to/file`]);
      expect(stderr).toBe('');
      expect(stdout.split('\n')).toEqual(expect.arrayContaining([expect.stringContaining('No files found, exiting')]));
      expect(exitCode).toBe(0);

      const assets = await getAllAssets({}, { headers: asKeyAuth(key) });
      expect(assets.length).toBe(0);
    });
  });

  describe('immich upload --recursive', () => {
    it('should upload a folder recursively', async () => {
      const { stderr, stdout, exitCode } = await immichCli(['upload', `${testAssetDir}/albums/nature/`, '--recursive']);
      expect(stderr).toBe('');
      expect(stdout.split('\n')).toEqual(
        expect.arrayContaining([expect.stringContaining('Successfully uploaded 9 new assets')]),
        expect.arrayContaining([expect.stringContaining('Successfully uploaded 9 assets')]),
      );
      expect(exitCode).toBe(0);

@ -89,7 +39,7 @@ describe(`immich upload`, () => {
      ]);
      expect(stdout.split('\n')).toEqual(
        expect.arrayContaining([
          expect.stringContaining('Successfully uploaded 9 new assets'),
          expect.stringContaining('Successfully uploaded 9 assets'),
          expect.stringContaining('Successfully created 1 new album'),
          expect.stringContaining('Successfully updated 9 assets'),
        ]),
@ -108,7 +58,7 @@ describe(`immich upload`, () => {
    it('should add existing assets to albums', async () => {
      const response1 = await immichCli(['upload', `${testAssetDir}/albums/nature/`, '--recursive']);
      expect(response1.stdout.split('\n')).toEqual(
        expect.arrayContaining([expect.stringContaining('Successfully uploaded 9 new assets')]),
        expect.arrayContaining([expect.stringContaining('Successfully uploaded 9 assets')]),
      );
      expect(response1.stderr).toBe('');
      expect(response1.exitCode).toBe(0);
@ -148,7 +98,7 @@ describe(`immich upload`, () => {
      ]);
      expect(stdout.split('\n')).toEqual(
        expect.arrayContaining([
          expect.stringContaining('Successfully uploaded 9 new assets'),
          expect.stringContaining('Successfully uploaded 9 assets'),
          expect.stringContaining('Successfully created 1 new album'),
          expect.stringContaining('Successfully updated 9 assets'),
        ]),
@ -181,7 +131,7 @@ describe(`immich upload`, () => {

      expect(stdout.split('\n')).toEqual(
        expect.arrayContaining([
          expect.stringContaining('Successfully uploaded 9 new assets'),
          expect.stringContaining('Successfully uploaded 9 assets'),
          expect.stringContaining('Deleting assets that have been uploaded'),
        ]),
      );
@ -193,32 +143,6 @@ describe(`immich upload`, () => {
    });
  });

  describe('immich upload --skip-hash', () => {
    it('should skip hashing', async () => {
      const filename = `albums/nature/silver_fir.jpg`;
      await utils.createAsset(admin.accessToken, {
        assetData: {
          bytes: readFileSync(`${testAssetDir}/${filename}`),
          filename: 'silver_fit.jpg',
        },
      });
      const { stderr, stdout, exitCode } = await immichCli(['upload', `${testAssetDir}/${filename}`, '--skip-hash']);

      expect(stderr).toBe('');
      expect(stdout.split('\n')).toEqual(
        expect.arrayContaining([
          'Skipping hash check, assuming all files are new',
          expect.stringContaining('Successfully uploaded 0 new assets'),
          expect.stringContaining('Skipped 1 duplicate asset'),
        ]),
      );
      expect(exitCode).toBe(0);

      const assets = await getAllAssets({}, { headers: asKeyAuth(key) });
      expect(assets.length).toBe(1);
    });
  });

  describe('immich upload --concurrency <number>', () => {
    it('should work', async () => {
      const { stderr, stdout, exitCode } = await immichCli([
@ -230,10 +154,7 @@ describe(`immich upload`, () => {

      expect(stderr).toBe('');
      expect(stdout.split('\n')).toEqual(
        expect.arrayContaining([
          'Found 9 new files and 0 duplicates',
          expect.stringContaining('Successfully uploaded 9 new assets'),
        ]),
        expect.arrayContaining([expect.stringContaining('Successfully uploaded 9 assets')]),
      );
      expect(exitCode).toBe(0);

@ -1,7 +1,7 @@
import { exec, spawn } from 'node:child_process';
import { setTimeout } from 'node:timers';

const setup = async () => {
export default async () => {
  let _resolve: () => unknown;
  let _reject: (error: Error) => unknown;

@ -31,5 +31,3 @@ const setup = async () => {
  await new Promise<void>((resolve) => exec('docker compose down', () => resolve()));
};
};

export default setup;
@ -39,7 +39,7 @@ import { makeRandomImage } from 'src/generators';
import request from 'supertest';

type CliResponse = { stdout: string; stderr: string; exitCode: number | null };
type EventType = 'assetUpload' | 'assetUpdate' | 'assetDelete' | 'userDelete';
type EventType = 'assetUpload' | 'assetDelete' | 'userDelete';
type WaitOptions = { event: EventType; id?: string; total?: number; timeout?: number };
type AdminSetupOptions = { onboarding?: boolean };
type AssetData = { bytes?: Buffer; filename: string };
@ -82,7 +82,6 @@ let client: pg.Client | null = null;

const events: Record<EventType, Set<string>> = {
  assetUpload: new Set<string>(),
  assetUpdate: new Set<string>(),
  assetDelete: new Set<string>(),
  userDelete: new Set<string>(),
};
@ -186,7 +185,6 @@ export const utils = {
  websocket
    .on('connect', () => resolve(websocket))
    .on('on_upload_success', (data: AssetResponseDto) => onEvent({ event: 'assetUpload', id: data.id }))
    .on('on_asset_update', (data: AssetResponseDto) => onEvent({ event: 'assetUpdate', id: data.id }))
    .on('on_asset_delete', (assetId: string) => onEvent({ event: 'assetDelete', id: assetId }))
    .on('on_user_delete', (userId: string) => onEvent({ event: 'userDelete', id: userId }))
    .connect();
@ -406,9 +404,10 @@ export const utils = {
    },
  ]),

  cliLogin: async (accessToken: string) => {
    const key = await utils.createApiKey(accessToken);
    await immichCli(['login', app, `${key.secret}`]);
  cliLogin: async () => {
    const admin = await utils.adminSetup();
    const key = await utils.createApiKey(admin.accessToken);
    await immichCli(['login-key', app, `${key.secret}`]);
    return key.secret;
  },
};
82
install.sh
@ -1,78 +1,62 @@
#!/usr/bin/env bash
set -o nounset
set -o pipefail

create_immich_directory() { local -r Tgt='./immich-app'
echo "Starting Immich installation..."

ip_address=$(hostname -I | awk '{print $1}')

create_immich_directory() {
  echo "Creating Immich directory..."
  if [[ -e $Tgt ]]; then
    echo "Found existing directory $Tgt, will overwrite YAML files"
  else
    mkdir "$Tgt" || return
  fi
  cd "$Tgt" || return
  mkdir -p ./immich-app
  cd ./immich-app || exit
}

download_docker_compose_file() {
  echo "Downloading docker-compose.yml..."
  "${Curl[@]}" "$RepoUrl"/docker-compose.yml -o ./docker-compose.yml
  curl -L https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml -o ./docker-compose.yml >/dev/null 2>&1
}

download_dot_env_file() {
  echo "Downloading .env file..."
  "${Curl[@]}" "$RepoUrl"/example.env -o ./.env
  curl -L https://github.com/immich-app/immich/releases/latest/download/example.env -o ./.env >/dev/null 2>&1
}

start_docker_compose() {
  echo "Starting Immich's docker containers"

  if ! docker compose >/dev/null 2>&1; then
    echo "failed to find 'docker compose'"
    return 1
  if docker compose >/dev/null 2>&1; then
    docker_bin="docker compose"
  elif docker-compose >/dev/null 2>&1; then
    docker_bin="docker-compose"
  else
    echo "Cannot find \`docker compose\` or \`docker-compose\`."
    exit 1
  fi

  if ! docker compose up --remove-orphans -d; then
    echo "Could not start. Check for errors above."
    return 1
  fi
  if $docker_bin up --remove-orphans -d; then
    show_friendly_message
    exit 0
  else
    echo "Could not start. Check for errors above."
    exit 1
  fi
}

show_friendly_message() {
  local ip_address
  ip_address=$(hostname -I | awk '{print $1}')
  cat << EOF
Successfully deployed Immich!
You can access the website at http://$ip_address:2283 and the server URL for the mobile app is http://$ip_address:2283/api
---------------------------------------------------
If you want to configure custom information of the server, including the database, Redis information, or the backup (or upload) location, etc.
  echo "Successfully deployed Immich!"
  echo "You can access the website at http://$ip_address:2283 and the server URL for the mobile app is http://$ip_address:2283/api"
  echo "---------------------------------------------------"
  echo "If you want to configure custom information of the server, including the database, Redis information, or the backup (or upload) location, etc.

1. First bring down the containers with the command 'docker compose down' in the immich-app directory,
1. First bring down the containers with the command 'docker-compose down' in the immich-app directory,

2. Then change the information that fits your needs in the '.env' file,

3. Finally, bring the containers back up with the command 'docker compose up --remove-orphans -d' in the immich-app directory
EOF
3. Finally, bring the containers back up with the command 'docker-compose up --remove-orphans -d' in the immich-app directory"

}

# MAIN
main() {
  echo "Starting Immich installation..."
  local -r RepoUrl='https://github.com/immich-app/immich/releases/latest/download'
  local -a Curl
  if command -v curl >/dev/null; then
    Curl=(curl -fsSL)
  else
    echo 'no curl binary found; please install curl and try again'
    return 14
  fi

  create_immich_directory || { echo 'error creating Immich directory'; return 10; }
  download_docker_compose_file || { echo 'error downloading Docker Compose file'; return 11; }
  download_dot_env_file || { echo 'error downloading .env'; return 12; }
  start_docker_compose || { echo 'error starting Docker'; return 13; }
  return 0; }

main
Exit=$?
[[ $Exit == 0 ]] || echo "There was an error installing Immich. Exit code: $Exit. Please provide these logs when asking for assistance."
exit "$Exit"
create_immich_directory
download_docker_compose_file
download_dot_env_file
start_docker_compose
@ -66,8 +66,8 @@ download:
    locale_code: es-MX
  - file: mobile/assets/i18n/sv-FI.json
    locale_code: sv-FI
  - file: mobile/assets/i18n/ca-CA.json
    locale_code: ca-CA
  - file: mobile/assets/i18n/ca.json
    locale_code: ca
  - file: mobile/assets/i18n/hu-HU.json
    locale_code: hu-HU
  - file: mobile/assets/i18n/lv-LV.json
@ -76,19 +76,3 @@ download:
    locale_code: zh-Hans
  - file: mobile/assets/i18n/th-TH.json
    locale_code: th-TH
  - file: mobile/assets/i18n/lt-LT.json
    locale_code: lt-LT
  - file: mobile/assets/i18n/el-GR.json
    locale_code: el-GR
  - file: mobile/assets/i18n/fr-CA.json
    locale_code: fr-CA
  - file: mobile/assets/i18n/es-US.json
    locale_code: es-US
  - file: mobile/assets/i18n/sl-SI.json
    locale_code: sl-SI
  - file: mobile/assets/i18n/ar-JO.json
    locale_code: ar-JO
  - file: mobile/assets/i18n/he-IL.json
    locale_code: he-IL
  - file: mobile/assets/i18n/ro-RO.json
    locale_code: ro-RO
@ -1,6 +1,6 @@
ARG DEVICE=cpu

FROM python:3.11-bookworm@sha256:e2ed446c899827ed992f8a5a8875fa0853fcab32581e61418b650322061aa3c4 as builder-cpu
FROM python:3.11-bookworm@sha256:991e20a11120277e977cadbc104e7a9b196a68a346597879821b19034285a403 as builder-cpu

FROM openvino/ubuntu22_runtime:2023.3.0@sha256:176646df619032ea6c10faf842867119c393e7497b7f88b5e307e932a0fd5aa8 as builder-openvino
USER root
@ -36,7 +36,7 @@ RUN python3 -m venv /opt/venv
COPY poetry.lock pyproject.toml ./
RUN poetry install --sync --no-interaction --no-ansi --no-root --with ${DEVICE} --without dev

FROM python:3.11-slim-bookworm@sha256:90f8795536170fd08236d2ceb74fe7065dbf74f738d8b84bfbf263656654dc9b as prod-cpu
FROM python:3.11-slim-bookworm@sha256:a2eb07f336e4f194358382611b4fea136c632b40baa6314cb27a366deeaf0144 as prod-cpu

FROM openvino/ubuntu22_runtime:2023.3.0@sha256:176646df619032ea6c10faf842867119c393e7497b7f88b5e307e932a0fd5aa8 as prod-openvino
USER root
@ -22,19 +22,3 @@ You can change the models or adjust options like score thresholds through the Lo
To get started, you can simply run `locust --web-host 127.0.0.1` and open `localhost:8089` in a browser to access the UI. See the [Locust documentation](https://docs.locust.io/en/stable/index.html) for more info on running Locust.

Note that in Locust's jargon, concurrency is measured in `users`, and each user runs one task at a time. To achieve a particular per-endpoint concurrency, multiply that number by the number of endpoints to be queried. For example, if there are 3 endpoints and you want each of them to receive 8 requests at a time, you should set the number of users to 24. A rough sketch of that arithmetic is shown below.
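
A minimal sketch of how the user arithmetic plays out, assuming a locustfile with three task endpoints; this is not the locustfile shipped in the repository, and the class name and endpoint paths are placeholders:

```python
# Hedged sketch only: three endpoints x 8 concurrent requests per endpoint => 24 users,
# e.g. `locust -u 24 --web-host 127.0.0.1`. Paths below are placeholders.
from locust import HttpUser, constant, task


class PlaceholderUser(HttpUser):
    wait_time = constant(0)  # keep each simulated user busy, so users ~= in-flight requests

    @task
    def endpoint_one(self) -> None:
        self.client.get("/ping")  # placeholder path

    @task
    def endpoint_two(self) -> None:
        self.client.get("/ping")  # placeholder path

    @task
    def endpoint_three(self) -> None:
        self.client.get("/ping")  # placeholder path
```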

# Facial Recognition

## Acknowledgements
This project utilizes facial recognition models from the [InsightFace](https://github.com/deepinsight/insightface/tree/master/model_zoo) project. We appreciate the work put into developing these models, which have been beneficial to the machine learning part of this project.

### Used Models
* antelopev2
* buffalo_l
* buffalo_m
* buffalo_s

## License and Use Restrictions
We have received permission to use the InsightFace facial recognition models in our project, as granted via email by Jia Guo (guojia@insightface.ai) on 18th March 2023. However, it's important to note that this permission does not extend to the redistribution or commercial use of their models by third parties. Users and developers interested in using these models should review the licensing terms provided in the InsightFace GitHub repository.

For more information on the capabilities of the InsightFace models and to ensure compliance with their license, please refer to their [official repository](https://github.com/deepinsight/insightface). Adhering to the specified licensing terms is crucial for the respectful and lawful use of their work.
@ -1,4 +1,4 @@
FROM mambaorg/micromamba:bookworm-slim@sha256:3624db3aee11d2f3f00d25f691aaaf8834b8bc4ec1b340dcdb48ef37281ea604 as builder
FROM mambaorg/micromamba:bookworm-slim@sha256:881dbb68d115182b2c12e7e77dc54ea5005fd4e0123ca009d822adb5b0631785 as builder

ENV NODE_ENV=production \
    TRANSFORMERS_CACHE=/cache \
228
machine-learning/poetry.lock
generated
@ -64,33 +64,33 @@ trio = ["trio (>=0.23)"]
|
||||
|
||||
[[package]]
|
||||
name = "black"
|
||||
version = "24.3.0"
|
||||
version = "24.2.0"
|
||||
description = "The uncompromising code formatter."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"},
|
||||
{file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"},
|
||||
{file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"},
|
||||
{file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"},
|
||||
{file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"},
|
||||
{file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"},
|
||||
{file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"},
|
||||
{file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"},
|
||||
{file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"},
|
||||
{file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"},
|
||||
{file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"},
|
||||
{file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"},
|
||||
{file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"},
|
||||
{file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"},
|
||||
{file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"},
|
||||
{file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"},
|
||||
{file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"},
|
||||
{file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"},
|
||||
{file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"},
|
||||
{file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"},
|
||||
{file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"},
|
||||
{file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"},
|
||||
{file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"},
|
||||
{file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"},
|
||||
{file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"},
|
||||
{file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"},
|
||||
{file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"},
|
||||
{file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"},
|
||||
{file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"},
|
||||
{file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"},
|
||||
{file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"},
|
||||
{file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"},
|
||||
{file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"},
|
||||
{file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"},
|
||||
{file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"},
|
||||
{file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"},
|
||||
{file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"},
|
||||
{file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"},
|
||||
{file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"},
|
||||
{file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"},
|
||||
{file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"},
|
||||
{file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"},
|
||||
{file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"},
|
||||
{file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -1274,13 +1274,13 @@ socks = ["socksio (==1.*)"]
|
||||
|
||||
[[package]]
|
||||
name = "huggingface-hub"
|
||||
version = "0.22.2"
|
||||
version = "0.21.4"
|
||||
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
|
||||
optional = false
|
||||
python-versions = ">=3.8.0"
|
||||
files = [
|
||||
{file = "huggingface_hub-0.22.2-py3-none-any.whl", hash = "sha256:3429e25f38ccb834d310804a3b711e7e4953db5a9e420cc147a5e194ca90fd17"},
|
||||
{file = "huggingface_hub-0.22.2.tar.gz", hash = "sha256:32e9a9a6843c92f253ff9ca16b9985def4d80a93fb357af5353f770ef74a81be"},
|
||||
{file = "huggingface_hub-0.21.4-py3-none-any.whl", hash = "sha256:df37c2c37fc6c82163cdd8a67ede261687d80d1e262526d6c0ce73b6b3630a7b"},
|
||||
{file = "huggingface_hub-0.21.4.tar.gz", hash = "sha256:e1f4968c93726565a80edf6dc309763c7b546d0cfe79aa221206034d50155531"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -1293,16 +1293,15 @@ tqdm = ">=4.42.1"
|
||||
typing-extensions = ">=3.7.4.3"
|
||||
|
||||
[package.extras]
|
||||
all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
|
||||
all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
|
||||
cli = ["InquirerPy (==0.3.4)"]
|
||||
dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
|
||||
dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
|
||||
fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"]
|
||||
hf-transfer = ["hf-transfer (>=0.1.4)"]
|
||||
inference = ["aiohttp", "minijinja (>=1.0)"]
|
||||
quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"]
|
||||
inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"]
|
||||
quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"]
|
||||
tensorflow = ["graphviz", "pydot", "tensorflow"]
|
||||
tensorflow-testing = ["keras (<3.0)", "tensorflow"]
|
||||
testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"]
|
||||
testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"]
|
||||
torch = ["safetensors", "torch"]
|
||||
typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"]
|
||||
|
||||
@ -1568,13 +1567,13 @@ test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "locust"
|
||||
version = "2.24.1"
|
||||
version = "2.24.0"
|
||||
description = "Developer friendly load testing framework"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "locust-2.24.1-py3-none-any.whl", hash = "sha256:7f6ed4dc289aad66c304582e6d25e4de5d7c3b175b580332442ab2be35b9d916"},
|
||||
{file = "locust-2.24.1.tar.gz", hash = "sha256:094161d44d94839bf1120fd7898b7abb9c143833743ba7c096beb470a236b9a7"},
|
||||
{file = "locust-2.24.0-py3-none-any.whl", hash = "sha256:1b6b878b4fd0108fec956120815e69775d2616c8f4d1e9f365c222a7a5c17d9a"},
|
||||
{file = "locust-2.24.0.tar.gz", hash = "sha256:6cffa378d995244a7472af6be1d6139331f19aee44e907deee73e0281252804d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -2110,62 +2109,61 @@ numpy = [
|
||||
|
||||
[[package]]
|
||||
name = "orjson"
|
||||
version = "3.10.0"
|
||||
version = "3.9.15"
|
||||
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"},
|
||||
{file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"},
|
||||
{file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"},
|
||||
{file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"},
|
||||
{file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"},
|
||||
{file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"},
|
||||
{file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"},
|
||||
{file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"},
|
||||
{file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"},
|
||||
{file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"},
|
||||
{file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"},
|
||||
{file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"},
|
||||
{file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"},
|
||||
{file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"},
|
||||
{file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"},
|
||||
{file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"},
|
||||
{file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"},
|
||||
{file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"},
|
||||
{file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"},
|
||||
{file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"},
|
||||
{file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"},
|
||||
{file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"},
|
||||
{file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"},
|
||||
{file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"},
|
||||
{file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"},
|
||||
{file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"},
|
||||
{file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"},
|
||||
{file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"},
|
||||
{file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"},
|
||||
{file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"},
|
||||
{file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"},
|
||||
{file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"},
|
||||
{file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"},
|
||||
{file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"},
|
||||
{file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"},
|
||||
{file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"},
|
||||
{file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"},
|
||||
{file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"},
|
||||
{file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"},
|
||||
{file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"},
|
||||
{file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"},
|
||||
{file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"},
|
||||
{file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"},
|
||||
{file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"},
|
||||
{file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"},
|
||||
{file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"},
|
||||
{file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"},
|
||||
{file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"},
|
||||
{file = "orjson-3.10.0-cp39-none-win32.whl", hash = "sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"},
|
||||
{file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"},
|
||||
{file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"},
|
||||
{file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"},
|
||||
{file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"},
|
||||
{file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"},
|
||||
{file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"},
|
||||
{file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"},
|
||||
{file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"},
|
||||
{file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"},
|
||||
{file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"},
|
||||
{file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"},
|
||||
{file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"},
|
||||
{file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"},
|
||||
{file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"},
|
||||
{file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"},
|
||||
{file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"},
|
||||
{file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"},
|
||||
{file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"},
|
||||
{file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"},
|
||||
{file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"},
|
||||
{file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"},
|
||||
{file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"},
|
||||
{file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"},
|
||||
{file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"},
|
||||
{file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"},
|
||||
{file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"},
|
||||
{file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"},
|
||||
{file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"},
|
||||
{file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"},
|
||||
{file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"},
|
||||
{file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"},
|
||||
{file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"},
|
||||
{file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"},
|
||||
{file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"},
|
||||
{file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"},
|
||||
{file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"},
|
||||
{file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"},
|
||||
{file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"},
|
||||
{file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"},
|
||||
{file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"},
|
||||
{file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"},
|
||||
{file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"},
|
||||
{file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"},
|
||||
{file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"},
|
||||
{file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"},
|
||||
{file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"},
|
||||
{file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"},
|
||||
{file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"},
|
||||
{file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"},
|
||||
{file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"},
|
||||
{file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"},
|
||||
{file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -2497,13 +2495,13 @@ testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygm
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "0.23.6"
|
||||
version = "0.23.5.post1"
|
||||
description = "Pytest support for asyncio"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"},
|
||||
{file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"},
|
||||
{file = "pytest-asyncio-0.23.5.post1.tar.gz", hash = "sha256:b9a8806bea78c21276bc34321bbf234ba1b2ea5b30d9f0ce0f2dea45e4685813"},
|
||||
{file = "pytest_asyncio-0.23.5.post1-py3-none-any.whl", hash = "sha256:30f54d27774e79ac409778889880242b0403d09cabd65b727ce90fe92dd5d80e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -2533,17 +2531,17 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale

[[package]]
name = "pytest-mock"
version = "3.14.0"
version = "3.12.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
{file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
{file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
{file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
]

[package.dependencies]
pytest = ">=6.2.5"
pytest = ">=5.0"

[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]
@ -2846,28 +2844,28 @@ files = [

[[package]]
name = "ruff"
version = "0.3.4"
version = "0.3.3"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"},
{file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"},
{file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"},
{file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"},
{file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"},
{file = "ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"},
{file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"},
{file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"},
{file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"},
{file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"},
{file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:973a0e388b7bc2e9148c7f9be8b8c6ae7471b9be37e1cc732f8f44a6f6d7720d"},
{file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfa60d23269d6e2031129b053fdb4e5a7b0637fc6c9c0586737b962b2f834493"},
{file = "ruff-0.3.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eca7ff7a47043cf6ce5c7f45f603b09121a7cc047447744b029d1b719278eb5"},
{file = "ruff-0.3.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7d3f6762217c1da954de24b4a1a70515630d29f71e268ec5000afe81377642d"},
{file = "ruff-0.3.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b24c19e8598916d9c6f5a5437671f55ee93c212a2c4c569605dc3842b6820386"},
{file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5a6cbf216b69c7090f0fe4669501a27326c34e119068c1494f35aaf4cc683778"},
{file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352e95ead6964974b234e16ba8a66dad102ec7bf8ac064a23f95371d8b198aab"},
{file = "ruff-0.3.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6ab88c81c4040a817aa432484e838aaddf8bfd7ca70e4e615482757acb64f8"},
{file = "ruff-0.3.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79bca3a03a759cc773fca69e0bdeac8abd1c13c31b798d5bb3c9da4a03144a9f"},
{file = "ruff-0.3.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2700a804d5336bcffe063fd789ca2c7b02b552d2e323a336700abb8ae9e6a3f8"},
{file = "ruff-0.3.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd66469f1a18fdb9d32e22b79f486223052ddf057dc56dea0caaf1a47bdfaf4e"},
{file = "ruff-0.3.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45817af234605525cdf6317005923bf532514e1ea3d9270acf61ca2440691376"},
{file = "ruff-0.3.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0da458989ce0159555ef224d5b7c24d3d2e4bf4c300b85467b08c3261c6bc6a8"},
{file = "ruff-0.3.3-py3-none-win32.whl", hash = "sha256:f2831ec6a580a97f1ea82ea1eda0401c3cdf512cf2045fa3c85e8ef109e87de0"},
{file = "ruff-0.3.3-py3-none-win_amd64.whl", hash = "sha256:be90bcae57c24d9f9d023b12d627e958eb55f595428bafcb7fec0791ad25ddfc"},
{file = "ruff-0.3.3-py3-none-win_arm64.whl", hash = "sha256:0171aab5fecdc54383993389710a3d1227f2da124d76a2784a7098e818f92d61"},
{file = "ruff-0.3.3.tar.gz", hash = "sha256:38671be06f57a2f8aba957d9f701ea889aa5736be806f18c0cd03d6ff0cbca8d"},
]

[[package]]
@ -3291,13 +3289,13 @@ zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "uvicorn"
version = "0.29.0"
version = "0.28.0"
description = "The lightning-fast ASGI server."
optional = false
python-versions = ">=3.8"
files = [
{file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
{file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
{file = "uvicorn-0.28.0-py3-none-any.whl", hash = "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1"},
{file = "uvicorn-0.28.0.tar.gz", hash = "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067"},
]

[package.dependencies]
@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.101.0"
version = "1.99.0"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"
After Width: | Height: | Size: 4.8 KiB |
After Width: | Height: | Size: 3.3 KiB |
After Width: | Height: | Size: 6.2 KiB |
After Width: | Height: | Size: 7.3 KiB |
After Width: | Height: | Size: 19 KiB |