Merge branch 'main' of github.com:immich-app/immich into feat/user-sync-stream

commit b72a91a8da
Author: Alex
Date: 2025-04-16 10:15:32 -05:00
GPG Key ID: 53CD082B3A5E1082 (no known key found for this signature in database)

41 changed files with 1135 additions and 1337 deletions


@@ -178,6 +178,12 @@ jobs:
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
fi
- name: Generate docker image tags
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
env:
DOCKER_METADATA_PR_HEAD_SHA: 'true'
- name: Build and push image
id: build
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
@@ -185,7 +191,7 @@ jobs:
context: ${{ env.context }}
file: ${{ env.file }}
platforms: ${{ matrix.platforms }}
-labels: ${{ steps.metadata.outputs.labels }}
+labels: ${{ steps.meta.outputs.labels }}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
cache-from: |
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }}
@@ -287,8 +293,26 @@ jobs:
- name: Create manifest list and push
working-directory: ${{ runner.temp }}/digests
run: |
-docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
+# Process annotations
+declare -a ANNOTATIONS=()
if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
while IFS= read -r annotation; do
# Extract key and value by removing the manifest: prefix
if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
key="${BASH_REMATCH[1]}"
value="${BASH_REMATCH[2]}"
# Use array to properly handle arguments with spaces
ANNOTATIONS+=(--annotation "index:$key=$value")
fi
done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
fi
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON")
SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
build_and_push_server:
name: Build and Push Server
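The `manifest:` → `index:` rewrite above is easy to exercise in isolation. A standalone sketch with invented sample annotations (the real ones arrive via `DOCKER_METADATA_OUTPUT_JSON` from `docker/metadata-action`):

```bash
#!/usr/bin/env bash
# Standalone demo of the annotation rewrite; the sample strings are made up.
set -euo pipefail

declare -a ANNOTATIONS=()
samples=(
  'manifest:org.opencontainers.image.title=immich'
  'manifest:org.opencontainers.image.licenses=AGPL-3.0'
)
for annotation in "${samples[@]}"; do
  if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
    # BASH_REMATCH[1] holds the key, BASH_REMATCH[2] the value; re-prefixing
    # with index: makes buildx apply the annotation to the manifest list.
    ANNOTATIONS+=(--annotation "index:${BASH_REMATCH[1]}=${BASH_REMATCH[2]}")
  fi
done
printf '%s\n' "${ANNOTATIONS[@]}"
```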
@@ -343,9 +367,15 @@ jobs:
# Essentially just ignore the cache output (forks can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
-echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
+echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
fi
- name: Generate docker image tags
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
env:
DOCKER_METADATA_PR_HEAD_SHA: 'true'
- name: Build and push image
id: build
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
@@ -353,7 +383,7 @@ jobs:
context: ${{ env.context }}
file: ${{ env.file }}
platforms: ${{ matrix.platform }}
-labels: ${{ steps.metadata.outputs.labels }}
+labels: ${{ steps.meta.outputs.labels }}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
cache-from: |
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }}
@@ -441,8 +471,26 @@ jobs:
- name: Create manifest list and push
working-directory: ${{ runner.temp }}/digests
run: |
-docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
+# Process annotations
+declare -a ANNOTATIONS=()
if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
while IFS= read -r annotation; do
# Extract key and value by removing the manifest: prefix
if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
key="${BASH_REMATCH[1]}"
value="${BASH_REMATCH[2]}"
# Use array to properly handle arguments with spaces
ANNOTATIONS+=(--annotation "index:$key=$value")
fi
done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
fi
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON")
SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
success-check-server:
name: Docker Build & Push Server Success


@@ -83,9 +83,20 @@ To see local changes to `@immich/ui` in Immich, do the following:
### Mobile app
-The mobile app `(/mobile)` will required Flutter toolchain 3.13.x and FVM to be installed on your system.
-Please refer to the [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
+#### Setup
+1. Setup Flutter toolchain using FVM.
2. Run `flutter pub get` to install the dependencies.
3. Run `make translation` to generate the translation file.
4. Run `fvm flutter run` to start the app.
#### Translation
To add a new translation text, enter the key-value pair in the `i18n/en.json` in the root of the immich project. Then, from the `mobile/` directory, run
```bash
make translation
```
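For illustration, a hypothetical entry in `i18n/en.json` (this exact key is made up):

```json
{
  "my_new_feature_title": "My new feature"
}
```

Running `make translation` afterwards regenerates the file the app reads the key from.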
The mobile app asks you what backend to connect to. You can utilize the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above.


@@ -42,6 +42,12 @@ docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich
Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variables as suitable. You can also use a Docker env file to store your sensitive API key.
This `docker run` command will directly run the `immich` command inside the container. You can append the desired parameters (see under "Usage") to the command line like this:
```bash
docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich-instance/api -e IMMICH_API_KEY=your-api-key ghcr.io/immich-app/immich-cli:latest upload -a -c 5 --recursive directory/
```
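The env file mentioned above keeps the key off the command line entirely; a sketch (file name and placeholder values are illustrative):

```bash
# immich-cli.env (illustrative):
#   IMMICH_INSTANCE_URL=https://your-immich-instance/api
#   IMMICH_API_KEY=your-api-key
docker run -it -v "$(pwd)":/import:ro --env-file immich-cli.env ghcr.io/immich-app/immich-cli:latest upload --recursive directory/
```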
## Usage
<details>


@@ -1200,7 +1200,7 @@ wheels = [
[[package]]
name = "locust"
-version = "2.33.2"
+version = "2.34.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "configargparse" },
@@ -1219,9 +1219,9 @@ dependencies = [
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
{ name = "werkzeug" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/a2/9e/09ee87dc12b240248731080bfd460c7d384aadb3171f6d03a4e7314cd0e1/locust-2.33.2.tar.gz", hash = "sha256:e626ed0156f36cec94c3c6b030fc91046469e7e2f5c2e91a99aab0f28b84977e", size = 2237716 }
+sdist = { url = "https://files.pythonhosted.org/packages/56/21/c2bfe4f9482f8754e9a1ff2b1840a1abe63640576fc918a67a02fff7d961/locust-2.34.1.tar.gz", hash = "sha256:184a6ffcb0d6c543bbeae4de65cbb198c7e0739d569d48a2b8bf5db962077733", size = 2240533 }
wheels = [
-{ url = "https://files.pythonhosted.org/packages/9c/c7/bb55ac53173d3e92b1b2577d0f36439500406ca5be476a27b7bc01ae8a75/locust-2.33.2-py3-none-any.whl", hash = "sha256:a2f3b53dcd5ed22cecee874cd989912749663d82ec9b030637d3e43044e5878e", size = 2254591 },
+{ url = "https://files.pythonhosted.org/packages/e2/e4/0944fbfb1ce0bf09cb400ed9349d4cbaed1230114e4018ac28805097f1c6/locust-2.34.1-py3-none-any.whl", hash = "sha256:487bfadd584e3320f9862adf5aa1cfa1023e030a6af414f4e0a92e62617ce451", size = 2257910 },
]
[[package]]
@@ -2225,27 +2225,27 @@ wheels = [
[[package]]
name = "ruff"
-version = "0.11.4"
+version = "0.11.5"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/e8/5b/3ae20f89777115944e89c2d8c2e795dcc5b9e04052f76d5347e35e0da66e/ruff-0.11.4.tar.gz", hash = "sha256:f45bd2fb1a56a5a85fae3b95add03fb185a0b30cf47f5edc92aa0355ca1d7407", size = 3933063 }
+sdist = { url = "https://files.pythonhosted.org/packages/45/71/5759b2a6b2279bb77fe15b1435b89473631c2cd6374d45ccdb6b785810be/ruff-0.11.5.tar.gz", hash = "sha256:cae2e2439cb88853e421901ec040a758960b576126dab520fa08e9de431d1bef", size = 3976488 }
wheels = [
-{ url = "https://files.pythonhosted.org/packages/9c/db/baee59ac88f57527fcbaad3a7b309994e42329c6bc4d4d2b681a3d7b5426/ruff-0.11.4-py3-none-linux_armv6l.whl", hash = "sha256:d9f4a761ecbde448a2d3e12fb398647c7f0bf526dbc354a643ec505965824ed2", size = 10106493 },
-{ url = "https://files.pythonhosted.org/packages/c1/d6/9a0962cbb347f4ff98b33d699bf1193ff04ca93bed4b4222fd881b502154/ruff-0.11.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8c1747d903447d45ca3d40c794d1a56458c51e5cc1bc77b7b64bd2cf0b1626cc", size = 10876382 },
-{ url = "https://files.pythonhosted.org/packages/3a/8f/62bab0c7d7e1ae3707b69b157701b41c1ccab8f83e8501734d12ea8a839f/ruff-0.11.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:51a6494209cacca79e121e9b244dc30d3414dac8cc5afb93f852173a2ecfc906", size = 10237050 },
-{ url = "https://files.pythonhosted.org/packages/09/96/e296965ae9705af19c265d4d441958ed65c0c58fc4ec340c27cc9d2a1f5b/ruff-0.11.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f171605f65f4fc49c87f41b456e882cd0c89e4ac9d58e149a2b07930e1d466f", size = 10424984 },
-{ url = "https://files.pythonhosted.org/packages/e5/56/644595eb57d855afed6e54b852e2df8cd5ca94c78043b2f29bdfb29882d5/ruff-0.11.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebf99ea9af918878e6ce42098981fc8c1db3850fef2f1ada69fb1dcdb0f8e79e", size = 9957438 },
-{ url = "https://files.pythonhosted.org/packages/86/83/9d3f3bed0118aef3e871ded9e5687fb8c5776bde233427fd9ce0a45db2d4/ruff-0.11.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edad2eac42279df12e176564a23fc6f4aaeeb09abba840627780b1bb11a9d223", size = 11547282 },
-{ url = "https://files.pythonhosted.org/packages/40/e6/0c6e4f5ae72fac5ccb44d72c0111f294a5c2c8cc5024afcb38e6bda5f4b3/ruff-0.11.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f103a848be9ff379fc19b5d656c1f911d0a0b4e3e0424f9532ececf319a4296e", size = 12182020 },
-{ url = "https://files.pythonhosted.org/packages/b5/92/4aed0e460aeb1df5ea0c2fbe8d04f9725cccdb25d8da09a0d3f5b8764bf8/ruff-0.11.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:193e6fac6eb60cc97b9f728e953c21cc38a20077ed64f912e9d62b97487f3f2d", size = 11679154 },
-{ url = "https://files.pythonhosted.org/packages/1b/d3/7316aa2609f2c592038e2543483eafbc62a0e1a6a6965178e284808c095c/ruff-0.11.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7af4e5f69b7c138be8dcffa5b4a061bf6ba6a3301f632a6bce25d45daff9bc99", size = 13905985 },
-{ url = "https://files.pythonhosted.org/packages/63/80/734d3d17546e47ff99871f44ea7540ad2bbd7a480ed197fe8a1c8a261075/ruff-0.11.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:126b1bf13154aa18ae2d6c3c5efe144ec14b97c60844cfa6eb960c2a05188222", size = 11348343 },
-{ url = "https://files.pythonhosted.org/packages/04/7b/70fc7f09a0161dce9613a4671d198f609e653d6f4ff9eee14d64c4c240fb/ruff-0.11.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8806daaf9dfa881a0ed603f8a0e364e4f11b6ed461b56cae2b1c0cab0645304", size = 10308487 },
-{ url = "https://files.pythonhosted.org/packages/1a/22/1cdd62dabd678d75842bf4944fd889cf794dc9e58c18cc547f9eb28f95ed/ruff-0.11.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5d94bb1cc2fc94a769b0eb975344f1b1f3d294da1da9ddbb5a77665feb3a3019", size = 9929091 },
-{ url = "https://files.pythonhosted.org/packages/9f/20/40e0563506332313148e783bbc1e4276d657962cc370657b2fff20e6e058/ruff-0.11.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:995071203d0fe2183fc7a268766fd7603afb9996785f086b0d76edee8755c896", size = 10924659 },
-{ url = "https://files.pythonhosted.org/packages/b5/41/eef9b7aac8819d9e942f617f9db296f13d2c4576806d604aba8db5a753f1/ruff-0.11.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7a37ca937e307ea18156e775a6ac6e02f34b99e8c23fe63c1996185a4efe0751", size = 11428160 },
-{ url = "https://files.pythonhosted.org/packages/ff/61/c488943414fb2b8754c02f3879de003e26efdd20f38167ded3fb3fc1cda3/ruff-0.11.4-py3-none-win32.whl", hash = "sha256:0e9365a7dff9b93af933dab8aebce53b72d8f815e131796268709890b4a83270", size = 10311496 },
-{ url = "https://files.pythonhosted.org/packages/b6/2b/2a1c8deb5f5dfa3871eb7daa41492c4d2b2824a74d2b38e788617612a66d/ruff-0.11.4-py3-none-win_amd64.whl", hash = "sha256:5a9fa1c69c7815e39fcfb3646bbfd7f528fa8e2d4bebdcf4c2bd0fa037a255fb", size = 11399146 },
-{ url = "https://files.pythonhosted.org/packages/4f/03/3aec4846226d54a37822e4c7ea39489e4abd6f88388fba74e3d4abe77300/ruff-0.11.4-py3-none-win_arm64.whl", hash = "sha256:d435db6b9b93d02934cf61ef332e66af82da6d8c69aefdea5994c89997c7a0fc", size = 10450306 },
+{ url = "https://files.pythonhosted.org/packages/23/db/6efda6381778eec7f35875b5cbefd194904832a1153d68d36d6b269d81a8/ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b", size = 10103150 },
+{ url = "https://files.pythonhosted.org/packages/44/f2/06cd9006077a8db61956768bc200a8e52515bf33a8f9b671ee527bb10d77/ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077", size = 10898637 },
+{ url = "https://files.pythonhosted.org/packages/18/f5/af390a013c56022fe6f72b95c86eb7b2585c89cc25d63882d3bfe411ecf1/ruff-0.11.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4bfd80a6ec559a5eeb96c33f832418bf0fb96752de0539905cf7b0cc1d31d779", size = 10236012 },
+{ url = "https://files.pythonhosted.org/packages/b8/ca/b9bf954cfed165e1a0c24b86305d5c8ea75def256707f2448439ac5e0d8b/ruff-0.11.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0947c0a1afa75dcb5db4b34b070ec2bccee869d40e6cc8ab25aca11a7d527794", size = 10415338 },
+{ url = "https://files.pythonhosted.org/packages/d9/4d/2522dde4e790f1b59885283f8786ab0046958dfd39959c81acc75d347467/ruff-0.11.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad871ff74b5ec9caa66cb725b85d4ef89b53f8170f47c3406e32ef040400b038", size = 9965277 },
+{ url = "https://files.pythonhosted.org/packages/e5/7a/749f56f150eef71ce2f626a2f6988446c620af2f9ba2a7804295ca450397/ruff-0.11.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6cf918390cfe46d240732d4d72fa6e18e528ca1f60e318a10835cf2fa3dc19f", size = 11541614 },
+{ url = "https://files.pythonhosted.org/packages/89/b2/7d9b8435222485b6aac627d9c29793ba89be40b5de11584ca604b829e960/ruff-0.11.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56145ee1478582f61c08f21076dc59153310d606ad663acc00ea3ab5b2125f82", size = 12198873 },
+{ url = "https://files.pythonhosted.org/packages/00/e0/a1a69ef5ffb5c5f9c31554b27e030a9c468fc6f57055886d27d316dfbabd/ruff-0.11.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5f66f8f1e8c9fc594cbd66fbc5f246a8d91f916cb9667e80208663ec3728304", size = 11670190 },
+{ url = "https://files.pythonhosted.org/packages/05/61/c1c16df6e92975072c07f8b20dad35cd858e8462b8865bc856fe5d6ccb63/ruff-0.11.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80b4df4d335a80315ab9afc81ed1cff62be112bd165e162b5eed8ac55bfc8470", size = 13902301 },
+{ url = "https://files.pythonhosted.org/packages/79/89/0af10c8af4363304fd8cb833bd407a2850c760b71edf742c18d5a87bb3ad/ruff-0.11.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3068befab73620b8a0cc2431bd46b3cd619bc17d6f7695a3e1bb166b652c382a", size = 11350132 },
+{ url = "https://files.pythonhosted.org/packages/b9/e1/ecb4c687cbf15164dd00e38cf62cbab238cad05dd8b6b0fc68b0c2785e15/ruff-0.11.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f5da2e710a9641828e09aa98b92c9ebbc60518fdf3921241326ca3e8f8e55b8b", size = 10312937 },
+{ url = "https://files.pythonhosted.org/packages/cf/4f/0e53fe5e500b65934500949361e3cd290c5ba60f0324ed59d15f46479c06/ruff-0.11.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ef39f19cb8ec98cbc762344921e216f3857a06c47412030374fffd413fb8fd3a", size = 9936683 },
+{ url = "https://files.pythonhosted.org/packages/04/a8/8183c4da6d35794ae7f76f96261ef5960853cd3f899c2671961f97a27d8e/ruff-0.11.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2a7cedf47244f431fd11aa5a7e2806dda2e0c365873bda7834e8f7d785ae159", size = 10950217 },
+{ url = "https://files.pythonhosted.org/packages/26/88/9b85a5a8af21e46a0639b107fcf9bfc31da4f1d263f2fc7fbe7199b47f0a/ruff-0.11.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:81be52e7519f3d1a0beadcf8e974715b2dfc808ae8ec729ecfc79bddf8dbb783", size = 11404521 },
+{ url = "https://files.pythonhosted.org/packages/fc/52/047f35d3b20fd1ae9ccfe28791ef0f3ca0ef0b3e6c1a58badd97d450131b/ruff-0.11.5-py3-none-win32.whl", hash = "sha256:e268da7b40f56e3eca571508a7e567e794f9bfcc0f412c4b607931d3af9c4afe", size = 10320697 },
+{ url = "https://files.pythonhosted.org/packages/b9/fe/00c78010e3332a6e92762424cf4c1919065707e962232797d0b57fd8267e/ruff-0.11.5-py3-none-win_amd64.whl", hash = "sha256:6c6dc38af3cfe2863213ea25b6dc616d679205732dc0fb673356c2d69608f800", size = 11378665 },
+{ url = "https://files.pythonhosted.org/packages/43/7c/c83fe5cbb70ff017612ff36654edfebec4b1ef79b558b8e5fd933bab836b/ruff-0.11.5-py3-none-win_arm64.whl", hash = "sha256:67e241b4314f4eacf14a601d586026a962f4002a475aa702c69980a38087aa4e", size = 10460287 },
]
[[package]]


@@ -1,3 +1,3 @@
{
-"flutter": "3.29.2"
+"flutter": "3.29.3"
}


@@ -4,7 +4,18 @@ The Immich mobile app is a Flutter-based solution leveraging the Isar Database f
## Setup
-You must set up Flutter toolchain in your machine before you can perform any of the development.
+1. Setup Flutter toolchain using FVM.
2. Run `flutter pub get` to install the dependencies.
3. Run `make translation` to generate the translation file.
4. Run `fvm flutter run` to start the app.
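A minimal sketch of what step 1 typically looks like, assuming FVM 3.x is already installed; `fvm install` picks up the version pinned in the `.fvmrc` shown earlier in this diff:

```bash
cd mobile
fvm install            # installs the pinned Flutter version (3.29.3)
fvm flutter --version  # sanity check: the project now resolves the pinned SDK
```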
## Translation
To add a new translation text, enter the key-value pair in the `i18n/en.json` in the root of the immich project. Then, from the `mobile/` directory, run
```bash
make translation
```
## Immich-Flutter Directory Structure
@@ -15,6 +26,7 @@ Below are the directory inside the `lib` directory:
- `extensions`: Extensions enhancing various existing functionalities within the app, such as asset_extensions.dart, string_extensions.dart, and more.
- `module_template`: Provides a template structure for different modules within the app, including subdivisions like models, providers, services, UI, and views.
- `models`: Placeholder for storing module-specific models.
- `providers`: Section to define module-specific Riverpod providers.
- `services`: Houses services tailored to the module's functionality.


@@ -31,10 +31,10 @@ class TabControllerPage extends HookConsumerWidget {
children: [
icon,
Positioned(
-right: -14,
+right: -18,
child: SizedBox(
-height: 12,
-width: 12,
+height: 20,
+width: 20,
child: CircularProgressIndicator(
strokeWidth: 2,
valueColor: AlwaysStoppedAnimation<Color>(


@@ -237,6 +237,9 @@ class PeopleCollectionCard extends ConsumerWidget {
),
),
child: people.widgetWhen(
onLoading: () => const Center(
child: CircularProgressIndicator(),
),
onData: (people) {
return GridView.count(
crossAxisCount: 2,


@@ -18,9 +18,9 @@ import 'package:immich_mobile/interfaces/album_media.interface.dart';
import 'package:immich_mobile/interfaces/asset.interface.dart';
import 'package:immich_mobile/interfaces/etag.interface.dart';
import 'package:immich_mobile/interfaces/local_files_manager.interface.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/interfaces/partner.interface.dart';
import 'package:immich_mobile/interfaces/partner_api.interface.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/providers/infrastructure/exif.provider.dart';
import 'package:immich_mobile/providers/infrastructure/user.provider.dart';
import 'package:immich_mobile/repositories/album.repository.dart';
@@ -29,9 +29,9 @@ import 'package:immich_mobile/repositories/album_media.repository.dart';
import 'package:immich_mobile/repositories/asset.repository.dart';
import 'package:immich_mobile/repositories/etag.repository.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/repositories/partner.repository.dart';
import 'package:immich_mobile/repositories/partner_api.repository.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/services/entity.service.dart';
import 'package:immich_mobile/services/hash.service.dart';
import 'package:immich_mobile/utils/async_mutex.dart';
@@ -839,13 +839,13 @@ class SyncService {
_toggleTrashStatusForAssets(assets);
}
final exifInfos = assets.map((e) => e.exifInfo).nonNulls.toList();
try {
await _assetRepository.transaction(() async {
await _assetRepository.updateAll(assets);
for (final Asset added in assets) {
added.exifInfo = added.exifInfo?.copyWith(assetId: added.id);
}
final exifInfos = assets.map((e) => e.exifInfo).nonNulls.toList();
await _exifInfoRepository.updateAll(exifInfos);
});
_log.info("Upserted ${assets.length} assets into the DB");
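The ordering here is the substance of the fix: `copyWith` returns a new `ExifInfo`, so a list built before the loop (as the removed line did) still references the copies without `assetId`. A self-contained sketch with simplified stand-in types:

```dart
// Standalone illustration (simplified stand-in types, not the real entities).
class ExifInfo {
  final int? assetId;
  const ExifInfo({this.assetId});
  ExifInfo copyWith({int? assetId}) => ExifInfo(assetId: assetId ?? this.assetId);
}

class Asset {
  final int id;
  ExifInfo? exifInfo;
  Asset(this.id, this.exifInfo);
}

void main() {
  final assets = [Asset(1, const ExifInfo()), Asset(2, const ExifInfo())];
  // Built before the loop: holds the originals, which never get an assetId.
  final tooEarly = assets.map((a) => a.exifInfo).nonNulls.toList();
  for (final a in assets) {
    a.exifInfo = a.exifInfo?.copyWith(assetId: a.id);
  }
  // Built after the loop (as the moved line now does): holds the copies.
  final afterLoop = assets.map((a) => a.exifInfo).nonNulls.toList();
  print(tooEarly.map((e) => e.assetId).toList()); // [null, null] — the old bug
  print(afterLoop.map((e) => e.assetId).toList()); // [1, 2] — what gets upserted
}
```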


@@ -82,10 +82,10 @@ packages:
dependency: "direct main"
description:
name: background_downloader
-sha256: ed64a215cd24c83a478f602364a3ca86a6dafd178ad783188cc32c6956d5e529
+sha256: "2d4c2b7438e7643585880f9cc00ace16a52d778088751f1bfbf714627b315462"
url: "https://pub.dev"
source: hosted
-version: "8.9.4"
+version: "9.2.0"
boolean_selector:
dependency: transitive
description:
@@ -1975,4 +1975,4 @@ packages:
version: "3.1.3"
sdks:
dart: ">=3.7.0 <4.0.0"
-flutter: ">=3.29.2"
+flutter: ">=3.29.3"


@@ -6,7 +6,7 @@ version: 1.131.3+193
environment:
sdk: '>=3.3.0 <4.0.0'
-flutter: 3.29.2
+flutter: 3.29.3
isar_version: &isar_version 3.1.8
@@ -16,7 +16,7 @@ dependencies:
async: ^2.11.0
auto_route: ^9.2.0
-background_downloader: ^8.5.5
+background_downloader: ^9.2.0
cached_network_image: ^3.4.1
cancellation_token_http: ^2.1.0
collection: ^1.18.0


@@ -274,6 +274,15 @@ void main() {
List.filled(assets.length, null),
);
await s.upsertAssetsWithExif(assets);
verify(
() => exifInfoRepository.updateAll(
any(
that: containsAll(
assets.map((a) => a.exifInfo!.copyWith(assetId: a.id)),
),
),
),
);
expect(assets.map((a) => a.exifInfo?.assetId), assets.map((a) => a.id));
});
});

server/package-lock.json (generated, 330 lines changed)

@@ -43,7 +43,7 @@
"ioredis": "^5.3.2",
"joi": "^17.10.0",
"js-yaml": "^4.1.0",
-"kysely": "^0.27.3",
+"kysely": "^0.28.0",
"kysely-postgres-js": "^2.0.0",
"lodash": "^4.17.21",
"luxon": "^3.4.2",
@@ -105,7 +105,6 @@
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^57.0.0",
"globals": "^16.0.0",
"kysely-codegen": "^0.18.0",
"mock-fs": "^5.2.0", "mock-fs": "^5.2.0",
"node-addon-api": "^8.3.0", "node-addon-api": "^8.3.0",
"patch-package": "^8.0.0", "patch-package": "^8.0.0",
@@ -8460,16 +8459,6 @@
"license": "Apache-2.0",
"peer": true
},
"node_modules/diff": {
"version": "3.5.0",
"resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz",
"integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.3.1"
}
},
"node_modules/discontinuous-range": { "node_modules/discontinuous-range": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz", "resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz",
@@ -8680,22 +8669,6 @@
"url": "https://dotenvx.com"
}
},
"node_modules/dotenv-expand": {
"version": "12.0.1",
"resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-12.0.1.tgz",
"integrity": "sha512-LaKRbou8gt0RNID/9RoI+J2rvXsBRPMV7p+ElHlPhcSARbCPDYcYG2s1TIzAfWv4YSgyY5taidWzzs31lNV3yQ==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"dotenv": "^16.4.5"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/dunder-proto": { "node_modules/dunder-proto": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
@@ -8877,16 +8850,6 @@
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/env-paths": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz",
"integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/error-ex": { "node_modules/error-ex": {
"version": "1.3.2", "version": "1.3.2",
"resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
@@ -10132,101 +10095,6 @@
"node": ">= 0.4"
}
},
"node_modules/git-diff": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/git-diff/-/git-diff-2.0.6.tgz",
"integrity": "sha512-/Iu4prUrydE3Pb3lCBMbcSNIf81tgGt0W1ZwknnyF62t3tHmtiJTRj0f+1ZIhp3+Rh0ktz1pJVoa7ZXUCskivA==",
"dev": true,
"license": "ISC",
"dependencies": {
"chalk": "^2.3.2",
"diff": "^3.5.0",
"loglevel": "^1.6.1",
"shelljs": "^0.8.1",
"shelljs.exec": "^1.1.7"
},
"engines": {
"node": ">= 4.8.0"
}
},
"node_modules/git-diff/node_modules/ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-convert": "^1.9.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/git-diff/node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/git-diff/node_modules/color-convert": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-name": "1.1.3"
}
},
"node_modules/git-diff/node_modules/color-name": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
"dev": true,
"license": "MIT"
},
"node_modules/git-diff/node_modules/escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/git-diff/node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/git-diff/node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/glob": { "node_modules/glob": {
"version": "11.0.1", "version": "11.0.1",
"resolved": "https://registry.npmjs.org/glob/-/glob-11.0.1.tgz", "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.1.tgz",
@@ -10745,16 +10613,6 @@
"node": ">=8"
}
},
"node_modules/interpret": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz",
"integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.10"
}
},
"node_modules/ioredis": { "node_modules/ioredis": {
"version": "5.6.1", "version": "5.6.1",
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.6.1.tgz", "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.6.1.tgz",
@@ -11293,104 +11151,12 @@
}
},
"node_modules/kysely": {
-"version": "0.27.6",
-"resolved": "https://registry.npmjs.org/kysely/-/kysely-0.27.6.tgz",
-"integrity": "sha512-FIyV/64EkKhJmjgC0g2hygpBv5RNWVPyNCqSAD7eTCv6eFWNIi4PN1UvdSJGicN/o35bnevgis4Y0UDC0qi8jQ==",
+"version": "0.28.0",
+"resolved": "https://registry.npmjs.org/kysely/-/kysely-0.28.0.tgz",
+"integrity": "sha512-hq8VcLy57Ww7oPTTVEOrT9ml+g8ehbbmEUkHmW4Xtubu+NHdKZi6SH6egmD4cjDhn3b/0s0h/6AjdPayOTJhNw==",
"license": "MIT",
"engines": {
-"node": ">=14.0.0"
+"node": ">=18.0.0"
}
},
"node_modules/kysely-codegen": {
"version": "0.18.3",
"resolved": "https://registry.npmjs.org/kysely-codegen/-/kysely-codegen-0.18.3.tgz",
"integrity": "sha512-u2PFL1i8kaD+Jhcd5aIGPLgDqNriWvfWKtA7+kkvR2zZxr3DkdvT/B3nJWObZ/uj+GeONq0TChvf6mf6RqqWuA==",
"dev": true,
"license": "MIT",
"dependencies": {
"chalk": "4.1.2",
"cosmiconfig": "^9.0.0",
"dotenv": "^16.4.7",
"dotenv-expand": "^12.0.1",
"git-diff": "^2.0.6",
"micromatch": "^4.0.8",
"minimist": "^1.2.8",
"pluralize": "^8.0.0",
"zod": "^3.24.2"
},
"bin": {
"kysely-codegen": "dist/cli/bin.js"
},
"peerDependencies": {
"@libsql/kysely-libsql": "^0.3.0 || ^0.4.1",
"@tediousjs/connection-string": "^0.5.0",
"better-sqlite3": ">=7.6.2",
"kysely": "^0.27.0",
"kysely-bun-sqlite": "^0.3.2",
"kysely-bun-worker": "^0.5.3",
"mysql2": "^2.3.3 || ^3.0.0",
"pg": "^8.8.0",
"tarn": "^3.0.0",
"tedious": "^18.0.0"
},
"peerDependenciesMeta": {
"@libsql/kysely-libsql": {
"optional": true
},
"@tediousjs/connection-string": {
"optional": true
},
"better-sqlite3": {
"optional": true
},
"kysely": {
"optional": false
},
"kysely-bun-sqlite": {
"optional": true
},
"kysely-bun-worker": {
"optional": true
},
"mysql2": {
"optional": true
},
"pg": {
"optional": true
},
"tarn": {
"optional": true
},
"tedious": {
"optional": true
}
}
},
"node_modules/kysely-codegen/node_modules/cosmiconfig": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz",
"integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==",
"dev": true,
"license": "MIT",
"dependencies": {
"env-paths": "^2.2.1",
"import-fresh": "^3.3.0",
"js-yaml": "^4.1.0",
"parse-json": "^5.2.0"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/sponsors/d-fischer"
},
"peerDependencies": {
"typescript": ">=4.9.5"
},
"peerDependenciesMeta": {
"typescript": {
"optional": true
}
}
},
"node_modules/kysely-postgres-js": {
@@ -11582,20 +11348,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/loglevel": {
"version": "1.9.2",
"resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.9.2.tgz",
"integrity": "sha512-HgMmCqIJSAKqo68l0rS2AanEWfkxaZ5wNiEFb5ggm08lDs9Xl2KxBlX3PTcaD2chBM1gXAYf491/M2Rv8Jwayg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6.0"
},
"funding": {
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/loglevel"
}
},
"node_modules/long": { "node_modules/long": {
"version": "5.3.1", "version": "5.3.1",
"resolved": "https://registry.npmjs.org/long/-/long-5.3.1.tgz", "resolved": "https://registry.npmjs.org/long/-/long-5.3.1.tgz",
@@ -14032,18 +13784,6 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/rechoir": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz",
"integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==",
"dev": true,
"dependencies": {
"resolve": "^1.1.6"
},
"engines": {
"node": ">= 0.10"
}
},
"node_modules/redis-errors": { "node_modules/redis-errors": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
@@ -14645,56 +14385,6 @@
"node": ">=8"
}
},
"node_modules/shelljs": {
"version": "0.8.5",
"resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz",
"integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"glob": "^7.0.0",
"interpret": "^1.0.0",
"rechoir": "^0.6.2"
},
"bin": {
"shjs": "bin/shjs"
},
"engines": {
"node": ">=4"
}
},
"node_modules/shelljs.exec": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/shelljs.exec/-/shelljs.exec-1.1.8.tgz",
"integrity": "sha512-vFILCw+lzUtiwBAHV8/Ex8JsFjelFMdhONIsgKNLgTzeRckp2AOYRQtHJE/9LhNvdMmE27AGtzWx0+DHpwIwSw==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">= 4.0.0"
}
},
"node_modules/shelljs/node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"deprecated": "Glob versions prior to v9 are no longer supported",
"dev": true,
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/shimmer": { "node_modules/shimmer": {
"version": "1.2.1", "version": "1.2.1",
"resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz",
@@ -18010,16 +17700,6 @@
"engines": {
"node": ">= 14"
}
},
"node_modules/zod": {
"version": "3.24.2",
"resolved": "https://registry.npmjs.org/zod/-/zod-3.24.2.tgz",
"integrity": "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
} }
}
}


@@ -69,7 +69,7 @@
"ioredis": "^5.3.2",
"joi": "^17.10.0",
"js-yaml": "^4.1.0",
-"kysely": "^0.27.3",
+"kysely": "^0.28.0",
"kysely-postgres-js": "^2.0.0",
"lodash": "^4.17.21",
"luxon": "^3.4.2",
@@ -131,7 +131,6 @@
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^57.0.0",
"globals": "^16.0.0",
"kysely-codegen": "^0.18.0",
"mock-fs": "^5.2.0", "mock-fs": "^5.2.0",
"node-addon-api": "^8.3.0", "node-addon-api": "^8.3.0",
"patch-package": "^8.0.0", "patch-package": "^8.0.0",


@@ -2,11 +2,9 @@ import { BullModule } from '@nestjs/bullmq';
import { Inject, Module, OnModuleDestroy, OnModuleInit, ValidationPipe } from '@nestjs/common';
import { APP_FILTER, APP_GUARD, APP_INTERCEPTOR, APP_PIPE } from '@nestjs/core';
import { ScheduleModule, SchedulerRegistry } from '@nestjs/schedule';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { ClsModule } from 'nestjs-cls';
import { KyselyModule } from 'nestjs-kysely';
import { OpenTelemetryModule } from 'nestjs-otel';
import postgres from 'postgres';
import { commands } from 'src/commands';
import { IWorker } from 'src/constants';
import { controllers } from 'src/controllers';
@@ -25,6 +23,7 @@ import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemet
import { services } from 'src/services';
import { AuthService } from 'src/services/auth.service';
import { CliService } from 'src/services/cli.service';
import { getKyselyConfig } from 'src/utils/database';
const common = [...repositories, ...services, GlobalExceptionFilter];
@@ -45,19 +44,7 @@ const imports = [
BullModule.registerQueue(...bull.queues),
ClsModule.forRoot(cls.config),
OpenTelemetryModule.forRoot(otel),
-KyselyModule.forRoot({
+KyselyModule.forRoot(getKyselyConfig(database.config.kysely)),
dialect: new PostgresJSDialect({ postgres: postgres(database.config.kysely) }),
log(event) {
if (event.level === 'error') {
console.error('Query failed :', {
durationMs: event.queryDurationMillis,
error: event.error,
sql: event.query.sql,
params: event.query.parameters,
});
}
},
}),
];
class BaseModule implements OnModuleInit, OnModuleDestroy { class BaseModule implements OnModuleInit, OnModuleDestroy {


@@ -2,7 +2,6 @@
process.env.DB_URL = process.env.DB_URL || 'postgres://postgres:postgres@localhost:5432/immich';
import { Kysely } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { writeFileSync } from 'node:fs';
import { basename, dirname, extname, join } from 'node:path';
import postgres from 'postgres';
@@ -11,6 +10,7 @@ import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import 'src/schema';
import { schemaDiff, schemaFromCode, schemaFromDatabase } from 'src/sql-tools';
import { getKyselyConfig } from 'src/utils/database';
const main = async () => {
const command = process.argv[2];
@@ -52,19 +52,7 @@ const run = async (only?: 'kysely' | 'typeorm') => {
const configRepository = new ConfigRepository();
const { database } = configRepository.getEnv();
const logger = new LoggingRepository(undefined, configRepository);
-const db = new Kysely<any>({
+const db = new Kysely<any>(getKyselyConfig(database.config.kysely));
dialect: new PostgresJSDialect({ postgres: postgres(database.config.kysely) }),
log(event) {
if (event.level === 'error') {
console.error('Query failed :', {
durationMs: event.queryDurationMillis,
error: event.error,
sql: event.query.sql,
params: event.query.parameters,
});
}
},
});
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
await databaseRepository.runMigrations({ only });


@@ -4,13 +4,11 @@ import { Reflector } from '@nestjs/core';
import { SchedulerRegistry } from '@nestjs/schedule';
import { Test } from '@nestjs/testing';
import { ClassConstructor } from 'class-transformer';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { ClsModule } from 'nestjs-cls';
import { KyselyModule } from 'nestjs-kysely';
import { OpenTelemetryModule } from 'nestjs-otel';
import { mkdir, rm, writeFile } from 'node:fs/promises';
import { join } from 'node:path';
import postgres from 'postgres';
import { format } from 'sql-formatter';
import { GENERATE_SQL_KEY, GenerateSqlQueries } from 'src/decorators';
import { repositories } from 'src/repositories';
@@ -18,6 +16,11 @@ import { AccessRepository } from 'src/repositories/access.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { AuthService } from 'src/services/auth.service';
import { getKyselyConfig } from 'src/utils/database';
const handleError = (label: string, error: Error | any) => {
console.error(`${label} error: ${error}`);
};
export class SqlLogger {
queries: string[] = [];
@@ -75,7 +78,7 @@ class SqlGenerator {
const moduleFixture = await Test.createTestingModule({
imports: [
KyselyModule.forRoot({
-dialect: new PostgresJSDialect({ postgres: postgres(database.config.kysely) }),
+...getKyselyConfig(database.config.kysely),
log: (event) => {
if (event.level === 'query') {
this.sqlLogger.logQuery(event.query.sql);
@@ -135,7 +138,7 @@ class SqlGenerator {
queries.push({ params: [] });
}
-for (const { name, params } of queries) {
+for (const { name, params, stream } of queries) {
let queryLabel = `${label}.${key}`;
if (name) {
queryLabel += ` (${name})`;
@@ -143,8 +146,19 @@
this.sqlLogger.clear();
if (stream) {
try {
const result: AsyncIterableIterator<unknown> = target.apply(instance, params);
for await (const _ of result) {
break;
}
} catch (error) {
handleError(queryLabel, error);
}
} else {
// errors still generate sql, which is all we care about
-await target.apply(instance, params).catch((error: Error) => console.error(`${queryLabel} error: ${error}`));
+await target.apply(instance, params).catch((error: Error) => handleError(queryLabel, error));
}
if (this.sqlLogger.queries.length === 0) {
console.warn(`No queries recorded for ${queryLabel}`);


@@ -1,7 +1,7 @@
import { randomUUID } from 'node:crypto';
import { dirname, join, resolve } from 'node:path';
import { APP_MEDIA_LOCATION } from 'src/constants';
-import { AssetEntity } from 'src/entities/asset.entity';
+import { StorageAsset } from 'src/database';
import { AssetFileType, AssetPathType, ImageFormat, PathType, PersonPathType, StorageFolder } from 'src/enum';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
@@ -28,6 +28,8 @@ export interface MoveRequest {
export type GeneratedImageType = AssetPathType.PREVIEW | AssetPathType.THUMBNAIL | AssetPathType.FULLSIZE;
export type GeneratedAssetType = GeneratedImageType | AssetPathType.ENCODED_VIDEO;
type ThumbnailPathEntity = { id: string; ownerId: string };
let instance: StorageCore | null;
export class StorageCore {
@@ -84,19 +86,19 @@ export class StorageCore {
return join(APP_MEDIA_LOCATION, folder);
}
-static getPersonThumbnailPath(person: { id: string; ownerId: string }) {
+static getPersonThumbnailPath(person: ThumbnailPathEntity) {
return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, person.ownerId, `${person.id}.jpeg`);
}
-static getImagePath(asset: AssetEntity, type: GeneratedImageType, format: ImageFormat) {
+static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: ImageFormat) {
return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, asset.ownerId, `${asset.id}-${type}.${format}`);
}
-static getEncodedVideoPath(asset: AssetEntity) {
+static getEncodedVideoPath(asset: ThumbnailPathEntity) {
return StorageCore.getNestedPath(StorageFolder.ENCODED_VIDEO, asset.ownerId, `${asset.id}.mp4`);
}
-static getAndroidMotionPath(asset: AssetEntity, uuid: string) {
+static getAndroidMotionPath(asset: ThumbnailPathEntity, uuid: string) {
return StorageCore.getNestedPath(StorageFolder.ENCODED_VIDEO, asset.ownerId, `${uuid}-MP.mp4`);
}
@@ -114,7 +116,7 @@ export class StorageCore {
return normalizedPath.startsWith(normalizedAppMediaLocation);
}
-async moveAssetImage(asset: AssetEntity, pathType: GeneratedImageType, format: ImageFormat) {
+async moveAssetImage(asset: StorageAsset, pathType: GeneratedImageType, format: ImageFormat) {
const { id: entityId, files } = asset;
const oldFile = getAssetFile(files, pathType);
return this.moveFile({
@@ -125,7 +127,7 @@
});
}
-async moveAssetVideo(asset: AssetEntity) {
+async moveAssetVideo(asset: StorageAsset) {
return this.moveFile({
entityId: asset.id,
pathType: AssetPathType.ENCODED_VIDEO,


@@ -121,6 +121,13 @@ export type UserAdmin = User & {
metadata: UserMetadataItem[];
};
export type StorageAsset = {
id: string;
ownerId: string;
files: AssetFile[];
encodedVideoPath: string | null;
};
export type Asset = {
createdAt: Date;
updatedAt: Date;


@@ -123,6 +123,7 @@ export const GENERATE_SQL_KEY = 'generate-sql-key';
export interface GenerateSqlQueries {
name?: string;
params: unknown[];
stream?: boolean;
}
export const Telemetry = (options: { enabled?: boolean }) =>
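A hypothetical use of the new `stream` flag (sketch only: the decorator and dummy-value names are assumptions about this codebase's internals, and the method mirrors `AssetJobRepository.streamForDeletedJob`, whose generated SQL appears later in this diff):

```typescript
import { Kysely } from 'kysely';
// Assumed imports; these names appear in this diff, but the exact module
// paths and signatures are not shown here.
import { DummyValue, GenerateSql } from 'src/decorators';
import { DB } from 'src/db';

class AssetJobRepository {
  constructor(private db: Kysely<DB>) {}

  // stream: true tells the SQL generator to iterate the async result once
  // (see the sql generator change above) instead of awaiting a promise.
  @GenerateSql({ params: [DummyValue.DATE], stream: true })
  streamForDeletedJob(trashedBefore: Date) {
    return this.db
      .selectFrom('assets')
      .select(['id', 'isOffline'])
      .where('assets.deletedAt', '<=', trashedBefore)
      .stream(); // returns an AsyncIterableIterator rather than a Promise
  }
}
```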


@@ -53,12 +53,6 @@ export class AssetEntity {
duplicateId!: string | null;
}
export type AssetEntityPlaceholder = AssetEntity & {
fileCreatedAt: Date | null;
fileModifiedAt: Date | null;
localDateTime: Date | null;
};
export function withExif<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
return qb
.leftJoin('exif', 'assets.id', 'exif.assetId')


@@ -58,3 +58,109 @@ where
"assets"."id" = $1::uuid
limit
$2
-- AssetJobRepository.streamForThumbnailJob
select
"assets"."id",
"assets"."thumbhash",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_files"."id",
"asset_files"."path",
"asset_files"."type"
from
"asset_files"
where
"asset_files"."assetId" = "assets"."id"
) as agg
) as "files"
from
"assets"
inner join "asset_job_status" on "asset_job_status"."assetId" = "assets"."id"
where
"assets"."deletedAt" is null
and "assets"."isVisible" = $1
and (
"asset_job_status"."previewAt" is null
or "asset_job_status"."thumbnailAt" is null
or "assets"."thumbhash" is null
)
-- AssetJobRepository.getForMigrationJob
select
"assets"."id",
"assets"."ownerId",
"assets"."encodedVideoPath",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_files"."id",
"asset_files"."path",
"asset_files"."type"
from
"asset_files"
where
"asset_files"."assetId" = "assets"."id"
) as agg
) as "files"
from
"assets"
where
"assets"."id" = $1
-- AssetJobRepository.getForStorageTemplateJob
select
"assets"."id",
"assets"."ownerId",
"assets"."type",
"assets"."checksum",
"assets"."originalPath",
"assets"."isExternal",
"assets"."sidecarPath",
"assets"."originalFileName",
"assets"."livePhotoVideoId",
"assets"."fileCreatedAt",
"exif"."timeZone",
"exif"."fileSizeInByte"
from
"assets"
inner join "exif" on "assets"."id" = "exif"."assetId"
where
"assets"."deletedAt" is null
and "assets"."id" = $1
-- AssetJobRepository.streamForStorageTemplateJob
select
"assets"."id",
"assets"."ownerId",
"assets"."type",
"assets"."checksum",
"assets"."originalPath",
"assets"."isExternal",
"assets"."sidecarPath",
"assets"."originalFileName",
"assets"."livePhotoVideoId",
"assets"."fileCreatedAt",
"exif"."timeZone",
"exif"."fileSizeInByte"
from
"assets"
inner join "exif" on "assets"."id" = "exif"."assetId"
where
"assets"."deletedAt" is null
-- AssetJobRepository.streamForDeletedJob
select
"id",
"isOffline"
from
"assets"
where
"assets"."deletedAt" <= $1


@@ -0,0 +1,248 @@
-- NOTE: This file is auto generated by ./sql-generator
-- SyncRepository.getCheckpoints
select
"type",
"ack"
from
"session_sync_checkpoints"
where
"sessionId" = $1
-- SyncRepository.deleteCheckpoints
delete from "session_sync_checkpoints"
where
"sessionId" = $1
-- SyncRepository.getUserUpserts
select
"id",
"name",
"email",
"deletedAt",
"updateId"
from
"users"
where
"updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getUserDeletes
select
"id",
"userId"
from
"users_audit"
where
"deletedAt" < now() - interval '1 millisecond'
order by
"id" asc
-- SyncRepository.getPartnerUpserts
select
"sharedById",
"sharedWithId",
"inTimeline",
"updateId"
from
"partners"
where
(
"sharedById" = $1
or "sharedWithId" = $2
)
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getPartnerDeletes
select
"id",
"sharedById",
"sharedWithId"
from
"partners_audit"
where
(
"sharedById" = $1
or "sharedWithId" = $2
)
and "deletedAt" < now() - interval '1 millisecond'
order by
"id" asc
-- SyncRepository.getAssetUpserts
select
"id",
"ownerId",
"thumbhash",
"checksum",
"fileCreatedAt",
"fileModifiedAt",
"localDateTime",
"type",
"deletedAt",
"isFavorite",
"isVisible",
"updateId"
from
"assets"
where
"ownerId" = $1
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getPartnerAssetsUpserts
select
"id",
"ownerId",
"thumbhash",
"checksum",
"fileCreatedAt",
"fileModifiedAt",
"localDateTime",
"type",
"deletedAt",
"isFavorite",
"isVisible",
"updateId"
from
"assets"
where
"ownerId" in (
select
"sharedById"
from
"partners"
where
"sharedWithId" = $1
)
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getAssetDeletes
select
"id",
"assetId"
from
"assets_audit"
where
"ownerId" = $1
and "deletedAt" < now() - interval '1 millisecond'
order by
"id" asc
-- SyncRepository.getPartnerAssetDeletes
select
"id",
"assetId"
from
"assets_audit"
where
"ownerId" in (
select
"sharedById"
from
"partners"
where
"sharedWithId" = $1
)
and "deletedAt" < now() - interval '1 millisecond'
order by
"id" asc
-- SyncRepository.getAssetExifsUpserts
select
"exif"."assetId",
"exif"."description",
"exif"."exifImageWidth",
"exif"."exifImageHeight",
"exif"."fileSizeInByte",
"exif"."orientation",
"exif"."dateTimeOriginal",
"exif"."modifyDate",
"exif"."timeZone",
"exif"."latitude",
"exif"."longitude",
"exif"."projectionType",
"exif"."city",
"exif"."state",
"exif"."country",
"exif"."make",
"exif"."model",
"exif"."lensModel",
"exif"."fNumber",
"exif"."focalLength",
"exif"."iso",
"exif"."exposureTime",
"exif"."profileDescription",
"exif"."rating",
"exif"."fps",
"exif"."updateId"
from
"exif"
where
"assetId" in (
select
"id"
from
"assets"
where
"ownerId" = $1
)
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getPartnerAssetExifsUpserts
select
"exif"."assetId",
"exif"."description",
"exif"."exifImageWidth",
"exif"."exifImageHeight",
"exif"."fileSizeInByte",
"exif"."orientation",
"exif"."dateTimeOriginal",
"exif"."modifyDate",
"exif"."timeZone",
"exif"."latitude",
"exif"."longitude",
"exif"."projectionType",
"exif"."city",
"exif"."state",
"exif"."country",
"exif"."make",
"exif"."model",
"exif"."lensModel",
"exif"."fNumber",
"exif"."focalLength",
"exif"."iso",
"exif"."exposureTime",
"exif"."profileDescription",
"exif"."rating",
"exif"."fps",
"exif"."updateId"
from
"exif"
where
"assetId" in (
select
"id"
from
"assets"
where
"ownerId" in (
select
"sharedById"
from
"partners"
where
"sharedWithId" = $1
)
)
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc

View File

@ -54,6 +54,39 @@ export class AssetJobRepository {
.executeTakeFirst(); .executeTakeFirst();
} }
@GenerateSql({ params: [false], stream: true })
streamForThumbnailJob(force: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id', 'assets.thumbhash'])
.select(withFiles)
.where('assets.deletedAt', 'is', null)
.where('assets.isVisible', '=', true)
.$if(!force, (qb) =>
qb
// If there are no entries, metadata extraction hasn't run yet, which is required for thumbnails
.innerJoin('asset_job_status', 'asset_job_status.assetId', 'assets.id')
.where((eb) =>
eb.or([
eb('asset_job_status.previewAt', 'is', null),
eb('asset_job_status.thumbnailAt', 'is', null),
eb('assets.thumbhash', 'is', null),
]),
),
)
.stream();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForMigrationJob(id: string) {
return this.db
.selectFrom('assets')
.select(['assets.id', 'assets.ownerId', 'assets.encodedVideoPath'])
.select(withFiles)
.where('assets.id', '=', id)
.executeTakeFirst();
}
private storageTemplateAssetQuery() { private storageTemplateAssetQuery() {
return this.db return this.db
.selectFrom('assets') .selectFrom('assets')
@ -75,16 +108,19 @@ export class AssetJobRepository {
.where('assets.deletedAt', 'is', null); .where('assets.deletedAt', 'is', null);
} }
@GenerateSql({ params: [DummyValue.UUID] })
getForStorageTemplateJob(id: string): Promise<StorageAsset | undefined> { getForStorageTemplateJob(id: string): Promise<StorageAsset | undefined> {
return this.storageTemplateAssetQuery().where('assets.id', '=', id).executeTakeFirst() as Promise< return this.storageTemplateAssetQuery().where('assets.id', '=', id).executeTakeFirst() as Promise<
StorageAsset | undefined StorageAsset | undefined
>; >;
} }
@GenerateSql({ params: [], stream: true })
streamForStorageTemplateJob() { streamForStorageTemplateJob() {
return this.storageTemplateAssetQuery().stream() as AsyncIterableIterator<StorageAsset>; return this.storageTemplateAssetQuery().stream() as AsyncIterableIterator<StorageAsset>;
} }
@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForDeletedJob(trashedBefore: Date) { streamForDeletedJob(trashedBefore: Date) {
return this.db return this.db
.selectFrom('assets') .selectFrom('assets')
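
The withFiles helper used by these queries is not shown in this diff; judging from the generated SQL above (a correlated json_agg over asset_files), it is presumably built on kysely's jsonArrayFrom helper, roughly like the sketch below. The DB slice here is trimmed to the columns the helper touches:

import { ExpressionBuilder } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';

// Trimmed schema; the real DB interface is generated elsewhere in the project.
interface DB {
  assets: { id: string };
  asset_files: { id: string; assetId: string; path: string; type: string };
}

const withFiles = (eb: ExpressionBuilder<DB, 'assets'>) =>
  jsonArrayFrom(
    eb
      .selectFrom('asset_files')
      .select(['asset_files.id', 'asset_files.path', 'asset_files.type'])
      .whereRef('asset_files.assetId', '=', 'assets.id'),
  ).as('files');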

View File

@ -6,7 +6,6 @@ import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators'; import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { import {
AssetEntity, AssetEntity,
AssetEntityPlaceholder,
hasPeople, hasPeople,
searchAssetBuilder, searchAssetBuilder,
truncatedDate, truncatedDate,
@ -236,12 +235,8 @@ export class AssetRepository {
.execute(); .execute();
} }
create(asset: Insertable<Assets>): Promise<AssetEntityPlaceholder> { create(asset: Insertable<Assets>): Promise<AssetEntity> {
return this.db return this.db.insertInto('assets').values(asset).returningAll().executeTakeFirst() as any as Promise<AssetEntity>;
.insertInto('assets')
.values(asset)
.returningAll()
.executeTakeFirst() as any as Promise<AssetEntityPlaceholder>;
} }
createAll(assets: Insertable<Assets>[]): Promise<AssetEntity[]> { createAll(assets: Insertable<Assets>[]): Promise<AssetEntity[]> {

View File

@ -9,7 +9,6 @@ import { CLS_ID, ClsModuleOptions } from 'nestjs-cls';
import { OpenTelemetryModuleOptions } from 'nestjs-otel/lib/interfaces'; import { OpenTelemetryModuleOptions } from 'nestjs-otel/lib/interfaces';
import { join, resolve } from 'node:path'; import { join, resolve } from 'node:path';
import { parse } from 'pg-connection-string'; import { parse } from 'pg-connection-string';
import { Notice } from 'postgres';
import { citiesFile, excludePaths, IWorker } from 'src/constants'; import { citiesFile, excludePaths, IWorker } from 'src/constants';
import { Telemetry } from 'src/decorators'; import { Telemetry } from 'src/decorators';
import { EnvDto } from 'src/dtos/env.dto'; import { EnvDto } from 'src/dtos/env.dto';
@ -23,23 +22,10 @@ import {
QueueName, QueueName,
} from 'src/enum'; } from 'src/enum';
import { DatabaseConnectionParams, VectorExtension } from 'src/types'; import { DatabaseConnectionParams, VectorExtension } from 'src/types';
import { isValidSsl, PostgresConnectionConfig } from 'src/utils/database';
import { setDifference } from 'src/utils/set'; import { setDifference } from 'src/utils/set';
import { PostgresConnectionOptions } from 'typeorm/driver/postgres/PostgresConnectionOptions.js'; import { PostgresConnectionOptions } from 'typeorm/driver/postgres/PostgresConnectionOptions.js';
type Ssl = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;
type PostgresConnectionConfig = {
host?: string;
password?: string;
user?: string;
port?: number;
database?: string;
client_encoding?: string;
ssl?: Ssl;
application_name?: string;
fallback_application_name?: string;
options?: string;
};
export interface EnvData { export interface EnvData {
host?: string; host?: string;
port: number; port: number;
@ -144,9 +130,6 @@ const asSet = <T>(value: string | undefined, defaults: T[]) => {
return new Set(values.length === 0 ? defaults : (values as T[])); return new Set(values.length === 0 ? defaults : (values as T[]));
}; };
const isValidSsl = (ssl?: string | boolean | object): ssl is Ssl =>
typeof ssl !== 'string' || ssl === 'require' || ssl === 'allow' || ssl === 'prefer' || ssl === 'verify-full';
const getEnv = (): EnvData => { const getEnv = (): EnvData => {
const dto = plainToInstance(EnvDto, process.env); const dto = plainToInstance(EnvDto, process.env);
const errors = validateSync(dto); const errors = validateSync(dto);
@ -233,33 +216,6 @@ const getEnv = (): EnvData => {
}; };
} }
const driverOptions = {
...parsedOptions,
onnotice: (notice: Notice) => {
if (notice['severity'] !== 'NOTICE') {
console.warn('Postgres notice:', notice);
}
},
max: 10,
types: {
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
parse: (x: string) => new Date(x),
},
bigint: {
to: 20,
from: [20, 1700],
parse: (value: string) => Number.parseInt(value),
serialize: (value: number) => value.toString(),
},
},
connection: {
TimeZone: 'UTC',
},
};
return { return {
host: dto.IMMICH_HOST, host: dto.IMMICH_HOST,
port: dto.IMMICH_PORT || 2283, port: dto.IMMICH_PORT || 2283,
@ -325,7 +281,7 @@ const getEnv = (): EnvData => {
parseInt8: true, parseInt8: true,
...(databaseUrl ? { connectionType: 'url', url: databaseUrl } : parts), ...(databaseUrl ? { connectionType: 'url', url: databaseUrl } : parts),
}, },
kysely: driverOptions, kysely: parsedOptions,
}, },
skipMigrations: dto.DB_SKIP_MIGRATIONS ?? false, skipMigrations: dto.DB_SKIP_MIGRATIONS ?? false,

View File

@ -247,7 +247,7 @@ export class MapRepository {
let futures = []; let futures = [];
for await (const line of lineReader) { for await (const line of lineReader) {
const lineSplit = line.split('\t'); const lineSplit = line.split('\t');
if (lineSplit[7] === 'PPLX' && lineSplit[8] !== 'AU') { if ((lineSplit[7] === 'PPLX' && lineSplit[8] !== 'AU') || lineSplit[7] === 'PPLH') {
continue; continue;
} }
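
For context on the condition above: in the geonames tab-separated dump, column 7 is the feature code and column 8 the country code, so the filter now skips historical populated places (PPLH) everywhere and sections of populated places (PPLX) outside Australia, where suburbs are coded as PPLX. Restated as a standalone predicate:

// geonames columns (abbreviated): ... 6 feature class, 7 feature code, 8 country code.
const shouldSkipGeonamesRow = (lineSplit: string[]): boolean =>
  (lineSplit[7] === 'PPLX' && lineSplit[8] !== 'AU') || lineSplit[7] === 'PPLH';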

View File

@ -1,12 +1,11 @@
import { Injectable } from '@nestjs/common'; import { Injectable } from '@nestjs/common';
import { Kysely, OrderByDirectionExpression, sql } from 'kysely'; import { Kysely, OrderByDirection, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely'; import { InjectKysely } from 'nestjs-kysely';
import { randomUUID } from 'node:crypto'; import { randomUUID } from 'node:crypto';
import { DB } from 'src/db'; import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators'; import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetEntity, searchAssetBuilder } from 'src/entities/asset.entity'; import { AssetEntity, searchAssetBuilder } from 'src/entities/asset.entity';
import { AssetStatus, AssetType } from 'src/enum'; import { AssetStatus, AssetType } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { anyUuid, asUuid } from 'src/utils/database'; import { anyUuid, asUuid } from 'src/utils/database';
import { Paginated } from 'src/utils/pagination'; import { Paginated } from 'src/utils/pagination';
import { isValidInteger } from 'src/validation'; import { isValidInteger } from 'src/validation';
@ -203,12 +202,7 @@ export interface GetCameraMakesOptions {
@Injectable() @Injectable()
export class SearchRepository { export class SearchRepository {
constructor( constructor(@InjectKysely() private db: Kysely<DB>) {}
private logger: LoggingRepository,
@InjectKysely() private db: Kysely<DB>,
) {
this.logger.setContext(SearchRepository.name);
}
@GenerateSql({ @GenerateSql({
params: [ params: [
@ -223,7 +217,7 @@ export class SearchRepository {
], ],
}) })
async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions): Paginated<AssetEntity> { async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions): Paginated<AssetEntity> {
const orderDirection = (options.orderDirection?.toLowerCase() || 'desc') as OrderByDirectionExpression; const orderDirection = (options.orderDirection?.toLowerCase() || 'desc') as OrderByDirection;
const items = await searchAssetBuilder(this.db, options) const items = await searchAssetBuilder(this.db, options)
.orderBy('assets.fileCreatedAt', orderDirection) .orderBy('assets.fileCreatedAt', orderDirection)
.limit(pagination.size + 1) .limit(pagination.size + 1)

View File

@ -3,6 +3,7 @@ import { Insertable, Kysely, SelectQueryBuilder, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely'; import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database'; import { columns } from 'src/database';
import { DB, SessionSyncCheckpoints } from 'src/db'; import { DB, SessionSyncCheckpoints } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { SyncEntityType } from 'src/enum'; import { SyncEntityType } from 'src/enum';
import { SyncAck } from 'src/types'; import { SyncAck } from 'src/types';
@ -13,6 +14,7 @@ type upsertTables = 'users' | 'partners' | 'assets' | 'exif';
export class SyncRepository { export class SyncRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {} constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID] })
getCheckpoints(sessionId: string) { getCheckpoints(sessionId: string) {
return this.db return this.db
.selectFrom('session_sync_checkpoints') .selectFrom('session_sync_checkpoints')
@ -33,6 +35,7 @@ export class SyncRepository {
.execute(); .execute();
} }
@GenerateSql({ params: [DummyValue.UUID] })
deleteCheckpoints(sessionId: string, types?: SyncEntityType[]) { deleteCheckpoints(sessionId: string, types?: SyncEntityType[]) {
return this.db return this.db
.deleteFrom('session_sync_checkpoints') .deleteFrom('session_sync_checkpoints')
@ -41,6 +44,7 @@ export class SyncRepository {
.execute(); .execute();
} }
@GenerateSql({ params: [], stream: true })
getUserUpserts(ack?: SyncAck) { getUserUpserts(ack?: SyncAck) {
return this.db return this.db
.selectFrom('users') .selectFrom('users')
@ -49,6 +53,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [], stream: true })
getUserDeletes(ack?: SyncAck) { getUserDeletes(ack?: SyncAck) {
return this.db return this.db
.selectFrom('users_audit') .selectFrom('users_audit')
@ -57,6 +62,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerUpserts(userId: string, ack?: SyncAck) { getPartnerUpserts(userId: string, ack?: SyncAck) {
return this.db return this.db
.selectFrom('partners') .selectFrom('partners')
@ -66,6 +72,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerDeletes(userId: string, ack?: SyncAck) { getPartnerDeletes(userId: string, ack?: SyncAck) {
return this.db return this.db
.selectFrom('partners_audit') .selectFrom('partners_audit')
@ -75,6 +82,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getAssetUpserts(userId: string, ack?: SyncAck) { getAssetUpserts(userId: string, ack?: SyncAck) {
return this.db return this.db
.selectFrom('assets') .selectFrom('assets')
@ -84,6 +92,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerAssetsUpserts(userId: string, ack?: SyncAck) { getPartnerAssetsUpserts(userId: string, ack?: SyncAck) {
return this.db return this.db
.selectFrom('assets') .selectFrom('assets')
@ -95,6 +104,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getAssetDeletes(userId: string, ack?: SyncAck) { getAssetDeletes(userId: string, ack?: SyncAck) {
return this.db return this.db
.selectFrom('assets_audit') .selectFrom('assets_audit')
@ -105,6 +115,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerAssetDeletes(userId: string, ack?: SyncAck) { getPartnerAssetDeletes(userId: string, ack?: SyncAck) {
return this.db return this.db
.selectFrom('assets_audit') .selectFrom('assets_audit')
@ -116,6 +127,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getAssetExifsUpserts(userId: string, ack?: SyncAck) { getAssetExifsUpserts(userId: string, ack?: SyncAck) {
return this.db return this.db
.selectFrom('exif') .selectFrom('exif')
@ -125,6 +137,7 @@ export class SyncRepository {
.stream(); .stream();
} }
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerAssetExifsUpserts(userId: string, ack?: SyncAck) { getPartnerAssetExifsUpserts(userId: string, ack?: SyncAck) {
return this.db return this.db
.selectFrom('exif') .selectFrom('exif')
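
The hunks above hide each method's where-clause, including how the ack is applied. Given the generated SQL earlier (no updateId filter when params are empty), a method like getUserUpserts plausibly applies the ack with Kysely's $if, along these lines; this is a sketch under that assumption, not the project's code:

import { Kysely, sql } from 'kysely';

type SyncAck = { updateId: string }; // assumed shape

class SyncRepositorySketch {
  constructor(private db: Kysely<any>) {}

  getUserUpserts(ack?: SyncAck) {
    return this.db
      .selectFrom('users')
      .select(['id', 'name', 'email', 'deletedAt', 'updateId'])
      .$if(!!ack, (qb) => qb.where('updateId', '>', ack!.updateId))
      .where('updatedAt', '<', sql`now() - interval '1 millisecond'`)
      .orderBy('updateId', 'asc')
      .stream();
  }
}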

View File

@ -39,6 +39,7 @@ describe(MediaService.name, () => {
describe('handleQueueGenerateThumbnails', () => { describe('handleQueueGenerateThumbnails', () => {
it('should queue all assets', async () => { it('should queue all assets', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({ mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image], items: [assetStub.image],
hasNextPage: false, hasNextPage: false,
@ -49,8 +50,7 @@ describe(MediaService.name, () => {
await sut.handleQueueGenerateThumbnails({ force: true }); await sut.handleQueueGenerateThumbnails({ force: true });
expect(mocks.asset.getAll).toHaveBeenCalled(); expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(true);
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ {
name: JobName.GENERATE_THUMBNAILS, name: JobName.GENERATE_THUMBNAILS,
@ -68,6 +68,7 @@ describe(MediaService.name, () => {
}); });
it('should queue trashed assets when force is true', async () => { it('should queue trashed assets when force is true', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.trashed]));
mocks.asset.getAll.mockResolvedValue({ mocks.asset.getAll.mockResolvedValue({
items: [assetStub.trashed], items: [assetStub.trashed],
hasNextPage: false, hasNextPage: false,
@ -76,11 +77,7 @@ describe(MediaService.name, () => {
await sut.handleQueueGenerateThumbnails({ force: true }); await sut.handleQueueGenerateThumbnails({ force: true });
expect(mocks.asset.getAll).toHaveBeenCalledWith( expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(true);
{ skip: 0, take: 1000 },
expect.objectContaining({ withDeleted: true }),
);
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ {
name: JobName.GENERATE_THUMBNAILS, name: JobName.GENERATE_THUMBNAILS,
@ -90,19 +87,12 @@ describe(MediaService.name, () => {
}); });
it('should queue archived assets when force is true', async () => { it('should queue archived assets when force is true', async () => {
mocks.asset.getAll.mockResolvedValue({ mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.archived]));
items: [assetStub.archived],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream()); mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: true }); await sut.handleQueueGenerateThumbnails({ force: true });
expect(mocks.asset.getAll).toHaveBeenCalledWith( expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(true);
{ skip: 0, take: 1000 },
expect.objectContaining({ withArchived: true }),
);
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ {
name: JobName.GENERATE_THUMBNAILS, name: JobName.GENERATE_THUMBNAILS,
@ -112,18 +102,13 @@ describe(MediaService.name, () => {
}); });
it('should queue all people with missing thumbnail path', async () => { it('should queue all people with missing thumbnail path', async () => {
mocks.asset.getWithout.mockResolvedValue({ mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.image]));
items: [assetStub.image],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream([personStub.noThumbnail, personStub.noThumbnail])); mocks.person.getAll.mockReturnValue(makeStream([personStub.noThumbnail, personStub.noThumbnail]));
mocks.person.getRandomFace.mockResolvedValueOnce(faceStub.face1); mocks.person.getRandomFace.mockResolvedValueOnce(faceStub.face1);
await sut.handleQueueGenerateThumbnails({ force: false }); await sut.handleQueueGenerateThumbnails({ force: false });
expect(mocks.asset.getAll).not.toHaveBeenCalled(); expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(false);
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.THUMBNAIL);
expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' }); expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
expect(mocks.person.getRandomFace).toHaveBeenCalled(); expect(mocks.person.getRandomFace).toHaveBeenCalled();
expect(mocks.person.update).toHaveBeenCalledTimes(1); expect(mocks.person.update).toHaveBeenCalledTimes(1);
@ -138,15 +123,11 @@ describe(MediaService.name, () => {
}); });
it('should queue all assets with missing resize path', async () => { it('should queue all assets with missing resize path', async () => {
mocks.asset.getWithout.mockResolvedValue({ mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.noResizePath]));
items: [assetStub.noResizePath],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream()); mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false }); await sut.handleQueueGenerateThumbnails({ force: false });
expect(mocks.asset.getAll).not.toHaveBeenCalled(); expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(false);
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.THUMBNAIL);
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ {
name: JobName.GENERATE_THUMBNAILS, name: JobName.GENERATE_THUMBNAILS,
@ -158,15 +139,11 @@ describe(MediaService.name, () => {
}); });
it('should queue all assets with missing webp path', async () => { it('should queue all assets with missing webp path', async () => {
mocks.asset.getWithout.mockResolvedValue({ mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.noWebpPath]));
items: [assetStub.noWebpPath],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream()); mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false }); await sut.handleQueueGenerateThumbnails({ force: false });
expect(mocks.asset.getAll).not.toHaveBeenCalled(); expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(false);
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.THUMBNAIL);
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ {
name: JobName.GENERATE_THUMBNAILS, name: JobName.GENERATE_THUMBNAILS,
@ -178,15 +155,11 @@ describe(MediaService.name, () => {
}); });
it('should queue all assets with missing thumbhash', async () => { it('should queue all assets with missing thumbhash', async () => {
mocks.asset.getWithout.mockResolvedValue({ mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.noThumbhash]));
items: [assetStub.noThumbhash],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream()); mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false }); await sut.handleQueueGenerateThumbnails({ force: false });
expect(mocks.asset.getAll).not.toHaveBeenCalled(); expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(false);
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.THUMBNAIL);
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ {
name: JobName.GENERATE_THUMBNAILS, name: JobName.GENERATE_THUMBNAILS,
@ -218,13 +191,14 @@ describe(MediaService.name, () => {
describe('handleAssetMigration', () => { describe('handleAssetMigration', () => {
it('should fail if asset does not exist', async () => { it('should fail if asset does not exist', async () => {
mocks.assetJob.getForMigrationJob.mockResolvedValue(void 0);
await expect(sut.handleAssetMigration({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED); await expect(sut.handleAssetMigration({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
expect(mocks.move.getByEntity).not.toHaveBeenCalled(); expect(mocks.move.getByEntity).not.toHaveBeenCalled();
}); });
it('should move asset files', async () => { it('should move asset files', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]); mocks.assetJob.getForMigrationJob.mockResolvedValue(assetStub.image);
mocks.move.create.mockResolvedValue({ mocks.move.create.mockResolvedValue({
entityId: assetStub.image.id, entityId: assetStub.image.id,
id: 'move-id', id: 'move-id',
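
These tests replace paginated mocks with makeStream. The helper's implementation lives in the test utilities rather than this diff; an async generator like the following would satisfy the contract (assumed, minimal sketch):

// Wraps an array in an AsyncIterableIterator, mirroring repository .stream() results.
async function* makeStream<T>(items: T[] = []): AsyncIterableIterator<T> {
  for (const item of items) {
    yield item;
  }
}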

View File

@ -51,30 +51,16 @@ export class MediaService extends BaseService {
@OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION }) @OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> { async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => { const thumbJobs: JobItem[] = [];
return force for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
? this.assetRepository.getAll(pagination, {
isVisible: true,
withDeleted: true,
withArchived: true,
})
: this.assetRepository.getWithout(pagination, WithoutProperty.THUMBNAIL);
});
for await (const assets of assetPagination) {
const jobs: JobItem[] = [];
for (const asset of assets) {
const { previewFile, thumbnailFile } = getAssetFiles(asset.files); const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) { if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
jobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } }); thumbJobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
continue; continue;
} }
} }
await this.jobRepository.queueAll(thumbJobs);
await this.jobRepository.queueAll(jobs);
}
const jobs: JobItem[] = []; const jobs: JobItem[] = [];
@ -135,7 +121,7 @@ export class MediaService extends BaseService {
@OnJob({ name: JobName.MIGRATE_ASSET, queue: QueueName.MIGRATION }) @OnJob({ name: JobName.MIGRATE_ASSET, queue: QueueName.MIGRATION })
async handleAssetMigration({ id }: JobOf<JobName.MIGRATE_ASSET>): Promise<JobStatus> { async handleAssetMigration({ id }: JobOf<JobName.MIGRATE_ASSET>): Promise<JobStatus> {
const { image } = await this.getConfig({ withCache: true }); const { image } = await this.getConfig({ withCache: true });
const [asset] = await this.assetRepository.getByIds([id], { files: true }); const asset = await this.assetJobRepository.getForMigrationJob(id);
if (!asset) { if (!asset) {
return JobStatus.FAILED; return JobStatus.FAILED;
} }

View File

@ -1,4 +1,77 @@
import { Expression, ExpressionBuilder, ExpressionWrapper, Nullable, Selectable, Simplify, sql } from 'kysely'; import {
Expression,
ExpressionBuilder,
ExpressionWrapper,
KyselyConfig,
Nullable,
Selectable,
Simplify,
sql,
} from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import postgres, { Notice } from 'postgres';
type Ssl = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;
export type PostgresConnectionConfig = {
host?: string;
password?: string;
user?: string;
port?: number;
database?: string;
max?: number;
client_encoding?: string;
ssl?: Ssl;
application_name?: string;
fallback_application_name?: string;
options?: string;
};
export const isValidSsl = (ssl?: string | boolean | object): ssl is Ssl =>
typeof ssl !== 'string' || ssl === 'require' || ssl === 'allow' || ssl === 'prefer' || ssl === 'verify-full';
export const getKyselyConfig = (options: PostgresConnectionConfig): KyselyConfig => {
return {
dialect: new PostgresJSDialect({
postgres: postgres({
onnotice: (notice: Notice) => {
if (notice['severity'] !== 'NOTICE') {
console.warn('Postgres notice:', notice);
}
},
max: 10,
types: {
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
parse: (x: string) => new Date(x),
},
bigint: {
to: 20,
from: [20, 1700],
parse: (value: string) => Number.parseInt(value, 10),
serialize: (value: number) => value.toString(),
},
},
connection: {
TimeZone: 'UTC',
},
...options,
}),
}),
log(event) {
if (event.level === 'error') {
console.error('Query failed:', {
durationMs: event.queryDurationMillis,
error: event.error,
sql: event.query.sql,
params: event.query.parameters,
});
}
},
};
};
export const asUuid = (id: string | Expression<string>) => sql<string>`${id}::uuid`; export const asUuid = (id: string | Expression<string>) => sql<string>`${id}::uuid`;
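
With the postgres driver options centralized in getKyselyConfig, callers build a Kysely instance from parsed connection parts; note that the caller's options are spread last, so values such as max override the defaults. Connection values below are illustrative:

import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { getKyselyConfig } from 'src/utils/database';

const db = new Kysely<DB>(
  getKyselyConfig({
    host: 'localhost',
    port: 5432,
    user: 'postgres',
    password: 'postgres',
    database: 'immich',
    max: 1, // overrides the default pool size of 10
  }),
);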

View File

@ -1,372 +0,0 @@
import { Insertable, Kysely } from 'kysely';
import { randomBytes } from 'node:crypto';
import { Writable } from 'node:stream';
import { AssetFaces, Assets, DB, Person as DbPerson, FaceSearch, Partners, Sessions } from 'src/db';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetType, SourceType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { ActivityRepository } from 'src/repositories/activity.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { AuditRepository } from 'src/repositories/audit.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LibraryRepository } from 'src/repositories/library.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { MediaRepository } from 'src/repositories/media.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { TelemetryRepository } from 'src/repositories/telemetry.repository';
import { TrashRepository } from 'src/repositories/trash.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { ViewRepository } from 'src/repositories/view-repository';
import { UserTable } from 'src/schema/tables/user.table';
import { newTelemetryRepositoryMock } from 'test/repositories/telemetry.repository.mock';
import { newDate, newEmbedding, newUuid } from 'test/small.factory';
import { automock } from 'test/utils';
class CustomWritable extends Writable {
private data = '';
_write(chunk: any, encoding: string, callback: () => void) {
this.data += chunk.toString();
callback();
}
getResponse() {
const result = this.data;
return result
.split('\n')
.filter((x) => x.length > 0)
.map((x) => JSON.parse(x));
}
}
type Asset = Partial<Insertable<Assets>>;
type User = Partial<Insertable<UserTable>>;
type Session = Omit<Insertable<Sessions>, 'token'> & { token?: string };
type Partner = Insertable<Partners>;
type AssetFace = Partial<Insertable<AssetFaces>>;
type Person = Partial<Insertable<DbPerson>>;
type Face = Partial<Insertable<FaceSearch>>;
export class TestFactory {
private assets: Asset[] = [];
private sessions: Session[] = [];
private users: User[] = [];
private partners: Partner[] = [];
private assetFaces: AssetFace[] = [];
private persons: Person[] = [];
private faces: Face[] = [];
private constructor(private context: TestContext) {}
static create(context: TestContext) {
return new TestFactory(context);
}
static stream() {
return new CustomWritable();
}
static asset(asset: Asset) {
const assetId = asset.id || newUuid();
const defaults: Insertable<Assets> = {
deviceAssetId: '',
deviceId: '',
originalFileName: '',
checksum: randomBytes(32),
type: AssetType.IMAGE,
originalPath: '/path/to/something.jpg',
ownerId: '@immich.cloud',
isVisible: true,
fileCreatedAt: new Date('2000-01-01T00:00:00Z'),
fileModifiedAt: new Date('2000-01-01T00:00:00Z'),
localDateTime: new Date('2000-01-01T00:00:00Z'),
};
return {
...defaults,
...asset,
id: assetId,
};
}
static auth(auth: { user: User; session?: Session }) {
return auth as AuthDto;
}
static user(user: User = {}) {
const userId = user.id || newUuid();
const defaults: Insertable<UserTable> = {
email: `${userId}@immich.cloud`,
name: `User ${userId}`,
deletedAt: null,
};
return {
...defaults,
...user,
id: userId,
};
}
static session(session: Session) {
const id = session.id || newUuid();
const defaults = {
token: randomBytes(36).toString('base64url'),
};
return {
...defaults,
...session,
id,
};
}
static partner(partner: Partner) {
const defaults = {
inTimeline: true,
};
return {
...defaults,
...partner,
};
}
static assetFace(assetFace: AssetFace) {
const defaults = {
assetId: assetFace.assetId || newUuid(),
boundingBoxX1: assetFace.boundingBoxX1 || 0,
boundingBoxX2: assetFace.boundingBoxX2 || 1,
boundingBoxY1: assetFace.boundingBoxY1 || 0,
boundingBoxY2: assetFace.boundingBoxY2 || 1,
deletedAt: assetFace.deletedAt || null,
id: assetFace.id || newUuid(),
imageHeight: assetFace.imageHeight || 10,
imageWidth: assetFace.imageWidth || 10,
personId: assetFace.personId || null,
sourceType: assetFace.sourceType || SourceType.MACHINE_LEARNING,
};
return { ...defaults, ...assetFace };
}
static person(person: Person) {
const defaults = {
birthDate: person.birthDate || null,
color: person.color || null,
createdAt: person.createdAt || newDate(),
faceAssetId: person.faceAssetId || null,
id: person.id || newUuid(),
isFavorite: person.isFavorite || false,
isHidden: person.isHidden || false,
name: person.name || 'Test Name',
ownerId: person.ownerId || newUuid(),
thumbnailPath: person.thumbnailPath || '/path/to/thumbnail.jpg',
updatedAt: person.updatedAt || newDate(),
updateId: person.updateId || newUuid(),
};
return { ...defaults, ...person };
}
static face(face: Face) {
const defaults = {
faceId: face.faceId || newUuid(),
embedding: face.embedding || newEmbedding(),
};
return {
...defaults,
...face,
};
}
withAsset(asset: Asset) {
this.assets.push(asset);
return this;
}
withSession(session: Session) {
this.sessions.push(session);
return this;
}
withUser(user: User = {}) {
this.users.push(user);
return this;
}
withPartner(partner: Partner) {
this.partners.push(partner);
return this;
}
withAssetFace(assetFace: AssetFace) {
this.assetFaces.push(assetFace);
return this;
}
withPerson(person: Person) {
this.persons.push(person);
return this;
}
withFaces(face: Face) {
this.faces.push(face);
return this;
}
async create() {
for (const user of this.users) {
await this.context.createUser(user);
}
for (const partner of this.partners) {
await this.context.createPartner(partner);
}
for (const session of this.sessions) {
await this.context.createSession(session);
}
for (const asset of this.assets) {
await this.context.createAsset(asset);
}
for (const person of this.persons) {
await this.context.createPerson(person);
}
await this.context.refreshFaces(
this.assetFaces,
[],
this.faces.map((f) => TestFactory.face(f)),
);
return this.context;
}
}
export class TestContext {
access: AccessRepository;
logger: LoggingRepository;
activity: ActivityRepository;
album: AlbumRepository;
apiKey: ApiKeyRepository;
asset: AssetRepository;
audit: AuditRepository;
config: ConfigRepository;
library: LibraryRepository;
machineLearning: MachineLearningRepository;
media: MediaRepository;
metadata: MetadataRepository;
move: MoveRepository;
notification: NotificationRepository;
oauth: OAuthRepository;
partner: PartnerRepository;
person: PersonRepository;
process: ProcessRepository;
search: SearchRepository;
serverInfo: ServerInfoRepository;
session: SessionRepository;
sharedLink: SharedLinkRepository;
stack: StackRepository;
storage: StorageRepository;
systemMetadata: SystemMetadataRepository;
sync: SyncRepository;
telemetry: TelemetryRepository;
trash: TrashRepository;
user: UserRepository;
versionHistory: VersionHistoryRepository;
view: ViewRepository;
private constructor(public db: Kysely<DB>) {
// eslint-disable-next-line no-sparse-arrays
const logger = automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false });
const config = new ConfigRepository();
this.access = new AccessRepository(this.db);
this.logger = logger;
this.activity = new ActivityRepository(this.db);
this.album = new AlbumRepository(this.db);
this.apiKey = new ApiKeyRepository(this.db);
this.asset = new AssetRepository(this.db);
this.audit = new AuditRepository(this.db);
this.config = config;
this.library = new LibraryRepository(this.db);
this.machineLearning = new MachineLearningRepository(logger);
this.media = new MediaRepository(logger);
this.metadata = new MetadataRepository(logger);
this.move = new MoveRepository(this.db);
this.notification = new NotificationRepository(logger);
this.oauth = new OAuthRepository(logger);
this.partner = new PartnerRepository(this.db);
this.person = new PersonRepository(this.db);
this.process = new ProcessRepository();
this.search = new SearchRepository(logger, this.db);
this.serverInfo = new ServerInfoRepository(config, logger);
this.session = new SessionRepository(this.db);
this.sharedLink = new SharedLinkRepository(this.db);
this.stack = new StackRepository(this.db);
this.storage = new StorageRepository(logger);
this.sync = new SyncRepository(this.db);
this.systemMetadata = new SystemMetadataRepository(this.db);
this.telemetry = newTelemetryRepositoryMock() as unknown as TelemetryRepository;
this.trash = new TrashRepository(this.db);
this.user = new UserRepository(this.db);
this.versionHistory = new VersionHistoryRepository(this.db);
this.view = new ViewRepository(this.db);
}
static from(db: Kysely<DB>) {
return new TestContext(db).getFactory();
}
getFactory() {
return TestFactory.create(this);
}
createUser(user: User = {}) {
return this.user.create(TestFactory.user(user));
}
createPartner(partner: Partner) {
return this.partner.create(TestFactory.partner(partner));
}
createAsset(asset: Asset) {
return this.asset.create(TestFactory.asset(asset));
}
createSession(session: Session) {
return this.session.create(TestFactory.session(session));
}
createPerson(person: Person) {
return this.person.create(TestFactory.person(person));
}
refreshFaces(facesToAdd: AssetFace[], faceIdsToRemove: string[], embeddingsToAdd?: Insertable<FaceSearch>[]) {
return this.person.refreshFaces(
facesToAdd.map((f) => TestFactory.assetFace(f)),
faceIdsToRemove,
embeddingsToAdd,
);
}
}

View File

@ -1,9 +1,11 @@
import { ClassConstructor } from 'class-transformer'; import { ClassConstructor } from 'class-transformer';
import { Insertable, Kysely } from 'kysely'; import { Insertable, Kysely } from 'kysely';
import { DateTime } from 'luxon'; import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto'; import { createHash, randomBytes } from 'node:crypto';
import { AssetJobStatus, Assets, DB } from 'src/db'; import { Writable } from 'node:stream';
import { AssetType } from 'src/enum'; import { AssetFace } from 'src/database';
import { AssetJobStatus, Assets, DB, FaceSearch, Person, Sessions } from 'src/db';
import { AssetType, SourceType } from 'src/enum';
import { ActivityRepository } from 'src/repositories/activity.repository'; import { ActivityRepository } from 'src/repositories/activity.repository';
import { AlbumRepository } from 'src/repositories/album.repository'; import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository'; import { AssetJobRepository } from 'src/repositories/asset-job.repository';
@ -15,17 +17,22 @@ import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository'; import { LoggingRepository } from 'src/repositories/logging.repository';
import { MemoryRepository } from 'src/repositories/memory.repository'; import { MemoryRepository } from 'src/repositories/memory.repository';
import { PartnerRepository } from 'src/repositories/partner.repository'; import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { SessionRepository } from 'src/repositories/session.repository'; import { SessionRepository } from 'src/repositories/session.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository'; import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { UserRepository } from 'src/repositories/user.repository'; import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository'; import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { UserTable } from 'src/schema/tables/user.table'; import { UserTable } from 'src/schema/tables/user.table';
import { BaseService } from 'src/services/base.service'; import { BaseService } from 'src/services/base.service';
import { RepositoryInterface } from 'src/types'; import { RepositoryInterface } from 'src/types';
import { newDate, newUuid } from 'test/small.factory'; import { newDate, newEmbedding, newUuid } from 'test/small.factory';
import { automock, ServiceOverrides } from 'test/utils'; import { automock, ServiceOverrides } from 'test/utils';
import { Mocked } from 'vitest'; import { Mocked } from 'vitest';
const sha256 = (value: string) => createHash('sha256').update(value).digest('base64');
// type Repositories = Omit<ServiceOverrides, 'access' | 'telemetry'>; // type Repositories = Omit<ServiceOverrides, 'access' | 'telemetry'>;
type Repositories = { type Repositories = {
activity: ActivityRepository; activity: ActivityRepository;
@ -40,7 +47,10 @@ type Repositories = {
logger: LoggingRepository; logger: LoggingRepository;
memory: MemoryRepository; memory: MemoryRepository;
partner: PartnerRepository; partner: PartnerRepository;
person: PersonRepository;
search: SearchRepository;
session: SessionRepository; session: SessionRepository;
sync: SyncRepository;
systemMetadata: SystemMetadataRepository; systemMetadata: SystemMetadataRepository;
versionHistory: VersionHistoryRepository; versionHistory: VersionHistoryRepository;
}; };
@ -145,10 +155,22 @@ export const getRepository = <K extends keyof Repositories>(key: K, db: Kysely<D
return new PartnerRepository(db); return new PartnerRepository(db);
} }
case 'person': {
return new PersonRepository(db);
}
case 'search': {
return new SearchRepository(db);
}
case 'session': { case 'session': {
return new SessionRepository(db); return new SessionRepository(db);
} }
case 'sync': {
return new SyncRepository(db);
}
case 'systemMetadata': { case 'systemMetadata': {
return new SystemMetadataRepository(db); return new SystemMetadataRepository(db);
} }
@ -216,10 +238,18 @@ const getRepositoryMock = <K extends keyof Repositories>(key: K) => {
return automock(PartnerRepository); return automock(PartnerRepository);
} }
case 'person': {
return automock(PersonRepository);
}
case 'session': { case 'session': {
return automock(SessionRepository); return automock(SessionRepository);
} }
case 'sync': {
return automock(SyncRepository);
}
case 'systemMetadata': { case 'systemMetadata': {
return automock(SystemMetadataRepository); return automock(SystemMetadataRepository);
} }
@ -266,7 +296,7 @@ export const asDeps = (repositories: ServiceOverrides) => {
repositories.notification, repositories.notification,
repositories.oauth, repositories.oauth,
repositories.partner || getRepositoryMock('partner'), repositories.partner || getRepositoryMock('partner'),
repositories.person, repositories.person || getRepositoryMock('person'),
repositories.process, repositories.process,
repositories.search, repositories.search,
repositories.serverInfo, repositories.serverInfo,
@ -274,7 +304,7 @@ export const asDeps = (repositories: ServiceOverrides) => {
repositories.sharedLink, repositories.sharedLink,
repositories.stack, repositories.stack,
repositories.storage, repositories.storage,
repositories.sync, repositories.sync || getRepositoryMock('sync'),
repositories.systemMetadata || getRepositoryMock('systemMetadata'), repositories.systemMetadata || getRepositoryMock('systemMetadata'),
repositories.tag, repositories.tag,
repositories.telemetry, repositories.telemetry,
@ -297,6 +327,7 @@ const assetInsert = (asset: Partial<Insertable<Assets>> = {}) => {
originalPath: '/path/to/something.jpg', originalPath: '/path/to/something.jpg',
ownerId: '@immich.cloud', ownerId: '@immich.cloud',
isVisible: true, isVisible: true,
isFavorite: false,
fileCreatedAt: now, fileCreatedAt: now,
fileModifiedAt: now, fileModifiedAt: now,
localDateTime: now, localDateTime: now,
@ -309,6 +340,38 @@ const assetInsert = (asset: Partial<Insertable<Assets>> = {}) => {
}; };
}; };
const faceInsert = (face: Partial<Insertable<FaceSearch>> & { faceId: string }) => {
const defaults = {
faceId: face.faceId,
embedding: face.embedding || newEmbedding(),
};
return {
...defaults,
...face,
};
};
const assetFaceInsert = (assetFace: Partial<AssetFace> & { assetId: string }) => {
const defaults = {
assetId: assetFace.assetId ?? newUuid(),
boundingBoxX1: assetFace.boundingBoxX1 ?? 0,
boundingBoxX2: assetFace.boundingBoxX2 ?? 1,
boundingBoxY1: assetFace.boundingBoxY1 ?? 0,
boundingBoxY2: assetFace.boundingBoxY2 ?? 1,
deletedAt: assetFace.deletedAt ?? null,
id: assetFace.id ?? newUuid(),
imageHeight: assetFace.imageHeight ?? 10,
imageWidth: assetFace.imageWidth ?? 10,
personId: assetFace.personId ?? null,
sourceType: assetFace.sourceType ?? SourceType.MACHINE_LEARNING,
};
return {
...defaults,
...assetFace,
};
};
const assetJobStatusInsert = ( const assetJobStatusInsert = (
job: Partial<Insertable<AssetJobStatus>> & { assetId: string }, job: Partial<Insertable<AssetJobStatus>> & { assetId: string },
): Insertable<AssetJobStatus> => { ): Insertable<AssetJobStatus> => {
@ -327,6 +390,41 @@ const assetJobStatusInsert = (
}; };
}; };
const personInsert = (person: Partial<Insertable<Person>> & { ownerId: string }) => {
const defaults = {
birthDate: person.birthDate || null,
color: person.color || null,
createdAt: person.createdAt || newDate(),
faceAssetId: person.faceAssetId || null,
id: person.id || newUuid(),
isFavorite: person.isFavorite || false,
isHidden: person.isHidden || false,
name: person.name || 'Test Name',
ownerId: person.ownerId || newUuid(),
thumbnailPath: person.thumbnailPath || '/path/to/thumbnail.jpg',
updatedAt: person.updatedAt || newDate(),
updateId: person.updateId || newUuid(),
};
return {
...defaults,
...person,
};
};
const sessionInsert = ({ id = newUuid(), userId, ...session }: Partial<Insertable<Sessions>> & { userId: string }) => {
const defaults: Insertable<Sessions> = {
id,
userId,
token: sha256(id),
};
return {
...defaults,
...session,
id,
};
};
const userInsert = (user: Partial<Insertable<UserTable>> = {}) => { const userInsert = (user: Partial<Insertable<UserTable>> = {}) => {
const id = user.id || newUuid(); const id = user.id || newUuid();
@ -339,8 +437,34 @@ const userInsert = (user: Partial<Insertable<UserTable>> = {}) => {
return { ...defaults, ...user, id }; return { ...defaults, ...user, id };
}; };
class CustomWritable extends Writable {
private data = '';
_write(chunk: any, encoding: string, callback: () => void) {
this.data += chunk.toString();
callback();
}
getResponse() {
const result = this.data;
return result
.split('\n')
.filter((x) => x.length > 0)
.map((x) => JSON.parse(x));
}
}
const syncStream = () => {
return new CustomWritable();
};
export const mediumFactory = { export const mediumFactory = {
assetInsert, assetInsert,
assetFaceInsert,
assetJobStatusInsert, assetJobStatusInsert,
faceInsert,
personInsert,
sessionInsert,
syncStream,
userInsert, userInsert,
}; };
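
A short round trip through the new syncStream helper, which buffers newline-delimited JSON for assertions; the payload shape here is illustrative:

import { mediumFactory } from 'test/medium.factory';

const stream = mediumFactory.syncStream();
stream.write(JSON.stringify({ type: 'UserV1', data: { id: '123' } }) + '\n');
stream.end();
// getResponse() splits the buffered output on newlines and JSON-parses each line.
console.log(stream.getResponse()); // [{ type: 'UserV1', data: { id: '123' } }]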

View File

@ -1,9 +1,8 @@
import { FileMigrationProvider, Kysely, Migrator } from 'kysely'; import { FileMigrationProvider, Kysely, Migrator } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { mkdir, readdir } from 'node:fs/promises'; import { mkdir, readdir } from 'node:fs/promises';
import { join } from 'node:path'; import { join } from 'node:path';
import { parse } from 'pg-connection-string'; import { parse } from 'pg-connection-string';
import postgres, { Notice } from 'postgres'; import { getKyselyConfig } from 'src/utils/database';
import { GenericContainer, Wait } from 'testcontainers'; import { GenericContainer, Wait } from 'testcontainers';
import { DataSource } from 'typeorm'; import { DataSource } from 'typeorm';
@ -78,36 +77,7 @@ const globalSetup = async () => {
database: parsed.database ?? undefined, database: parsed.database ?? undefined,
}; };
const driverOptions = { const db = new Kysely(getKyselyConfig(parsedOptions));
...parsedOptions,
onnotice: (notice: Notice) => {
if (notice['severity'] !== 'NOTICE') {
console.warn('Postgres notice:', notice);
}
},
max: 10,
types: {
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
parse: (x: string) => new Date(x),
},
bigint: {
to: 20,
from: [20],
parse: (value: string) => Number.parseInt(value),
serialize: (value: number) => value.toString(),
},
},
connection: {
TimeZone: 'UTC',
},
};
const db = new Kysely({
dialect: new PostgresJSDialect({ postgres: postgres({ ...driverOptions, max: 1, database: 'postgres' }) }),
});
// TODO just call `databaseRepository.migrate()` (probably have to wait until TypeOrm is gone) // TODO just call `databaseRepository.migrate()` (probably have to wait until TypeOrm is gone)
const migrator = new Migrator({ const migrator = new Migrator({

View File

@ -0,0 +1,46 @@
import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { AssetRepository } from 'src/repositories/asset.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { AssetService } from 'src/services/asset.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
describe(AssetService.name, () => {
let defaultDatabase: Kysely<DB>;
let assetRepo: AssetRepository;
let userRepo: UserRepository;
const createSut = (db?: Kysely<DB>) => {
return newMediumService(AssetService, {
database: db || defaultDatabase,
repos: {
asset: 'real',
},
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
assetRepo = new AssetRepository(defaultDatabase);
userRepo = new UserRepository(defaultDatabase);
});
describe('getStatistics', () => {
it('should return stats as numbers, not strings', async () => {
const { sut } = createSut();
const user = mediumFactory.userInsert();
const asset = mediumFactory.assetInsert({ ownerId: user.id });
await userRepo.create(user);
await assetRepo.create(asset);
await assetRepo.upsertExif({ assetId: asset.id, fileSizeInByte: 12_345 });
const auth = factory.auth({ user: { id: user.id } });
await expect(sut.getStatistics(auth, {})).resolves.toEqual({ images: 1, total: 1, videos: 0 });
});
});
});

View File

@@ -1,201 +0,0 @@
import { Kysely } from 'kysely';
import { JobStatus, SourceType } from 'src/enum';
import { PersonService } from 'src/services/person.service';
import { TestContext, TestFactory } from 'test/factory';
import { newEmbedding } from 'test/small.factory';
import { getKyselyDB, newTestService } from 'test/utils';

const setup = async (db: Kysely<any>) => {
  const context = await TestContext.from(db).create();
  const { sut, mocks } = newTestService(PersonService, context);
  return { sut, mocks, context };
};

describe.concurrent(PersonService.name, () => {
  let sut: PersonService;
  let context: TestContext;

  beforeAll(async () => {
    ({ sut, context } = await setup(await getKyselyDB()));
  });

  describe('handleRecognizeFaces', () => {
    it('should skip if face source type is not MACHINE_LEARNING', async () => {
      const user = TestFactory.user();
      const asset = TestFactory.asset({ ownerId: user.id });
      const assetFace = TestFactory.assetFace({ assetId: asset.id, sourceType: SourceType.MANUAL });
      const face = TestFactory.face({ faceId: assetFace.id });
      await context.getFactory().withUser(user).withAsset(asset).withAssetFace(assetFace).withFaces(face).create();

      const result = await sut.handleRecognizeFaces({ id: assetFace.id, deferred: false });
      expect(result).toBe(JobStatus.SKIPPED);

      const newPersonId = await context.db
        .selectFrom('asset_faces')
        .select('asset_faces.personId')
        .where('asset_faces.id', '=', assetFace.id)
        .executeTakeFirst();
      expect(newPersonId?.personId).toBeNull();
    });

    it('should fail if face does not have an embedding', async () => {
      const user = TestFactory.user();
      const asset = TestFactory.asset({ ownerId: user.id });
      const assetFace = TestFactory.assetFace({ assetId: asset.id, sourceType: SourceType.MACHINE_LEARNING });
      await context.getFactory().withUser(user).withAsset(asset).withAssetFace(assetFace).create();

      const result = await sut.handleRecognizeFaces({ id: assetFace.id, deferred: false });
      expect(result).toBe(JobStatus.FAILED);

      const newPersonId = await context.db
        .selectFrom('asset_faces')
        .select('asset_faces.personId')
        .where('asset_faces.id', '=', assetFace.id)
        .executeTakeFirst();
      expect(newPersonId?.personId).toBeNull();
    });

    it('should skip if face already has a person assigned', async () => {
      const user = TestFactory.user();
      const asset = TestFactory.asset({ ownerId: user.id });
      const person = TestFactory.person({ ownerId: user.id });
      const assetFace = TestFactory.assetFace({
        assetId: asset.id,
        sourceType: SourceType.MACHINE_LEARNING,
        personId: person.id,
      });
      const face = TestFactory.face({ faceId: assetFace.id });
      await context
        .getFactory()
        .withUser(user)
        .withAsset(asset)
        .withPerson(person)
        .withAssetFace(assetFace)
        .withFaces(face)
        .create();

      const result = await sut.handleRecognizeFaces({ id: assetFace.id, deferred: false });
      expect(result).toBe(JobStatus.SKIPPED);

      const newPersonId = await context.db
        .selectFrom('asset_faces')
        .select('asset_faces.personId')
        .where('asset_faces.id', '=', assetFace.id)
        .executeTakeFirst();
      expect(newPersonId?.personId).toEqual(person.id);
    });

    it('should create a new person if no matches are found', async () => {
      const user = TestFactory.user();
      const embedding = newEmbedding();
      let factory = context.getFactory().withUser(user);
      for (let i = 0; i < 3; i++) {
        const existingAsset = TestFactory.asset({ ownerId: user.id });
        const existingAssetFace = TestFactory.assetFace({
          assetId: existingAsset.id,
          sourceType: SourceType.MACHINE_LEARNING,
        });
        const existingFace = TestFactory.face({ faceId: existingAssetFace.id, embedding });
        factory = factory.withAsset(existingAsset).withAssetFace(existingAssetFace).withFaces(existingFace);
      }

      const newAsset = TestFactory.asset({ ownerId: user.id });
      const newAssetFace = TestFactory.assetFace({ assetId: newAsset.id, sourceType: SourceType.MACHINE_LEARNING });
      const newFace = TestFactory.face({ faceId: newAssetFace.id, embedding });
      await factory.withAsset(newAsset).withAssetFace(newAssetFace).withFaces(newFace).create();

      const result = await sut.handleRecognizeFaces({ id: newAssetFace.id, deferred: false });
      expect(result).toBe(JobStatus.SUCCESS);

      const newPersonId = await context.db
        .selectFrom('asset_faces')
        .select('asset_faces.personId')
        .where('asset_faces.id', '=', newAssetFace.id)
        .executeTakeFirstOrThrow();
      expect(newPersonId.personId).toBeDefined();
    });

    it('should assign face to an existing person if matches are found', async () => {
      const user = TestFactory.user();
      const existingPerson = TestFactory.person({ ownerId: user.id });
      const embedding = newEmbedding();
      let factory = context.getFactory().withUser(user).withPerson(existingPerson);
      const assetFaces: string[] = [];
      for (let i = 0; i < 3; i++) {
        const existingAsset = TestFactory.asset({ ownerId: user.id });
        const existingAssetFace = TestFactory.assetFace({
          assetId: existingAsset.id,
          sourceType: SourceType.MACHINE_LEARNING,
        });
        assetFaces.push(existingAssetFace.id);
        const existingFace = TestFactory.face({ faceId: existingAssetFace.id, embedding });
        factory = factory.withAsset(existingAsset).withAssetFace(existingAssetFace).withFaces(existingFace);
      }

      const newAsset = TestFactory.asset({ ownerId: user.id });
      const newAssetFace = TestFactory.assetFace({ assetId: newAsset.id, sourceType: SourceType.MACHINE_LEARNING });
      const newFace = TestFactory.face({ faceId: newAssetFace.id, embedding });
      await factory.withAsset(newAsset).withAssetFace(newAssetFace).withFaces(newFace).create();
      await context.person.reassignFaces({ newPersonId: existingPerson.id, faceIds: assetFaces });

      const result = await sut.handleRecognizeFaces({ id: newAssetFace.id, deferred: false });
      expect(result).toBe(JobStatus.SUCCESS);

      const after = await context.db
        .selectFrom('asset_faces')
        .select('asset_faces.personId')
        .where('asset_faces.id', '=', newAssetFace.id)
        .executeTakeFirstOrThrow();
      expect(after.personId).toEqual(existingPerson.id);
    });

    it('should not assign face to an existing person if asset is older than person', async () => {
      const user = TestFactory.user();
      const assetCreatedAt = new Date('2020-02-23T05:06:29.716Z');
      const birthDate = new Date(assetCreatedAt.getTime() + 3600 * 1000 * 365);
      const existingPerson = TestFactory.person({ ownerId: user.id, birthDate });
      const embedding = newEmbedding();
      let factory = context.getFactory().withUser(user).withPerson(existingPerson);
      const assetFaces: string[] = [];
      for (let i = 0; i < 3; i++) {
        const existingAsset = TestFactory.asset({ ownerId: user.id });
        const existingAssetFace = TestFactory.assetFace({
          assetId: existingAsset.id,
          sourceType: SourceType.MACHINE_LEARNING,
        });
        assetFaces.push(existingAssetFace.id);
        const existingFace = TestFactory.face({ faceId: existingAssetFace.id, embedding });
        factory = factory.withAsset(existingAsset).withAssetFace(existingAssetFace).withFaces(existingFace);
      }

      const newAsset = TestFactory.asset({ ownerId: user.id, fileCreatedAt: assetCreatedAt });
      const newAssetFace = TestFactory.assetFace({ assetId: newAsset.id, sourceType: SourceType.MACHINE_LEARNING });
      const newFace = TestFactory.face({ faceId: newAssetFace.id, embedding });
      await factory.withAsset(newAsset).withAssetFace(newAssetFace).withFaces(newFace).create();
      await context.person.reassignFaces({ newPersonId: existingPerson.id, faceIds: assetFaces });

      const result = await sut.handleRecognizeFaces({ id: newAssetFace.id, deferred: false });
      expect(result).toBe(JobStatus.SKIPPED);

      const after = await context.db
        .selectFrom('asset_faces')
        .select('asset_faces.personId')
        .where('asset_faces.id', '=', newAssetFace.id)
        .executeTakeFirstOrThrow();
      expect(after.personId).toBeNull();
    });
  });
});

View File

@@ -1,22 +1,37 @@
 import { AuthDto } from 'src/dtos/auth.dto';
 import { SyncEntityType, SyncRequestType } from 'src/enum';
 import { SYNC_TYPES_ORDER, SyncService } from 'src/services/sync.service';
-import { TestContext, TestFactory } from 'test/factory';
-import { getKyselyDB, newTestService } from 'test/utils';
+import { mediumFactory, newMediumService } from 'test/medium.factory';
+import { factory } from 'test/small.factory';
+import { getKyselyDB } from 'test/utils';

 const setup = async () => {
-  const user = TestFactory.user();
-  const session = TestFactory.session({ userId: user.id });
-  const auth = TestFactory.auth({ session, user });
   const db = await getKyselyDB();
-  const context = await TestContext.from(db).withUser(user).withSession(session).create();
-  const { sut } = newTestService(SyncService, context);
+  const { sut, mocks, repos, getRepository } = newMediumService(SyncService, {
+    database: db,
+    repos: {
+      sync: 'real',
+      session: 'real',
+    },
+  });
+  const user = mediumFactory.userInsert();
+  const session = mediumFactory.sessionInsert({ userId: user.id });
+  const auth = factory.auth({
+    session,
+    user: {
+      id: user.id,
+      name: user.name,
+      email: user.email,
+    },
+  });
+  await getRepository('user').create(user);
+  await getRepository('session').create(session);

   const testSync = async (auth: AuthDto, types: SyncRequestType[]) => {
-    const stream = TestFactory.stream();
+    const stream = mediumFactory.syncStream();
     // Wait for 1ms to ensure all updates are available
     await new Promise((resolve) => setTimeout(resolve, 1));
     await sut.stream(auth, stream, { types });
@@ -25,9 +40,11 @@ const setup = async () => {
   };

   return {
-    auth,
-    context,
     sut,
+    auth,
+    mocks,
+    repos,
+    getRepository,
     testSync,
   };
 };
@@ -43,9 +60,10 @@ describe(SyncService.name, () => {
   describe.concurrent(SyncEntityType.UserV1, () => {
     it('should detect and sync the first user', async () => {
-      const { context, auth, sut, testSync } = await setup();
-      const user = await context.user.get(auth.user.id, { withDeleted: false });
+      const { auth, sut, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user = await userRepo.get(auth.user.id, { withDeleted: false });
       if (!user) {
         expect.fail('First user should exist');
       }
@@ -73,10 +91,11 @@ describe(SyncService.name, () => {
     });

     it('should detect and sync a soft deleted user', async () => {
-      const { auth, context, sut, testSync } = await setup();
+      const { auth, sut, getRepository, testSync } = await setup();
       const deletedAt = new Date().toISOString();
-      const deleted = await context.createUser({ deletedAt });
+      const deletedUser = mediumFactory.userInsert({ deletedAt });
+      const deleted = await getRepository('user').create(deletedUser);

       const response = await testSync(auth, [SyncRequestType.UsersV1]);
@@ -114,10 +133,12 @@ describe(SyncService.name, () => {
     });

     it('should detect and sync a deleted user', async () => {
-      const { auth, context, sut, testSync } = await setup();
-      const user = await context.createUser();
-      await context.user.delete({ id: user.id }, true);
+      const { auth, sut, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user = mediumFactory.userInsert();
+      await userRepo.create(user);
+      await userRepo.delete({ id: user.id }, true);

       const response = await testSync(auth, [SyncRequestType.UsersV1]);
@@ -152,7 +173,7 @@ describe(SyncService.name, () => {
     });

     it('should sync a user and then an update to that same user', async () => {
-      const { auth, context, sut, testSync } = await setup();
+      const { auth, sut, getRepository, testSync } = await setup();

       const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
@@ -175,8 +196,8 @@ describe(SyncService.name, () => {
       const acks = [initialSyncResponse[0].ack];
       await sut.setAcks(auth, { acks });

-      const updated = await context.user.update(auth.user.id, { name: 'new name' });
+      const userRepo = getRepository('user');
+      const updated = await userRepo.update(auth.user.id, { name: 'new name' });

       const updatedSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
       expect(updatedSyncResponse).toHaveLength(1);
@@ -199,12 +220,16 @@ describe(SyncService.name, () => {
   describe.concurrent(SyncEntityType.PartnerV1, () => {
     it('should detect and sync the first partner', async () => {
-      const { auth, context, sut, testSync } = await setup();
+      const { auth, sut, getRepository, testSync } = await setup();
       const user1 = auth.user;
-      const user2 = await context.createUser();
-      const partner = await context.createPartner({ sharedById: user2.id, sharedWithId: user1.id });
+      const userRepo = getRepository('user');
+      const partnerRepo = getRepository('partner');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id });

       const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]);
@@ -232,13 +257,16 @@ describe(SyncService.name, () => {
     });

     it('should detect and sync a deleted partner', async () => {
-      const { auth, context, sut, testSync } = await setup();
+      const { auth, sut, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
       const user1 = auth.user;
-      const user2 = await context.createUser();
-      const partner = await context.createPartner({ sharedById: user2.id, sharedWithId: user1.id });
-      await context.partner.remove(partner);
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const partnerRepo = getRepository('partner');
+      const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id });
+      await partnerRepo.remove(partner);

       const response = await testSync(auth, [SyncRequestType.PartnersV1]);
@@ -265,13 +293,15 @@ describe(SyncService.name, () => {
     });

     it('should detect and sync a partner share both to and from another user', async () => {
-      const { auth, context, sut, testSync } = await setup();
+      const { auth, sut, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
       const user1 = auth.user;
-      const user2 = await context.createUser();
-      const partner1 = await context.createPartner({ sharedById: user2.id, sharedWithId: user1.id });
-      const partner2 = await context.createPartner({ sharedById: user1.id, sharedWithId: user2.id });
+      const user2 = await userRepo.create(mediumFactory.userInsert());
+      const partnerRepo = getRepository('partner');
+      const partner1 = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id });
+      const partner2 = await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id });

       const response = await testSync(auth, [SyncRequestType.PartnersV1]);
@@ -307,12 +337,14 @@ describe(SyncService.name, () => {
     });

     it('should sync a partner and then an update to that same partner', async () => {
-      const { auth, context, sut, testSync } = await setup();
+      const { auth, sut, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
       const user1 = auth.user;
-      const user2 = await context.createUser();
-      const partner = await context.createPartner({ sharedById: user2.id, sharedWithId: user1.id });
+      const user2 = await userRepo.create(mediumFactory.userInsert());
+      const partnerRepo = getRepository('partner');
+      const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id });

       const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]);
@@ -334,7 +366,7 @@ describe(SyncService.name, () => {
       const acks = [initialSyncResponse[0].ack];
       await sut.setAcks(auth, { acks });

-      const updated = await context.partner.update(
+      const updated = await partnerRepo.update(
         { sharedById: partner.sharedById, sharedWithId: partner.sharedWithId },
         { inTimeline: true },
       );
@@ -358,26 +390,31 @@ describe(SyncService.name, () => {
     });

     it('should not sync a partner or partner delete for an unrelated user', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      const user3 = await context.createUser();
-      await context.createPartner({ sharedById: user2.id, sharedWithId: user3.id });
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = await userRepo.create(mediumFactory.userInsert());
+      const user3 = await userRepo.create(mediumFactory.userInsert());
+      const partnerRepo = getRepository('partner');
+      const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user3.id });

       expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);

-      await context.partner.remove({ sharedById: user2.id, sharedWithId: user3.id });
+      await partnerRepo.remove(partner);

       expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);
     });

     it('should not sync a partner delete after a user is deleted', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      await context.createPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
-      await context.user.delete({ id: user2.id }, true);
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = await userRepo.create(mediumFactory.userInsert());
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
+      await userRepo.delete({ id: user2.id }, true);

       expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);
     });
@@ -385,21 +422,23 @@ describe(SyncService.name, () => {
   describe.concurrent(SyncEntityType.AssetV1, () => {
     it('should detect and sync the first asset', async () => {
-      const { auth, context, sut, testSync } = await setup();
+      const { auth, sut, getRepository, testSync } = await setup();
       const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
       const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
       const date = new Date().toISOString();
-      const asset = TestFactory.asset({
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({
         ownerId: auth.user.id,
         checksum: Buffer.from(checksum, 'base64'),
         thumbhash: Buffer.from(thumbhash, 'base64'),
         fileCreatedAt: date,
         fileModifiedAt: date,
+        localDateTime: date,
         deletedAt: null,
       });
-      await context.createAsset(asset);
+      await assetRepo.create(asset);

       const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]);
@@ -413,12 +452,12 @@ describe(SyncService.name, () => {
           ownerId: asset.ownerId,
           thumbhash,
           checksum,
-          deletedAt: null,
-          fileCreatedAt: date,
-          fileModifiedAt: date,
-          isFavorite: false,
-          isVisible: true,
-          localDateTime: '2000-01-01T00:00:00.000Z',
+          deletedAt: asset.deletedAt,
+          fileCreatedAt: asset.fileCreatedAt,
+          fileModifiedAt: asset.fileModifiedAt,
+          isFavorite: asset.isFavorite,
+          isVisible: asset.isVisible,
+          localDateTime: asset.localDateTime,
           type: asset.type,
         },
         type: 'AssetV1',
@@ -435,11 +474,12 @@ describe(SyncService.name, () => {
     });

     it('should detect and sync a deleted asset', async () => {
-      const { auth, context, sut, testSync } = await setup();
-      const asset = TestFactory.asset({ ownerId: auth.user.id });
-      await context.createAsset(asset);
-      await context.asset.remove(asset);
+      const { auth, sut, getRepository, testSync } = await setup();
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: auth.user.id });
+      await assetRepo.create(asset);
+      await assetRepo.remove(asset);

       const response = await testSync(auth, [SyncRequestType.AssetsV1]);
@@ -465,19 +505,26 @@ describe(SyncService.name, () => {
     });

     it('should not sync an asset or asset delete for an unrelated user', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      const session = TestFactory.session({ userId: user2.id });
-      const auth2 = TestFactory.auth({ session, user: user2 });
-      const asset = TestFactory.asset({ ownerId: user2.id });
-      await context.createAsset(asset);
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const sessionRepo = getRepository('session');
+      const session = mediumFactory.sessionInsert({ userId: user2.id });
+      await sessionRepo.create(session);
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: user2.id });
+      await assetRepo.create(asset);
+      const auth2 = factory.auth({ session, user: user2 });

       expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
       expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);

-      await context.asset.remove(asset);
+      await assetRepo.remove(asset);

       expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
       expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
     });
@@ -485,24 +532,30 @@ describe(SyncService.name, () => {
   describe.concurrent(SyncRequestType.PartnerAssetsV1, () => {
     it('should detect and sync the first partner asset', async () => {
-      const { auth, context, sut, testSync } = await setup();
+      const { auth, sut, getRepository, testSync } = await setup();
       const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
       const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
       const date = new Date().toISOString();
-      const user2 = await context.createUser();
-      const asset = TestFactory.asset({
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({
         ownerId: user2.id,
         checksum: Buffer.from(checksum, 'base64'),
         thumbhash: Buffer.from(thumbhash, 'base64'),
         fileCreatedAt: date,
         fileModifiedAt: date,
+        localDateTime: date,
         deletedAt: null,
       });
-      await context.createAsset(asset);
-      await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
+      await assetRepo.create(asset);
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });

       const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
@@ -521,7 +574,7 @@ describe(SyncService.name, () => {
           fileModifiedAt: date,
           isFavorite: false,
           isVisible: true,
-          localDateTime: '2000-01-01T00:00:00.000Z',
+          localDateTime: date,
           type: asset.type,
         },
         type: SyncEntityType.PartnerAssetV1,
@@ -538,13 +591,19 @@ describe(SyncService.name, () => {
     });

     it('should detect and sync a deleted partner asset', async () => {
-      const { auth, context, sut, testSync } = await setup();
-      const user2 = await context.createUser();
-      const asset = TestFactory.asset({ ownerId: user2.id });
-      await context.createAsset(asset);
-      await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
-      await context.asset.remove(asset);
+      const { auth, sut, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const asset = mediumFactory.assetInsert({ ownerId: user2.id });
+      const assetRepo = getRepository('asset');
+      await assetRepo.create(asset);
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
+      await assetRepo.remove(asset);

       const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
@@ -570,62 +629,89 @@ describe(SyncService.name, () => {
     });

     it('should not sync a deleted partner asset due to a user delete', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
-      await context.createAsset({ ownerId: user2.id });
-      await context.user.delete({ id: user2.id }, true);
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
+      const assetRepo = getRepository('asset');
+      await assetRepo.create(mediumFactory.assetInsert({ ownerId: user2.id }));
+      await userRepo.delete({ id: user2.id }, true);

       const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
       expect(response).toHaveLength(0);
     });

     it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      await context.createAsset({ ownerId: user2.id });
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const assetRepo = getRepository('asset');
+      await assetRepo.create(mediumFactory.assetInsert({ ownerId: user2.id }));
+      const partnerRepo = getRepository('partner');
       const partner = { sharedById: user2.id, sharedWithId: auth.user.id };
-      await context.partner.create(partner);
+      await partnerRepo.create(partner);

       await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(1);

-      await context.partner.remove(partner);
+      await partnerRepo.remove(partner);

       await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
     });

     it('should not sync an asset or asset delete for own user', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      const asset = await context.createAsset({ ownerId: auth.user.id });
-      const partner = { sharedById: user2.id, sharedWithId: auth.user.id };
-      await context.partner.create(partner);
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: auth.user.id });
+      await assetRepo.create(asset);
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });

       await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
       await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);

-      await context.asset.remove(asset);
+      await assetRepo.remove(asset);

       await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
       await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
     });

     it('should not sync an asset or asset delete for unrelated user', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      const session = TestFactory.session({ userId: user2.id });
-      const auth2 = TestFactory.auth({ session, user: user2 });
-      const asset = await context.createAsset({ ownerId: user2.id });
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const sessionRepo = getRepository('session');
+      const session = mediumFactory.sessionInsert({ userId: user2.id });
+      await sessionRepo.create(session);
+      const auth2 = factory.auth({ session, user: user2 });
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: user2.id });
+      await assetRepo.create(asset);

       await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
       await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);

-      await context.asset.remove(asset);
+      await assetRepo.remove(asset);

       await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
       await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
@@ -634,13 +720,12 @@ describe(SyncService.name, () => {
   describe.concurrent(SyncRequestType.AssetExifsV1, () => {
     it('should detect and sync the first asset exif', async () => {
-      const { auth, context, sut, testSync } = await setup();
-      const asset = TestFactory.asset({ ownerId: auth.user.id });
-      const exif = { assetId: asset.id, make: 'Canon' };
-      await context.createAsset(asset);
-      await context.asset.upsertExif(exif);
+      const { auth, sut, getRepository, testSync } = await setup();
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: auth.user.id });
+      await assetRepo.create(asset);
+      await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' });

       const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetExifsV1]);
@@ -690,19 +775,25 @@ describe(SyncService.name, () => {
     });

     it('should only sync asset exif for own user', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      const session = TestFactory.session({ userId: user2.id });
-      const auth2 = TestFactory.auth({ session, user: user2 });
-      await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
-      const asset = TestFactory.asset({ ownerId: user2.id });
-      const exif = { assetId: asset.id, make: 'Canon' };
-      await context.createAsset(asset);
-      await context.asset.upsertExif(exif);
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: user2.id });
+      await assetRepo.create(asset);
+      await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' });
+      const sessionRepo = getRepository('session');
+      const session = mediumFactory.sessionInsert({ userId: user2.id });
+      await sessionRepo.create(session);
+      const auth2 = factory.auth({ session, user: user2 });

       await expect(testSync(auth2, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
       await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(0);
     });
@@ -710,14 +801,19 @@ describe(SyncService.name, () => {
   describe.concurrent(SyncRequestType.PartnerAssetExifsV1, () => {
     it('should detect and sync the first partner asset exif', async () => {
-      const { auth, context, sut, testSync } = await setup();
-      const user2 = await context.createUser();
-      await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
-      const asset = TestFactory.asset({ ownerId: user2.id });
-      await context.createAsset(asset);
-      const exif = { assetId: asset.id, make: 'Canon' };
-      await context.asset.upsertExif(exif);
+      const { auth, sut, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: user2.id });
+      await assetRepo.create(asset);
+      await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' });

       const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
@@ -767,32 +863,46 @@ describe(SyncService.name, () => {
     });

     it('should not sync partner asset exif for own user', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
-      const asset = TestFactory.asset({ ownerId: auth.user.id });
-      const exif = { assetId: asset.id, make: 'Canon' };
-      await context.createAsset(asset);
-      await context.asset.upsertExif(exif);
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      await userRepo.create(user2);
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: auth.user.id });
+      await assetRepo.create(asset);
+      await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' });

       await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
       await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
     });

     it('should not sync partner asset exif for unrelated user', async () => {
-      const { auth, context, testSync } = await setup();
-      const user2 = await context.createUser();
-      const user3 = await context.createUser();
-      const session = TestFactory.session({ userId: user3.id });
-      const authUser3 = TestFactory.auth({ session, user: user3 });
-      await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
-      const asset = TestFactory.asset({ ownerId: user3.id });
-      const exif = { assetId: asset.id, make: 'Canon' };
-      await context.createAsset(asset);
-      await context.asset.upsertExif(exif);
+      const { auth, getRepository, testSync } = await setup();
+      const userRepo = getRepository('user');
+      const user2 = mediumFactory.userInsert();
+      const user3 = mediumFactory.userInsert();
+      await Promise.all([userRepo.create(user2), userRepo.create(user3)]);
+      const partnerRepo = getRepository('partner');
+      await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
+      const assetRepo = getRepository('asset');
+      const asset = mediumFactory.assetInsert({ ownerId: user3.id });
+      await assetRepo.create(asset);
+      await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' });
+      const sessionRepo = getRepository('session');
+      const session = mediumFactory.sessionInsert({ userId: user3.id });
+      await sessionRepo.create(session);
+      const authUser3 = factory.auth({ session, user: user3 });

       await expect(testSync(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
       await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
     });

View File

@@ -41,7 +41,10 @@ const authFactory = ({
 }: {
   apiKey?: Partial<AuthApiKey>;
   session?: { id: string };
-  user?: Partial<UserAdmin>;
+  user?: Omit<
+    Partial<UserAdmin>,
+    'createdAt' | 'updatedAt' | 'deletedAt' | 'fileCreatedAt' | 'fileModifiedAt' | 'localDateTime' | 'profileChangedAt'
+  >;
   sharedLink?: Partial<AuthSharedLink>;
 } = {}) => {
   const auth: AuthDto = {

View File

@@ -1,11 +1,9 @@
 import { ClassConstructor } from 'class-transformer';
 import { Kysely, sql } from 'kysely';
-import { PostgresJSDialect } from 'kysely-postgres-js';
 import { ChildProcessWithoutNullStreams } from 'node:child_process';
 import { Writable } from 'node:stream';
 import { parse } from 'pg-connection-string';
 import { PNG } from 'pngjs';
-import postgres, { Notice } from 'postgres';
 import { DB } from 'src/db';
 import { AccessRepository } from 'src/repositories/access.repository';
 import { ActivityRepository } from 'src/repositories/activity.repository';
@@ -51,6 +49,7 @@ import { VersionHistoryRepository } from 'src/repositories/version-history.repos
 import { ViewRepository } from 'src/repositories/view-repository';
 import { BaseService } from 'src/services/base.service';
 import { RepositoryInterface } from 'src/types';
+import { getKyselyConfig } from 'src/utils/database';
 import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
 import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
 import { newConfigRepositoryMock } from 'test/repositories/config.repository.mock';
@@ -203,7 +202,7 @@ export const newTestService = <T extends BaseService>(
     partner: automock(PartnerRepository, { strict: false }),
     person: newPersonRepositoryMock(),
     process: automock(ProcessRepository),
-    search: automock(SearchRepository, { args: [loggerMock], strict: false }),
+    search: automock(SearchRepository, { strict: false }),
     // eslint-disable-next-line no-sparse-arrays
     serverInfo: automock(ServerInfoRepository, { args: [, loggerMock], strict: false }),
     session: automock(SessionRepository),
@@ -305,44 +304,13 @@ export const getKyselyDB = async (suffix?: string): Promise<Kysely<DB>> => {
     database: parsed.database ?? undefined,
   };

-  const driverOptions = {
-    ...parsedOptions,
-    onnotice: (notice: Notice) => {
-      if (notice['severity'] !== 'NOTICE') {
-        console.warn('Postgres notice:', notice);
-      }
-    },
-    max: 10,
-    types: {
-      date: {
-        to: 1184,
-        from: [1082, 1114, 1184],
-        serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
-        parse: (x: string) => new Date(x),
-      },
-      bigint: {
-        to: 20,
-        from: [20],
-        parse: (value: string) => Number.parseInt(value),
-        serialize: (value: number) => value.toString(),
-      },
-    },
-    connection: {
-      TimeZone: 'UTC',
-    },
-  };
-  const kysely = new Kysely<DB>({
-    dialect: new PostgresJSDialect({ postgres: postgres({ ...driverOptions, max: 1, database: 'postgres' }) }),
-  });
+  const kysely = new Kysely<DB>(getKyselyConfig({ ...parsedOptions, max: 1, database: 'postgres' }));
   const randomSuffix = Math.random().toString(36).slice(2, 7);
   const dbName = `immich_${suffix ?? randomSuffix}`;

   await sql.raw(`CREATE DATABASE ${dbName} WITH TEMPLATE immich OWNER postgres;`).execute(kysely);

-  return new Kysely<DB>({
-    dialect: new PostgresJSDialect({ postgres: postgres({ ...driverOptions, database: dbName }) }),
-  });
+  return new Kysely<DB>(getKyselyConfig({ ...parsedOptions, database: dbName }));
 };

 export const newRandomImage = () => {

View File

@@ -69,9 +69,17 @@
 <div in:fly={{ y: 10, duration: 200 }} class="absolute top-0 w-full z-[100] bg-transparent">
   <div
     id="asset-selection-app-bar"
-    class={`grid ${multiRow ? 'grid-cols-[100%] md:grid-cols-[25%_50%_25%]' : 'grid-cols-[10%_80%_10%] sm:grid-cols-[25%_50%_25%]'} justify-between lg:grid-cols-[25%_50%_25%] ${appBarBorder} mx-2 my-2 place-items-center rounded-lg p-2 transition-all ${tailwindClasses} dark:bg-immich-dark-gray ${
-      forceDark && 'bg-immich-dark-gray text-white'
-    }`}
+    class={[
+      'grid',
+      multiRow && 'grid-cols-[100%] md:grid-cols-[25%_50%_25%]',
+      !multiRow && 'grid-cols-[10%_80%_10%] sm:grid-cols-[25%_50%_25%]',
+      'justify-between lg:grid-cols-[25%_50%_25%]',
+      appBarBorder,
+      'mx-2 my-2 place-items-center rounded-lg p-2 max-md:p-0 transition-all',
+      tailwindClasses,
+      'bg-immich-gray dark:bg-immich-dark-gray',
+      forceDark && 'bg-immich-dark-gray text-white',
+    ]}
   >
     <div class="flex place-items-center sm:gap-6 justify-self-start dark:text-immich-dark-fg">
       {#if showBackButton}