Merge branch 'main' into feat/sqlite-device-assets

# Conflicts:
#	mobile/lib/domain/utils/background_sync.dart
shenlong-tanwen 2025-04-20 23:17:14 +05:30
commit 5fc1a63810
98 changed files with 2272 additions and 1603 deletions

View File

@ -7,6 +7,15 @@ on:
ref:
required: false
type: string
secrets:
KEY_JKS:
required: true
ALIAS:
required: true
ANDROID_KEY_PASSWORD:
required: true
ANDROID_STORE_PASSWORD:
required: true
pull_request:
push:
branches: [main]
@ -15,14 +24,21 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:
@ -38,22 +54,17 @@ jobs:
build-sign-android:
name: Build and sign Android
needs: pre-job
permissions:
contents: read
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
runs-on: macos-14
steps:
- name: Determine ref
id: get-ref
run: |
input_ref="${{ inputs.ref }}"
github_ref="${{ github.sha }}"
ref="${input_ref:-$github_ref}"
echo "ref=$ref" >> $GITHUB_OUTPUT
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
ref: ${{ steps.get-ref.outputs.ref }}
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4
with:

View File

@ -8,31 +8,38 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
cleanup:
name: Cleanup
runs-on: ubuntu-latest
permissions:
contents: read
actions: write
steps:
- name: Check out code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Cleanup
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REF: ${{ github.ref }}
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH=${{ github.ref }}
echo "Fetching list of cache keys"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR
do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -16,19 +16,23 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
packages: write
permissions: {}
jobs:
publish:
name: CLI Publish
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:
@ -48,11 +52,16 @@ jobs:
docker:
name: Docker
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
needs: publish
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0

View File

@ -24,6 +24,8 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
analyze:
name: Analyze
@ -43,6 +45,8 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@ -12,18 +12,21 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
packages: write
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:
@ -45,6 +48,9 @@ jobs:
retag_ml:
name: Re-Tag ML
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
runs-on: ubuntu-latest
strategy:
@ -58,18 +64,22 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
env:
REGISTRY_NAME: 'ghcr.io'
REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
TAG_OLD: main${{ matrix.suffix }}
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
run: |
REGISTRY_NAME="ghcr.io"
REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
TAG_OLD=main${{ matrix.suffix }}
TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
retag_server:
name: Re-Tag Server
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
runs-on: ubuntu-latest
strategy:
@ -83,18 +93,22 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
env:
REGISTRY_NAME: 'ghcr.io'
REPOSITORY: ${{ github.repository_owner }}/immich-server
TAG_OLD: main${{ matrix.suffix }}
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
run: |
REGISTRY_NAME="ghcr.io"
REPOSITORY=${{ github.repository_owner }}/immich-server
TAG_OLD=main${{ matrix.suffix }}
TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
build_and_push_ml:
name: Build and Push ML
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ${{ matrix.runner }}
env:
@ -148,6 +162,8 @@ jobs:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
@ -161,11 +177,14 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate cache key suffix
env:
REF: ${{ github.ref_name }}
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
else
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
fi
- name: Generate cache target
@ -175,7 +194,7 @@ jobs:
# Essentially just ignore the cache output (forks can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${{ matrix.device }}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
fi
- name: Generate docker image tags
@ -221,6 +240,10 @@ jobs:
merge_ml:
name: Merge & Push ML
runs-on: ubuntu-latest
permissions:
contents: read
actions: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
env:
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
@ -308,15 +331,16 @@ jobs:
fi
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
build_and_push_server:
name: Build and Push Server
runs-on: ${{ matrix.runner }}
permissions:
contents: read
packages: write
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
env:
@ -340,6 +364,8 @@ jobs:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
@ -353,11 +379,14 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate cache key suffix
env:
REF: ${{ github.ref_name }}
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
else
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
fi
- name: Generate cache target
@ -367,7 +396,7 @@ jobs:
# Essentially just ignore the cache output (forks can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
fi
- name: Generate docker image tags
@ -413,6 +442,10 @@ jobs:
merge_server:
name: Merge & Push Server
runs-on: ubuntu-latest
permissions:
contents: read
actions: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
env:
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
@ -486,15 +519,14 @@ jobs:
fi
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
success-check-server:
name: Docker Build & Push Server Success
needs: [merge_server, retag_server]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
@ -508,6 +540,7 @@ jobs:
success-check-ml:
name: Docker Build & Push ML Success
needs: [merge_ml, retag_ml]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:

View File

@ -10,14 +10,20 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:
@ -33,6 +39,8 @@ jobs:
build:
name: Docs Build
needs: pre-job
permissions:
contents: read
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest
defaults:
@ -42,6 +50,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

View File

@ -9,6 +9,9 @@ jobs:
checks:
name: Docs Deploy Checks
runs-on: ubuntu-latest
permissions:
actions: read
pull-requests: read
outputs:
parameters: ${{ steps.parameters.outputs.result }}
artifact: ${{ steps.get-artifact.outputs.result }}
@ -36,6 +39,8 @@ jobs:
- name: Determine deploy parameters
id: parameters
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
env:
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
with:
script: |
const eventType = context.payload.workflow_run.event;
@ -57,7 +62,8 @@ jobs:
} else if (eventType == "pull_request") {
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
if(!pull_number) {
const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
const {HEAD_SHA} = process.env;
const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
const items = response.data.items
if (items.length < 1) {
throw new Error("No pull request found for the commit")
@ -95,10 +101,16 @@ jobs:
name: Docs Deploy
runs-on: ubuntu-latest
needs: checks
permissions:
contents: read
actions: read
pull-requests: write
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Load parameters
id: parameters
@ -162,9 +174,11 @@ jobs:
- name: Output Cleaning
id: clean
env:
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
run: |
TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$TG_OUT" >> $GITHUB_OUTPUT
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$CLEANED" >> $GITHUB_OUTPUT
- name: Publish to Cloudflare Pages
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1

View File

@ -3,13 +3,20 @@ on:
pull_request_target:
types: [closed]
permissions: {}
jobs:
deploy:
name: Docs Destroy
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Destroy Docs Subdomain
env:

View File

@ -4,11 +4,14 @@ on:
pull_request:
types: [labeled]
permissions: {}
jobs:
fix-formatting:
runs-on: ubuntu-latest
if: ${{ github.event.label.name == 'fix:formatting' }}
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token
@ -23,6 +26,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

View File

@ -4,6 +4,8 @@ on:
pull_request_target:
types: [opened, labeled, unlabeled, synchronize]
permissions: {}
jobs:
validate-release-label:
runs-on: ubuntu-latest

View File

@ -2,6 +2,8 @@ name: 'Pull Request Labeler'
on:
- pull_request_target
permissions: {}
jobs:
labeler:
permissions:

View File

@ -4,9 +4,13 @@ on:
pull_request:
types: [opened, synchronize, reopened, edited]
permissions: {}
jobs:
validate-pr-title:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: PR Conventional Commit Validation
uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0

View File

@ -21,13 +21,14 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-root
cancel-in-progress: true
permissions: {}
jobs:
bump_version:
runs-on: ubuntu-latest
outputs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
permissions: {} # No job-level permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
@ -40,6 +41,7 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Install uv
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
@ -59,14 +61,20 @@ jobs:
build_mobile:
uses: ./.github/workflows/build-mobile.yml
needs: bump_version
secrets: inherit
secrets:
KEY_JKS: ${{ secrets.KEY_JKS }}
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
with:
ref: ${{ needs.bump_version.outputs.ref }}
prepare_release:
runs-on: ubuntu-latest
needs: build_mobile
permissions:
actions: read # To download the app artifact
# No content permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
@ -79,6 +87,7 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: false
- name: Download APK
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
@ -90,6 +99,7 @@ jobs:
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}
token: ${{ steps.generate-token.outputs.token }}
generate_release_notes: true
body_path: misc/release/notes.tmpl
files: |

View File

@ -4,6 +4,8 @@ on:
pull_request:
types: [labeled, closed]
permissions: {}
jobs:
comment-status:
runs-on: ubuntu-latest

View File

@ -4,18 +4,22 @@ on:
release:
types: [published]
permissions:
packages: write
permissions: {}
jobs:
publish:
name: Publish `@immich/sdk`
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./open-api/typescript-sdk
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:

View File

@ -9,14 +9,20 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:
@ -33,12 +39,14 @@ jobs:
name: Run Dart Code Analysis
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
@ -69,9 +77,11 @@ jobs:
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
echo "Changed files: ${CHANGED_FILES}"
exit 1
- name: Run dart analyze

View File

@ -9,9 +9,13 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
@ -25,6 +29,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:
@ -58,6 +65,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
@ -65,6 +74,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -95,6 +106,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
@ -102,6 +115,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -136,6 +151,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: windows-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
@ -143,6 +160,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -170,6 +189,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./web
@ -177,6 +198,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -215,6 +238,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
@ -222,6 +247,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -254,6 +281,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
@ -261,6 +290,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -279,6 +310,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
@ -287,6 +320,7 @@ jobs:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup Node
@ -321,6 +355,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
@ -329,6 +365,7 @@ jobs:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup Node
@ -362,8 +399,13 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
with:
@ -378,11 +420,16 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./machine-learning
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Install uv
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
@ -411,6 +458,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./.github
@ -418,6 +467,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -434,22 +485,31 @@ jobs:
shellcheck:
name: ShellCheck
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Run ShellCheck
uses: ludeeus/action-shellcheck@master
with:
ignore_paths: >-
**/open-api/**
**/openapi/**
**/openapi**
**/node_modules/**
generated-api-up-to-date:
name: OpenAPI Clients
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -476,14 +536,18 @@ jobs:
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date!"
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
echo "Changed files: ${CHANGED_FILES}"
exit 1
generated-typeorm-migrations-up-to-date:
name: TypeORM Checks
runs-on: ubuntu-latest
permissions:
contents: read
services:
postgres:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
@ -505,6 +569,8 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@ -521,7 +587,7 @@ jobs:
run: npm run migrations:run
- name: Test npm run schema:reset command works
run: npm run typeorm:schema:reset
run: npm run schema:reset
- name: Generate new migrations
continue-on-error: true
@ -535,9 +601,11 @@ jobs:
server/src
- name: Verify migration files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated migration files not up to date!"
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
echo "Changed files: ${CHANGED_FILES}"
cat ./src/*-TestMigration.ts
exit 1
@ -555,9 +623,11 @@ jobs:
- name: Verify SQL files have not changed
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
run: |
echo "ERROR: Generated SQL files not up to date!"
echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
echo "Changed files: ${CHANGED_FILES}"
exit 1
# mobile-integration-tests:

View File

@ -4,30 +4,32 @@ on:
pull_request:
branches: [main]
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:
filters: |
i18n:
- 'i18n/!(en)**\.json'
- name: Debug
run: |
echo "Should run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}"
echo "Found i18n paths: ${{ steps.found_paths.outputs.i18n }}"
echo "Head ref: ${{ github.head_ref }}"
enforce-lock:
name: Check Weblate Lock
needs: [pre-job]
runs-on: ubuntu-latest
permissions: {}
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
steps:
- name: Check weblate lock
@ -47,6 +49,7 @@ jobs:
name: Weblate Lock Check Success
needs: [enforce-lock]
runs-on: ubuntu-latest
permissions: {}
if: always()
steps:
- name: Any jobs failed?

View File

@ -1,14 +1,14 @@
# Database Migrations
After making any changes in `server/src/entities`, a database migration needs to be run to register the changes in the database. Follow the steps below to create a new migration.
After making any changes in `server/src/schema`, a database migration needs to be run to register the changes in the database. Follow the steps below to create a new migration.
1. Run the command
```bash
npm run typeorm:migrations:generate <migration-name>
npm run migrations:generate <migration-name>
```
2. Check if the migration file makes sense.
3. Move the migration file to folder `./server/src/migrations` in your code editor.
3. Move the migration file to folder `./server/src/schema/migrations` in your code editor.
The server will automatically detect `*.ts` file changes and restart. Since the server runs any new migrations as part of start-up, they will be applied immediately.

docs/src/pages/errors.md Normal file
View File

@ -0,0 +1,5 @@
# Errors
## TypeORM Upgrade
Upgrading to Immich `v2.x.x` requires passing through `v1.132.0` or later first: the application must be started at least once on version `1.132.0` (or newer), which completes the database schema upgrades that `v2.0.0` depends on. After Immich has successfully booted on such a version, shut the system down and try the `v2.x.x` upgrade again.

View File

@ -996,6 +996,7 @@
"filetype": "Filetype",
"filter": "Filter",
"filter_people": "Filter people",
"filter_places": "Filter places",
"find_them_fast": "Find them fast by name with search",
"fix_incorrect_match": "Fix incorrect match",
"folder": "Folder",

View File

@ -1,8 +1,12 @@
import 'package:http/http.dart' as http;
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:openapi/api.dart';
abstract interface class ISyncApiRepository {
Future<void> ack(List<String> data);
Stream<List<SyncEvent>> getSyncEvents(List<SyncRequestType> type);
Future<void> streamChanges(
Function(List<SyncEvent>, Function() abort) onData, {
int batchSize,
http.Client? httpClient,
});
}
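
The interface now pushes batches to a caller-supplied `onData` callback and hands it an `abort` function, replacing the previous pull-based `getSyncEvents` stream. A minimal sketch of a conforming caller, assuming only the members declared above (the cancellation hook is hypothetical):

```dart
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';

// Hypothetical consumer: process each pushed batch, acknowledge the
// last event, and call abort() to stop the HTTP stream early.
Future<void> runSync(
  ISyncApiRepository api, {
  bool Function()? isCancelled,
}) {
  return api.streamChanges((events, abort) async {
    if (isCancelled?.call() ?? false) {
      abort(); // stops the underlying chunked response
      return;
    }
    if (events.isEmpty) return;
    await api.ack([events.last.ack]); // checkpoint progress
  });
}
```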

View File

@ -2,9 +2,17 @@ import 'package:immich_mobile/domain/interfaces/db.interface.dart';
import 'package:openapi/api.dart';
abstract interface class ISyncStreamRepository implements IDatabaseRepository {
Future<bool> updateUsersV1(Iterable<SyncUserV1> data);
Future<bool> deleteUsersV1(Iterable<SyncUserDeleteV1> data);
Future<void> updateUsersV1(Iterable<SyncUserV1> data);
Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data);
Future<bool> updatePartnerV1(Iterable<SyncPartnerV1> data);
Future<bool> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data);
Future<void> updatePartnerV1(Iterable<SyncPartnerV1> data);
Future<void> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data);
Future<void> updateAssetsV1(Iterable<SyncAssetV1> data);
Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data);
Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data);
Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data);
Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data);
Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data);
}
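
Handlers now return `Future<void>` and signal failure by throwing rather than returning `false`, so a failed batch simply never gets acknowledged. A sketch of the resulting calling pattern (the ack hook is hypothetical, and the redelivery behaviour is an assumption about the sync protocol):

```dart
// Sketch: with void + rethrow, a persistence failure skips the ack
// and surfaces to the caller instead of being swallowed as `false`.
Future<void> applyUserBatch(
  ISyncStreamRepository repo,
  Iterable<SyncUserV1> users,
  Future<void> Function(String ack) sendAck, // hypothetical ack hook
  String lastAck,
) async {
  await repo.updateUsersV1(users); // throws on failure
  await sendAck(lastAck); // reached only if the batch persisted
  // assumption: an unacked batch is redelivered on the next sync run
}
```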

View File

@ -25,7 +25,7 @@ class DeviceSyncService {
_localAlbumRepository = localAlbumRepository,
_localAssetRepository = localAssetRepository;
Future<void> syncAlbums() async {
Future<void> sync() async {
try {
final Stopwatch stopwatch = Stopwatch()..start();
// The deviceAlbums will not have the updatedAt field

View File

@ -2,25 +2,11 @@
import 'dart:async';
import 'package:collection/collection.dart';
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';
import 'package:worker_manager/worker_manager.dart';
const _kSyncTypeOrder = [
SyncEntityType.userDeleteV1,
SyncEntityType.userV1,
SyncEntityType.partnerDeleteV1,
SyncEntityType.partnerV1,
SyncEntityType.assetDeleteV1,
SyncEntityType.assetV1,
SyncEntityType.assetExifV1,
SyncEntityType.partnerAssetDeleteV1,
SyncEntityType.partnerAssetV1,
SyncEntityType.partnerAssetExifV1,
];
class SyncStreamService {
final Logger _logger = Logger('SyncStreamService');
@ -37,164 +23,70 @@ class SyncStreamService {
_syncStreamRepository = syncStreamRepository,
_cancelChecker = cancelChecker;
Future<bool> _handleSyncData(
bool get isCancelled => _cancelChecker?.call() ?? false;
Future<void> sync() => _syncApiRepository.streamChanges(_handleEvents);
Future<void> _handleEvents(List<SyncEvent> events, Function() abort) async {
List<SyncEvent> items = [];
for (final event in events) {
if (isCancelled) {
_logger.warning("Sync stream cancelled");
abort();
return;
}
if (event.type != items.firstOrNull?.type) {
await _processBatch(items);
}
items.add(event);
}
await _processBatch(items);
}
Future<void> _processBatch(List<SyncEvent> batch) async {
if (batch.isEmpty) {
return;
}
final type = batch.first.type;
await _handleSyncData(type, batch.map((e) => e.data));
await _syncApiRepository.ack([batch.last.ack]);
batch.clear();
}
Future<void> _handleSyncData(
SyncEntityType type,
// ignore: avoid-dynamic
Iterable<dynamic> data,
) async {
if (data.isEmpty) {
_logger.warning("Received empty sync data for $type");
return false;
}
_logger.fine("Processing sync data for $type of length ${data.length}");
try {
if (type == SyncEntityType.partnerV1) {
return await _syncStreamRepository.updatePartnerV1(data.cast());
}
if (type == SyncEntityType.partnerDeleteV1) {
return await _syncStreamRepository.deletePartnerV1(data.cast());
}
if (type == SyncEntityType.userV1) {
return await _syncStreamRepository.updateUsersV1(data.cast());
}
if (type == SyncEntityType.userDeleteV1) {
return await _syncStreamRepository.deleteUsersV1(data.cast());
}
} catch (error, stack) {
_logger.severe("Error processing sync data for $type", error, stack);
return false;
// ignore: prefer-switch-expression
switch (type) {
case SyncEntityType.userV1:
return _syncStreamRepository.updateUsersV1(data.cast());
case SyncEntityType.userDeleteV1:
return _syncStreamRepository.deleteUsersV1(data.cast());
case SyncEntityType.partnerV1:
return _syncStreamRepository.updatePartnerV1(data.cast());
case SyncEntityType.partnerDeleteV1:
return _syncStreamRepository.deletePartnerV1(data.cast());
case SyncEntityType.assetV1:
return _syncStreamRepository.updateAssetsV1(data.cast());
case SyncEntityType.assetDeleteV1:
return _syncStreamRepository.deleteAssetsV1(data.cast());
case SyncEntityType.assetExifV1:
return _syncStreamRepository.updateAssetsExifV1(data.cast());
case SyncEntityType.partnerAssetV1:
return _syncStreamRepository.updatePartnerAssetsV1(data.cast());
case SyncEntityType.partnerAssetDeleteV1:
return _syncStreamRepository.deletePartnerAssetsV1(data.cast());
case SyncEntityType.partnerAssetExifV1:
return _syncStreamRepository.updatePartnerAssetsExifV1(data.cast());
default:
_logger.warning("Unknown sync data type: $type");
}
_logger.warning("Unknown sync data type: $type");
return false;
}
Future<void> _syncEvent(List<SyncRequestType> types) {
_logger.info("Syncing Events: $types");
final streamCompleter = Completer();
bool shouldComplete = false;
// the onDone callback might fire before the events are processed
// the following flag ensures that the onDone callback is not called
// before the events are processed and also that events are processed sequentially
Completer? mutex;
StreamSubscription? subscription;
try {
subscription = _syncApiRepository.getSyncEvents(types).listen(
(events) async {
if (events.isEmpty) {
_logger.warning("Received empty sync events");
return;
}
// If previous events are still being processed, wait for them to finish
if (mutex != null) {
await mutex!.future;
}
if (_cancelChecker?.call() ?? false) {
_logger.info("Sync cancelled, stopping stream");
subscription?.cancel();
if (!streamCompleter.isCompleted) {
streamCompleter.completeError(
CanceledError(),
StackTrace.current,
);
}
return;
}
// Take control of the mutex and process the events
mutex = Completer();
try {
final eventsMap = events.groupListsBy((event) => event.type);
final Map<SyncEntityType, String> acks = {};
for (final type in _kSyncTypeOrder) {
final data = eventsMap[type];
if (data == null) {
continue;
}
if (_cancelChecker?.call() ?? false) {
_logger.info("Sync cancelled, stopping stream");
mutex?.complete();
mutex = null;
if (!streamCompleter.isCompleted) {
streamCompleter.completeError(
CanceledError(),
StackTrace.current,
);
}
return;
}
if (data.isEmpty) {
_logger.warning("Received empty sync events for $type");
continue;
}
if (await _handleSyncData(type, data.map((e) => e.data))) {
// ignore: avoid-unsafe-collection-methods
acks[type] = data.last.ack;
} else {
_logger.warning("Failed to handle sync events for $type");
}
}
if (acks.isNotEmpty) {
await _syncApiRepository.ack(acks.values.toList());
}
_logger.info("$types events processed");
} catch (error, stack) {
_logger.warning("Error handling sync events", error, stack);
} finally {
mutex?.complete();
mutex = null;
}
if (shouldComplete) {
_logger.info("Sync done, completing stream");
if (!streamCompleter.isCompleted) streamCompleter.complete();
}
},
onError: (error, stack) {
_logger.warning("Error in sync stream for $types", error, stack);
// Do not proceed if the stream errors
if (!streamCompleter.isCompleted) {
// ignore: avoid-missing-completer-stack-trace
streamCompleter.completeError(error, stack);
}
},
onDone: () {
_logger.info("$types stream done");
if (mutex == null && !streamCompleter.isCompleted) {
streamCompleter.complete();
} else {
// Marks the stream as done but does not complete the completer
// until the events are processed
shouldComplete = true;
}
},
);
} catch (error, stack) {
_logger.severe("Error starting sync stream", error, stack);
if (!streamCompleter.isCompleted) {
streamCompleter.completeError(error, stack);
}
}
return streamCompleter.future.whenComplete(() {
_logger.info("Sync stream completed");
return subscription?.cancel();
});
}
Future<void> syncUsers() =>
_syncEvent([SyncRequestType.usersV1, SyncRequestType.partnersV1]);
}
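
`_handleEvents` replaces the removed `_kSyncTypeOrder` grouping with order-preserving batching: consecutive events of the same type are flushed together, and only the final ack of each flushed batch is sent. A standalone sketch of that rule, using illustrative strings in place of the real enums and ack tokens:

```dart
// Flush whenever the event type changes; acknowledge only the last
// event of each contiguous run, mirroring _handleEvents/_processBatch.
void main() {
  final events = [
    ('userV1', 'ack-1'),
    ('userV1', 'ack-2'),
    ('partnerV1', 'ack-3'),
  ];
  final batch = <(String, String)>[];
  final acks = <String>[];

  void flush() {
    if (batch.isEmpty) return;
    acks.add(batch.last.$2); // one ack per contiguous run
    batch.clear();
  }

  for (final event in events) {
    if (batch.isNotEmpty && event.$1 != batch.first.$1) flush();
    batch.add(event);
  }
  flush();
  print(acks); // [ack-2, ack-3]
}
```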

View File

@ -7,45 +7,49 @@ import 'package:immich_mobile/utils/isolate.dart';
import 'package:worker_manager/worker_manager.dart';
class BackgroundSyncManager {
Cancelable<void>? _userSyncTask;
Cancelable<void>? _syncTask;
Cancelable<void>? _deviceAlbumSyncTask;
BackgroundSyncManager();
Future<void> cancel() {
final futures = <Future>[];
if (_userSyncTask != null) {
futures.add(_userSyncTask!.future);
if (_syncTask != null) {
futures.add(_syncTask!.future);
}
_userSyncTask?.cancel();
_userSyncTask = null;
_syncTask?.cancel();
_syncTask = null;
return Future.wait(futures);
}
// No need to cancel the task, as it can also be run when the user logs out
Future<void> syncDeviceAlbums() {
Future<void> syncLocal() {
if (_deviceAlbumSyncTask != null) {
return _deviceAlbumSyncTask!.future;
}
_deviceAlbumSyncTask = runInIsolateGentle(
computation: (ref) => ref.read(deviceSyncServiceProvider).syncAlbums(),
computation: (ref) => ref.read(deviceSyncServiceProvider).sync(),
);
return _deviceAlbumSyncTask!.whenComplete(() {
_deviceAlbumSyncTask = null;
});
}
Future<void> syncUsers() {
if (_userSyncTask != null) {
return _userSyncTask!.future;
Future<void> syncRemote() {
if (_syncTask != null) {
return _syncTask!.future;
}
_userSyncTask = runInIsolateGentle(
computation: (ref) => ref.read(syncStreamServiceProvider).syncUsers(),
_syncTask = runInIsolateGentle(
computation: (ref) => ref.read(syncStreamServiceProvider).sync(),
);
return _userSyncTask!.whenComplete(() {
_userSyncTask = null;
return _syncTask!.whenComplete(() {
_syncTask = null;
});
}
}
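
Both entry points now share the same single-flight guard: a call made while a task is already running returns the in-flight task's future instead of spawning another isolate. A hypothetical call site:

```dart
// Concurrent requests coalesce onto the running task; only one
// isolate per kind of sync is ever active at a time.
Future<void> refreshEverything(BackgroundSyncManager manager) async {
  final remote = manager.syncRemote();
  final remoteAgain = manager.syncRemote(); // awaits the same task
  await Future.wait([manager.syncLocal(), remote, remoteAgain]);
}
```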

View File

@ -12,22 +12,22 @@ import 'package:openapi/api.dart';
class SyncApiRepository implements ISyncApiRepository {
final Logger _logger = Logger('SyncApiRepository');
final ApiService _api;
final int _batchSize;
SyncApiRepository(this._api, {int batchSize = kSyncEventBatchSize})
: _batchSize = batchSize;
@override
Stream<List<SyncEvent>> getSyncEvents(List<SyncRequestType> type) {
return _getSyncStream(SyncStreamDto(types: type));
}
SyncApiRepository(this._api);
@override
Future<void> ack(List<String> data) {
return _api.syncApi.sendSyncAck(SyncAckSetDto(acks: data));
}
Stream<List<SyncEvent>> _getSyncStream(SyncStreamDto dto) async* {
final client = http.Client();
@override
Future<void> streamChanges(
Function(List<SyncEvent>, Function() abort) onData, {
int batchSize = kSyncEventBatchSize,
http.Client? httpClient,
}) async {
// ignore: avoid-unused-assignment
final stopwatch = Stopwatch()..start();
final client = httpClient ?? http.Client();
final endpoint = "${_api.apiClient.basePath}/sync/stream";
final headers = {
@ -35,20 +35,38 @@ class SyncApiRepository implements ISyncApiRepository {
'Accept': 'application/jsonlines+json',
};
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
await _api.applyToParams(queryParams, headerParams);
await _api.applyToParams([], headerParams);
headers.addAll(headerParams);
final request = http.Request('POST', Uri.parse(endpoint));
request.headers.addAll(headers);
request.body = jsonEncode(dto.toJson());
request.body = jsonEncode(
SyncStreamDto(
types: [
SyncRequestType.usersV1,
SyncRequestType.partnersV1,
SyncRequestType.assetsV1,
SyncRequestType.partnerAssetsV1,
SyncRequestType.assetExifsV1,
SyncRequestType.partnerAssetExifsV1,
],
).toJson(),
);
String previousChunk = '';
List<String> lines = [];
bool shouldAbort = false;
void abort() {
_logger.warning("Abort requested, stopping sync stream");
shouldAbort = true;
}
try {
final response = await client.send(request);
final response =
await client.send(request).timeout(const Duration(seconds: 20));
if (response.statusCode != 200) {
final errorBody = await response.stream.bytesToString();
@ -59,27 +77,38 @@ class SyncApiRepository implements ISyncApiRepository {
}
await for (final chunk in response.stream.transform(utf8.decoder)) {
if (shouldAbort) {
break;
}
previousChunk += chunk;
final parts = previousChunk.toString().split('\n');
previousChunk = parts.removeLast();
lines.addAll(parts);
if (lines.length < _batchSize) {
if (lines.length < batchSize) {
continue;
}
yield _parseSyncResponse(lines);
await onData(_parseLines(lines), abort);
lines.clear();
}
} finally {
if (lines.isNotEmpty) {
yield _parseSyncResponse(lines);
if (lines.isNotEmpty && !shouldAbort) {
await onData(_parseLines(lines), abort);
}
} catch (error, stack) {
_logger.severe("error processing stream", error, stack);
return Future.error(error, stack);
} finally {
client.close();
}
stopwatch.stop();
_logger
.info("Remote Sync completed in ${stopwatch.elapsed.inMilliseconds}ms");
}
List<SyncEvent> _parseSyncResponse(List<String> lines) {
List<SyncEvent> _parseLines(List<String> lines) {
final List<SyncEvent> data = [];
for (final line in lines) {
@ -110,4 +139,10 @@ const _kResponseMap = <SyncEntityType, Function(dynamic)>{
SyncEntityType.userDeleteV1: SyncUserDeleteV1.fromJson,
SyncEntityType.partnerV1: SyncPartnerV1.fromJson,
SyncEntityType.partnerDeleteV1: SyncPartnerDeleteV1.fromJson,
SyncEntityType.assetV1: SyncAssetV1.fromJson,
SyncEntityType.assetDeleteV1: SyncAssetDeleteV1.fromJson,
SyncEntityType.assetExifV1: SyncAssetExifV1.fromJson,
SyncEntityType.partnerAssetV1: SyncAssetV1.fromJson,
SyncEntityType.partnerAssetDeleteV1: SyncAssetDeleteV1.fromJson,
SyncEntityType.partnerAssetExifV1: SyncAssetExifV1.fromJson,
};
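
`streamChanges` reads the jsonlines body chunk by chunk, and a network chunk can end mid-line; the trailing partial line is therefore carried over into the next chunk before splitting. A standalone sketch of that carry-over, with made-up payloads:

```dart
// Mirror of the buffering in streamChanges: split the accumulated
// buffer on '\n' and keep the incomplete tail for the next chunk.
void main() {
  final chunks = ['{"a":1}\n{"b"', ':2}\n{"c":3}\n'];
  var previousChunk = '';
  final lines = <String>[];
  for (final chunk in chunks) {
    previousChunk += chunk;
    final parts = previousChunk.split('\n');
    previousChunk = parts.removeLast(); // incomplete tail, if any
    lines.addAll(parts);
  }
  print(lines); // [{"a":1}, {"b":2}, {"c":3}]
}
```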

View File

@ -1,4 +1,5 @@
import 'package:drift/drift.dart';
import 'package:flutter/foundation.dart';
import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
import 'package:immich_mobile/extensions/string_extensions.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart';
@ -15,7 +16,7 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
DriftSyncStreamRepository(super.db) : _db = db;
@override
Future<bool> deleteUsersV1(Iterable<SyncUserDeleteV1> data) async {
Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data) async {
try {
await _db.batch((batch) {
for (final user in data) {
@ -25,15 +26,14 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
);
}
});
return true;
} catch (e, s) {
_logger.severe('Error while processing SyncUserDeleteV1', e, s);
return false;
} catch (error, stack) {
_logger.severe('Error while processing SyncUserDeleteV1', error, stack);
rethrow;
}
}
@override
Future<bool> updateUsersV1(Iterable<SyncUserV1> data) async {
Future<void> updateUsersV1(Iterable<SyncUserV1> data) async {
try {
await _db.batch((batch) {
for (final user in data) {
@ -49,15 +49,14 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
);
}
});
return true;
} catch (e, s) {
_logger.severe('Error while processing SyncUserV1', e, s);
return false;
} catch (error, stack) {
_logger.severe('Error while processing SyncUserV1', error, stack);
rethrow;
}
}
@override
Future<bool> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data) async {
Future<void> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data) async {
try {
await _db.batch((batch) {
for (final partner in data) {
@ -70,15 +69,14 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
);
}
});
return true;
} catch (e, s) {
_logger.severe('Error while processing SyncPartnerDeleteV1', e, s);
return false;
rethrow;
}
}
@override
Future<bool> updatePartnerV1(Iterable<SyncPartnerV1> data) async {
Future<void> updatePartnerV1(Iterable<SyncPartnerV1> data) async {
try {
await _db.batch((batch) {
for (final partner in data) {
@ -95,10 +93,42 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
);
}
});
return true;
} catch (e, s) {
_logger.severe('Error while processing SyncPartnerV1', e, s);
return false;
rethrow;
}
}
// Assets
@override
Future<void> updateAssetsV1(Iterable<SyncAssetV1> data) async {
debugPrint("updateAssetsV1 - ${data.length}");
}
@override
Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
debugPrint("deleteAssetsV1 - ${data.length}");
}
// Partner Assets
@override
Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data) async {
debugPrint("updatePartnerAssetsV1 - ${data.length}");
}
@override
Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
debugPrint("deletePartnerAssetsV1 - ${data.length}");
}
// EXIF
@override
Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
debugPrint("updateAssetsExifV1 - ${data.length}");
}
@override
Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
debugPrint("updatePartnerAssetsExifV1 - ${data.length}");
}
}

View File

@ -4,11 +4,11 @@ import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/providers/search/people.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/utils/image_url_builder.dart';
import 'package:immich_mobile/widgets/common/search_field.dart';
import 'package:immich_mobile/widgets/search/person_name_edit_form.dart';
@RoutePage()
@ -42,47 +42,12 @@ class PeopleCollectionPage extends HookConsumerWidget {
appBar: AppBar(
automaticallyImplyLeading: search.value == null,
title: search.value != null
? TextField(
? SearchField(
focusNode: formFocus,
onTapOutside: (_) => formFocus.unfocus(),
onChanged: (value) => search.value = value,
decoration: InputDecoration(
contentPadding: const EdgeInsets.only(left: 24),
filled: true,
fillColor: context.primaryColor.withValues(alpha: 0.1),
hintStyle: context.textTheme.bodyLarge?.copyWith(
color: context.themeData.colorScheme.onSurfaceSecondary,
),
border: OutlineInputBorder(
borderRadius: BorderRadius.circular(25),
borderSide: BorderSide(
color: context.colorScheme.surfaceContainerHighest,
),
),
enabledBorder: OutlineInputBorder(
borderRadius: BorderRadius.circular(25),
borderSide: BorderSide(
color: context.colorScheme.surfaceContainerHighest,
),
),
disabledBorder: OutlineInputBorder(
borderRadius: BorderRadius.circular(25),
borderSide: BorderSide(
color: context.colorScheme.surfaceContainerHighest,
),
),
focusedBorder: OutlineInputBorder(
borderRadius: BorderRadius.circular(25),
borderSide: BorderSide(
color: context.colorScheme.primary.withAlpha(150),
),
),
prefixIcon: Icon(
Icons.search_rounded,
color: context.colorScheme.primary,
),
hintText: 'filter_people'.tr(),
),
filled: true,
hintText: 'filter_people'.tr(),
autofocus: true,
)
: Text('people'.tr()),

View File

@ -2,6 +2,7 @@ import 'package:auto_route/auto_route.dart';
import 'package:cached_network_image/cached_network_image.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
@ -12,6 +13,7 @@ import 'package:immich_mobile/pages/common/large_leading_tile.dart';
import 'package:immich_mobile/providers/search/search_page_state.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/widgets/common/search_field.dart';
import 'package:immich_mobile/widgets/map/map_thumbnail.dart';
import 'package:maplibre_gl/maplibre_gl.dart';
@ -21,34 +23,62 @@ class PlacesCollectionPage extends HookConsumerWidget {
@override
Widget build(BuildContext context, WidgetRef ref) {
final places = ref.watch(getAllPlacesProvider);
final formFocus = useFocusNode();
final ValueNotifier<String?> search = useState(null);
return Scaffold(
appBar: AppBar(
title: Text('places'.tr()),
automaticallyImplyLeading: search.value == null,
title: search.value != null
? SearchField(
autofocus: true,
filled: true,
focusNode: formFocus,
onChanged: (value) => search.value = value,
onTapOutside: (_) => formFocus.unfocus(),
hintText: 'filter_places'.tr(),
)
: Text('places'.tr()),
actions: [
IconButton(
icon: Icon(search.value != null ? Icons.close : Icons.search),
onPressed: () {
search.value = search.value == null ? '' : null;
},
),
],
),
body: ListView(
shrinkWrap: true,
children: [
Padding(
padding: const EdgeInsets.all(16.0),
child: SizedBox(
height: 200,
width: context.width,
child: MapThumbnail(
onTap: (_, __) => context.pushRoute(const MapRoute()),
zoom: 8,
centre: const LatLng(
21.44950,
-157.91959,
if (search.value == null)
Padding(
padding: const EdgeInsets.all(16.0),
child: SizedBox(
height: 200,
width: context.width,
child: MapThumbnail(
onTap: (_, __) => context.pushRoute(const MapRoute()),
zoom: 8,
centre: const LatLng(
21.44950,
-157.91959,
),
showAttribution: false,
themeMode:
context.isDarkTheme ? ThemeMode.dark : ThemeMode.light,
),
showAttribution: false,
themeMode:
context.isDarkTheme ? ThemeMode.dark : ThemeMode.light,
),
),
),
places.when(
data: (places) {
if (search.value != null) {
places = places.where((place) {
return place.label
.toLowerCase()
.contains(search.value!.toLowerCase());
}).toList();
}
return ListView.builder(
shrinkWrap: true,
physics: const NeverScrollableScrollPhysics(),

View File

@ -2,7 +2,6 @@ import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
@ -14,7 +13,6 @@ import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/timeline.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/providers/websocket.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/widgets/asset_grid/multiselect_grid.dart';
import 'package:immich_mobile/widgets/common/immich_app_bar.dart';
import 'package:immich_mobile/widgets/common/immich_loading_indicator.dart';
@ -130,15 +128,7 @@ class PhotosPage extends HookConsumerWidget {
child: Container(
height: kToolbarHeight + context.padding.top,
color: context.themeData.appBarTheme.backgroundColor,
child: ImmichAppBar(
actions: [
if (kDebugMode)
IconButton(
icon: const Icon(Icons.science_rounded),
onPressed: () => context.pushRoute(const FeatInDevRoute()),
),
],
),
child: const ImmichAppBar(),
),
),
],

View File

@ -9,12 +9,12 @@ final _features = [
_Features(
name: 'Sync Local',
icon: Icons.photo_album_rounded,
onTap: (ref) => ref.read(backgroundSyncProvider).syncDeviceAlbums(),
onTap: (ref) => ref.read(backgroundSyncProvider).syncLocal(),
),
_Features(
name: 'Sync Remote',
icon: Icons.refresh_rounded,
onTap: (ref) => ref.read(backgroundSyncProvider).syncUsers(),
onTap: (ref) => ref.read(backgroundSyncProvider).syncRemote(),
),
];

View File

@ -1,5 +1,6 @@
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_svg/svg.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
@ -178,6 +179,12 @@ class ImmichAppBar extends ConsumerWidget implements PreferredSizeWidget {
child: action,
),
),
if (kDebugMode)
IconButton(
icon: const Icon(Icons.science_rounded),
onPressed: () => context.pushRoute(const FeatInDevRoute()),
),
if (showUploadButton)
Padding(
padding: const EdgeInsets.only(right: 20),

View File

@ -2,3 +2,5 @@ import 'package:mocktail/mocktail.dart';
import 'package:openapi/api.dart';
class MockAssetsApi extends Mock implements AssetsApi {}
class MockSyncApi extends Mock implements SyncApi {}

View File

@ -73,9 +73,9 @@ void main() {
);
});
group('syncAlbums', () {
group('sync', () {
test('should return when no albums exist', () async {
await sut.syncAlbums();
await sut.sync();
verify(() => mockAlbumMediaRepo.getAll()).called(1);
verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
.called(1);
@ -104,7 +104,7 @@ void main() {
when(() => mockAlbumMediaRepo.getAssetsForAlbum(deviceAlbums.first.id))
.thenAnswer((_) async => [LocalAssetStub.image1]);
await sut.syncAlbums();
await sut.sync();
verify(() => mockAlbumMediaRepo.getAll()).called(1);
verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
@ -127,7 +127,7 @@ void main() {
when(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
.thenAnswer((_) async => dbAlbums);
await sut.syncAlbums();
await sut.sync();
verify(() => mockAlbumMediaRepo.getAll()).called(1);
verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
@ -159,7 +159,7 @@ void main() {
when(() => mockLocalAlbumRepo.getAssetsForAlbum(commonAlbum.id))
.thenAnswer((_) async => []); // DB has no assets initially
await sut.syncAlbums();
await sut.sync();
verify(() => mockAlbumMediaRepo.getAll()).called(1);
verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
@ -206,7 +206,7 @@ void main() {
when(() => mockAlbumMediaRepo.getAssetsForAlbum(albumToAdd.id))
.thenAnswer((_) async => [LocalAssetStub.image1]);
await sut.syncAlbums();
await sut.sync();
verify(() => mockAlbumMediaRepo.getAll()).called(1);
verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
@ -239,7 +239,7 @@ void main() {
when(() => mockAlbumMediaRepo.getAll())
.thenThrow(Exception("Device error"));
await sut.syncAlbums();
await sut.sync();
verify(() => mockAlbumMediaRepo.getAll()).called(1);
verifyNever(

View File

@ -1,4 +1,4 @@
// ignore_for_file: avoid-unnecessary-futures, avoid-async-call-in-sync-function
// ignore_for_file: avoid-declaring-call-method, avoid-unnecessary-futures
import 'dart:async';
@ -8,16 +8,22 @@ import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/domain/services/sync_stream.service.dart';
import 'package:mocktail/mocktail.dart';
import 'package:openapi/api.dart';
import 'package:worker_manager/worker_manager.dart';
import '../../fixtures/sync_stream.stub.dart';
import '../../infrastructure/repository.mock.dart';
class _AbortCallbackWrapper {
const _AbortCallbackWrapper();
bool call() => false;
}
class _MockAbortCallbackWrapper extends Mock implements _AbortCallbackWrapper {}
class _CancellationWrapper {
const _CancellationWrapper();
bool isCancelled() => false;
bool call() => false;
}
class _MockCancellationWrapper extends Mock implements _CancellationWrapper {}
@ -26,35 +32,26 @@ void main() {
late SyncStreamService sut;
late ISyncStreamRepository mockSyncStreamRepo;
late ISyncApiRepository mockSyncApiRepo;
late StreamController<List<SyncEvent>> streamController;
late Function(List<SyncEvent>, Function()) handleEventsCallback;
late _MockAbortCallbackWrapper mockAbortCallbackWrapper;
successHandler(Invocation _) async => true;
failureHandler(Invocation _) async => false;
setUp(() {
mockSyncStreamRepo = MockSyncStreamRepository();
mockSyncApiRepo = MockSyncApiRepository();
streamController = StreamController<List<SyncEvent>>.broadcast();
mockAbortCallbackWrapper = _MockAbortCallbackWrapper();
sut = SyncStreamService(
syncApiRepository: mockSyncApiRepo,
syncStreamRepository: mockSyncStreamRepo,
);
when(() => mockAbortCallbackWrapper()).thenReturn(false);
// Default stream setup - emits one batch and closes
when(() => mockSyncApiRepo.getSyncEvents(any()))
.thenAnswer((_) => streamController.stream);
when(() => mockSyncApiRepo.streamChanges(any()))
.thenAnswer((invocation) async {
// ignore: avoid-unsafe-collection-methods
handleEventsCallback = invocation.positionalArguments.first;
});
// Default ack setup
when(() => mockSyncApiRepo.ack(any())).thenAnswer((_) async => {});
// Register fallbacks for mocktail verification
registerFallbackValue(<SyncUserV1>[]);
registerFallbackValue(<SyncPartnerV1>[]);
registerFallbackValue(<SyncUserDeleteV1>[]);
registerFallbackValue(<SyncPartnerDeleteV1>[]);
// Default successful repository calls
when(() => mockSyncStreamRepo.updateUsersV1(any()))
.thenAnswer(successHandler);
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
@ -63,381 +60,163 @@ void main() {
.thenAnswer(successHandler);
when(() => mockSyncStreamRepo.deletePartnerV1(any()))
.thenAnswer(successHandler);
when(() => mockSyncStreamRepo.updateAssetsV1(any()))
.thenAnswer(successHandler);
when(() => mockSyncStreamRepo.deleteAssetsV1(any()))
.thenAnswer(successHandler);
when(() => mockSyncStreamRepo.updateAssetsExifV1(any()))
.thenAnswer(successHandler);
when(() => mockSyncStreamRepo.updatePartnerAssetsV1(any()))
.thenAnswer(successHandler);
when(() => mockSyncStreamRepo.deletePartnerAssetsV1(any()))
.thenAnswer(successHandler);
when(() => mockSyncStreamRepo.updatePartnerAssetsExifV1(any()))
.thenAnswer(successHandler);
sut = SyncStreamService(
syncApiRepository: mockSyncApiRepo,
syncStreamRepository: mockSyncStreamRepo,
);
});
tearDown(() async {
if (!streamController.isClosed) {
await streamController.close();
}
});
// Helper to trigger sync and add events to the stream
Future<void> triggerSyncAndEmit(List<SyncEvent> events) async {
final future = sut.syncUsers(); // Start listening
await Future.delayed(Duration.zero); // Allow listener to attach
if (!streamController.isClosed) {
streamController.add(events);
await streamController.close(); // Close after emitting
}
await future; // Wait for processing to complete
Future<void> simulateEvents(List<SyncEvent> events) async {
await sut.sync();
await handleEventsCallback(events, mockAbortCallbackWrapper.call);
}
group("SyncStreamService", () {
group("SyncStreamService - _handleEvents", () {
test(
"completes successfully when stream emits data and handlers succeed",
"processes events and acks successfully when handlers succeed",
() async {
final events = [
...SyncStreamStub.userEvents,
...SyncStreamStub.partnerEvents,
SyncStreamStub.userDeleteV1,
SyncStreamStub.userV1Admin,
SyncStreamStub.userV1User,
SyncStreamStub.partnerDeleteV1,
SyncStreamStub.partnerV1,
];
final future = triggerSyncAndEmit(events);
await expectLater(future, completes);
// Verify ack includes last ack from each successfully handled type
verify(
() =>
mockSyncApiRepo.ack(any(that: containsAll(["5", "2", "4", "3"]))),
).called(1);
await simulateEvents(events);
verifyInOrder([
() => mockSyncStreamRepo.deleteUsersV1(any()),
() => mockSyncApiRepo.ack(["2"]),
() => mockSyncStreamRepo.updateUsersV1(any()),
() => mockSyncApiRepo.ack(["5"]),
() => mockSyncStreamRepo.deletePartnerV1(any()),
() => mockSyncApiRepo.ack(["4"]),
() => mockSyncStreamRepo.updatePartnerV1(any()),
() => mockSyncApiRepo.ack(["3"]),
]);
verifyNever(() => mockAbortCallbackWrapper());
},
);
test("completes successfully when stream emits an error", () async {
when(() => mockSyncApiRepo.getSyncEvents(any()))
.thenAnswer((_) => Stream.error(Exception("Stream Error")));
// Should surface the stream error to the caller
await expectLater(sut.syncUsers(), throwsException);
verifyNever(() => mockSyncApiRepo.ack(any())); // No ack on stream error
});
test("throws when initial getSyncEvents call fails", () async {
final apiException = Exception("API Error");
when(() => mockSyncApiRepo.getSyncEvents(any())).thenThrow(apiException);
// Should rethrow the exception from the initial call
await expectLater(sut.syncUsers(), throwsA(apiException));
verifyNever(() => mockSyncApiRepo.ack(any()));
});
test(
"completes successfully when a repository handler throws an exception",
() async {
when(() => mockSyncStreamRepo.updateUsersV1(any()))
.thenThrow(Exception("Repo Error"));
final events = [
...SyncStreamStub.userEvents,
...SyncStreamStub.partnerEvents,
];
// Should complete, but ack only for the successful types
await triggerSyncAndEmit(events);
// Only partner delete was successful by default setup
verify(() => mockSyncApiRepo.ack(["2", "4", "3"])).called(1);
},
);
test(
"completes successfully but sends no ack when all handlers fail",
() async {
when(() => mockSyncStreamRepo.updateUsersV1(any()))
.thenAnswer(failureHandler);
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
.thenAnswer(failureHandler);
when(() => mockSyncStreamRepo.updatePartnerV1(any()))
.thenAnswer(failureHandler);
when(() => mockSyncStreamRepo.deletePartnerV1(any()))
.thenAnswer(failureHandler);
final events = [
...SyncStreamStub.userEvents,
...SyncStreamStub.partnerEvents,
];
await triggerSyncAndEmit(events);
verifyNever(() => mockSyncApiRepo.ack(any()));
},
);
test("sends ack only for types where handler returns true", () async {
// Mock specific handlers: user update fails, user delete succeeds
when(() => mockSyncStreamRepo.updateUsersV1(any()))
.thenAnswer(failureHandler);
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
.thenAnswer(successHandler);
// partner update fails, partner delete succeeds
when(() => mockSyncStreamRepo.updatePartnerV1(any()))
.thenAnswer(failureHandler);
test("processes final batch correctly", () async {
final events = [
...SyncStreamStub.userEvents,
...SyncStreamStub.partnerEvents,
SyncStreamStub.userDeleteV1,
SyncStreamStub.userV1Admin,
];
await triggerSyncAndEmit(events);
// Expect ack only for userDeleteV1 (ack: "2") and partnerDeleteV1 (ack: "4")
verify(() => mockSyncApiRepo.ack(any(that: containsAll(["2", "4"]))))
.called(1);
await simulateEvents(events);
verifyInOrder([
() => mockSyncStreamRepo.deleteUsersV1(any()),
() => mockSyncApiRepo.ack(["2"]),
() => mockSyncStreamRepo.updateUsersV1(any()),
() => mockSyncApiRepo.ack(["1"]),
]);
verifyNever(() => mockAbortCallbackWrapper());
});
test("does not process or ack when stream emits an empty list", () async {
final future = sut.syncUsers();
streamController.add([]); // Emit empty list
await streamController.close();
await future; // Wait for completion
test("does not process or ack when event list is empty", () async {
await simulateEvents([]);
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
verifyNever(() => mockSyncStreamRepo.deleteUsersV1(any()));
verifyNever(() => mockSyncStreamRepo.updatePartnerV1(any()));
verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
verifyNever(() => mockAbortCallbackWrapper());
verifyNever(() => mockSyncApiRepo.ack(any()));
});
test("processes multiple batches sequentially using mutex", () async {
final completer1 = Completer<void>();
final completer2 = Completer<void>();
int callOrder = 0;
int handler1StartOrder = -1;
int handler2StartOrder = -1;
int handler1Calls = 0;
int handler2Calls = 0;
test("aborts and stops processing if cancelled during iteration", () async {
final cancellationChecker = _MockCancellationWrapper();
when(() => cancellationChecker()).thenReturn(false);
when(() => mockSyncStreamRepo.updateUsersV1(any())).thenAnswer((_) async {
handler1Calls++;
handler1StartOrder = ++callOrder;
await completer1.future;
return true;
});
when(() => mockSyncStreamRepo.updatePartnerV1(any()))
.thenAnswer((_) async {
handler2Calls++;
handler2StartOrder = ++callOrder;
await completer2.future;
return true;
sut = SyncStreamService(
syncApiRepository: mockSyncApiRepo,
syncStreamRepository: mockSyncStreamRepo,
cancelChecker: cancellationChecker.call,
);
await sut.sync();
final events = [
SyncStreamStub.userDeleteV1,
SyncStreamStub.userV1Admin,
SyncStreamStub.partnerDeleteV1,
];
when(() => mockSyncStreamRepo.deleteUsersV1(any())).thenAnswer((_) async {
when(() => cancellationChecker()).thenReturn(true);
});
final batch1 = SyncStreamStub.userEvents;
final batch2 = SyncStreamStub.partnerEvents;
await handleEventsCallback(events, mockAbortCallbackWrapper.call);
final syncFuture = sut.syncUsers();
await pumpEventQueue();
verify(() => mockSyncStreamRepo.deleteUsersV1(any())).called(1);
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
streamController.add(batch1);
await pumpEventQueue();
// Small delay to ensure the first handler starts
await Future.delayed(const Duration(milliseconds: 20));
verify(() => mockAbortCallbackWrapper()).called(1);
expect(handler1StartOrder, 1, reason: "Handler 1 should start first");
expect(handler1Calls, 1);
streamController.add(batch2);
await pumpEventQueue();
// Small delay
await Future.delayed(const Duration(milliseconds: 20));
expect(handler2StartOrder, -1, reason: "Handler 2 should wait");
expect(handler2Calls, 0);
completer1.complete();
await pumpEventQueue(times: 40);
// Small delay to ensure the second handler starts
await Future.delayed(const Duration(milliseconds: 20));
expect(handler2StartOrder, 2, reason: "Handler 2 should start after H1");
expect(handler2Calls, 1);
completer2.complete();
await pumpEventQueue(times: 40);
// Small delay before closing the stream
await Future.delayed(const Duration(milliseconds: 20));
if (!streamController.isClosed) {
await streamController.close();
}
await pumpEventQueue(times: 40);
// Small delay to ensure the sync completes
await Future.delayed(const Duration(milliseconds: 20));
await syncFuture;
verify(() => mockSyncStreamRepo.updateUsersV1(any())).called(1);
verify(() => mockSyncStreamRepo.updatePartnerV1(any())).called(1);
verify(() => mockSyncApiRepo.ack(any())).called(2);
verify(() => mockSyncApiRepo.ack(["2"])).called(1);
});
test(
"stops processing and ack when cancel checker is completed",
"aborts and stops processing if cancelled before processing batch",
() async {
final cancellationChecker = _MockCancellationWrapper();
when(() => cancellationChecker.isCancelled()).thenAnswer((_) => false);
when(() => cancellationChecker()).thenReturn(false);
final processingCompleter = Completer<void>();
bool handler1Started = false;
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
.thenAnswer((_) async {
handler1Started = true;
return processingCompleter.future;
});
sut = SyncStreamService(
syncApiRepository: mockSyncApiRepo,
syncStreamRepository: mockSyncStreamRepo,
cancelChecker: cancellationChecker.isCancelled,
cancelChecker: cancellationChecker.call,
);
final processingCompleter = Completer<void>();
bool handlerStarted = false;
await sut.sync();
// Make handler wait so we can cancel it mid-flight
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
.thenAnswer((_) async {
handlerStarted = true;
await processingCompleter
.future; // Wait indefinitely until test completes it
return true;
});
final syncFuture = sut.syncUsers();
await pumpEventQueue(times: 30);
streamController.add(SyncStreamStub.userEvents);
// Ensure processing starts
await Future.delayed(const Duration(milliseconds: 10));
expect(handlerStarted, isTrue, reason: "Handler should have started");
when(() => cancellationChecker.isCancelled()).thenAnswer((_) => true);
// Allow cancellation logic to propagate
await Future.delayed(const Duration(milliseconds: 10));
// Complete the handler's completer after cancellation signal
// to ensure the cancellation logic itself isn't blocked by the handler.
processingCompleter.complete();
await expectLater(syncFuture, throwsA(isA<CanceledError>()));
// Verify that ack was NOT called because processing was cancelled
verifyNever(() => mockSyncApiRepo.ack(any()));
},
);
test("completes successfully when ack call throws an exception", () async {
when(() => mockSyncApiRepo.ack(any())).thenThrow(Exception("Ack Error"));
final events = [
...SyncStreamStub.userEvents,
...SyncStreamStub.partnerEvents,
];
// Should still complete even if ack fails
await triggerSyncAndEmit(events);
verify(() => mockSyncApiRepo.ack(any()))
.called(1); // Verify ack was attempted
});
test("waits for processing to finish if onDone called early", () async {
final processingCompleter = Completer<void>();
bool handlerFinished = false;
when(() => mockSyncStreamRepo.updateUsersV1(any())).thenAnswer((_) async {
await processingCompleter.future; // Wait inside handler
handlerFinished = true;
return true;
});
final syncFuture = sut.syncUsers();
// Allow listener to attach
// This is necessary to ensure the stream is ready to receive events
await Future.delayed(Duration.zero);
streamController.add(SyncStreamStub.userEvents); // Emit batch
await Future.delayed(
const Duration(milliseconds: 10),
); // Ensure processing starts
await streamController
.close(); // Close stream (triggers onDone internally)
await Future.delayed(
const Duration(milliseconds: 10),
); // Give onDone a chance to fire
// At this point, onDone was called, but processing is blocked
expect(handlerFinished, isFalse);
processingCompleter.complete(); // Allow processing to finish
await syncFuture; // Now the main future should complete
expect(handlerFinished, isTrue);
verify(() => mockSyncApiRepo.ack(any())).called(1);
});
test("processes events in the defined _kSyncTypeOrder", () async {
final future = sut.syncUsers();
await pumpEventQueue();
if (!streamController.isClosed) {
final events = [
SyncEvent(
type: SyncEntityType.partnerV1,
data: SyncStreamStub.partnerV1,
ack: "1",
), // Should be processed last
SyncEvent(
type: SyncEntityType.userV1,
data: SyncStreamStub.userV1Admin,
ack: "2",
), // Should be processed second
SyncEvent(
type: SyncEntityType.partnerDeleteV1,
data: SyncStreamStub.partnerDeleteV1,
ack: "3",
), // Should be processed third
SyncEvent(
type: SyncEntityType.userDeleteV1,
data: SyncStreamStub.userDeleteV1,
ack: "4",
), // Should be processed first
SyncStreamStub.userDeleteV1,
SyncStreamStub.userV1Admin,
SyncStreamStub.partnerDeleteV1,
];
streamController.add(events);
await streamController.close();
}
await future;
final processingFuture =
handleEventsCallback(events, mockAbortCallbackWrapper.call);
await pumpEventQueue();
verifyInOrder([
() => mockSyncStreamRepo.deleteUsersV1(any()),
() => mockSyncStreamRepo.updateUsersV1(any()),
() => mockSyncStreamRepo.deletePartnerV1(any()),
() => mockSyncStreamRepo.updatePartnerV1(any()),
// Verify ack happens after all processing
() => mockSyncApiRepo.ack(any()),
]);
});
});
expect(handler1Started, isTrue);
group("syncUsers", () {
test("calls getSyncEvents with correct types", () async {
// Need to close the stream for the future to complete
final future = sut.syncUsers();
await streamController.close();
await future;
// Signal cancellation while handler 1 is waiting
when(() => cancellationChecker()).thenReturn(true);
await pumpEventQueue();
verify(
() => mockSyncApiRepo.getSyncEvents([
SyncRequestType.usersV1,
SyncRequestType.partnersV1,
]),
).called(1);
});
processingCompleter.complete();
await processingFuture;
test("calls repository methods with correctly grouped data", () async {
final events = [
...SyncStreamStub.userEvents,
...SyncStreamStub.partnerEvents,
];
await triggerSyncAndEmit(events);
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
// Verify each handler was called with the correct list of data payloads
verify(
() => mockSyncStreamRepo.updateUsersV1(
[SyncStreamStub.userV1Admin, SyncStreamStub.userV1User],
),
).called(1);
verify(
() => mockSyncStreamRepo.deleteUsersV1([SyncStreamStub.userDeleteV1]),
).called(1);
verify(
() => mockSyncStreamRepo.updatePartnerV1([SyncStreamStub.partnerV1]),
).called(1);
verify(
() => mockSyncStreamRepo
.deletePartnerV1([SyncStreamStub.partnerDeleteV1]),
).called(1);
});
verify(() => mockSyncApiRepo.ack(["2"])).called(1);
},
);
});
}
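
Taken together, these tests pin down the callback contract of the reworked service: events are grouped by type, handled in a fixed order, each successfully handled group is acked with its last ack value, and a cancellation check aborts processing without acking. A hedged TypeScript sketch of that contract (names and the order list are illustrative, not the actual Dart implementation):

type SyncEvent = { type: string; data: unknown; ack: string };
type Handler = (data: unknown[]) => Promise<boolean>;

// Fixed processing order, standing in for the _kSyncTypeOrder the tests verify.
const kOrder = ['userDeleteV1', 'userV1', 'partnerDeleteV1', 'partnerV1'];

async function handleEvents(
  events: SyncEvent[],
  handlers: Record<string, Handler>,
  ack: (acks: string[]) => Promise<void>,
  cancelled: () => boolean,
): Promise<void> {
  for (const type of kOrder) {
    if (cancelled()) return; // abort without acking once cancellation is signalled
    const batch = events.filter((e) => e.type === type);
    if (batch.length === 0) continue;
    const ok = await handlers[type]?.(batch.map((e) => e.data));
    if (ok) await ack([batch.at(-1)!.ack]); // ack only after the handler succeeds
  }
}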

View File

@ -2,44 +2,44 @@ import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:openapi/api.dart';
abstract final class SyncStreamStub {
static final userV1Admin = SyncUserV1(
deletedAt: DateTime(2020),
email: "admin@admin",
id: "1",
name: "Admin",
);
static final userV1User = SyncUserV1(
deletedAt: DateTime(2021),
email: "user@user",
id: "2",
name: "User",
);
static final userDeleteV1 = SyncUserDeleteV1(userId: "2");
static final userEvents = [
SyncEvent(type: SyncEntityType.userV1, data: userV1Admin, ack: "1"),
SyncEvent(
type: SyncEntityType.userDeleteV1,
data: userDeleteV1,
ack: "2",
static final userV1Admin = SyncEvent(
type: SyncEntityType.userV1,
data: SyncUserV1(
deletedAt: DateTime(2020),
email: "admin@admin",
id: "1",
name: "Admin",
),
SyncEvent(type: SyncEntityType.userV1, data: userV1User, ack: "5"),
];
ack: "1",
);
static final userV1User = SyncEvent(
type: SyncEntityType.userV1,
data: SyncUserV1(
deletedAt: DateTime(2021),
email: "user@user",
id: "5",
name: "User",
),
ack: "5",
);
static final userDeleteV1 = SyncEvent(
type: SyncEntityType.userDeleteV1,
data: SyncUserDeleteV1(userId: "2"),
ack: "2",
);
static final partnerV1 = SyncPartnerV1(
inTimeline: true,
sharedById: "1",
sharedWithId: "2",
);
static final partnerDeleteV1 = SyncPartnerDeleteV1(
sharedById: "3",
sharedWithId: "4",
);
static final partnerEvents = [
SyncEvent(
type: SyncEntityType.partnerDeleteV1,
data: partnerDeleteV1,
ack: "4",
static final partnerV1 = SyncEvent(
type: SyncEntityType.partnerV1,
data: SyncPartnerV1(
inTimeline: true,
sharedById: "1",
sharedWithId: "2",
),
SyncEvent(type: SyncEntityType.partnerV1, data: partnerV1, ack: "3"),
];
ack: "3",
);
static final partnerDeleteV1 = SyncEvent(
type: SyncEntityType.partnerDeleteV1,
data: SyncPartnerDeleteV1(sharedById: "3", sharedWithId: "4"),
ack: "4",
);
}

View File

@ -0,0 +1,299 @@
import 'dart:async';
import 'dart:convert';
import 'package:flutter_test/flutter_test.dart';
import 'package:http/http.dart' as http;
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
import 'package:mocktail/mocktail.dart';
import 'package:openapi/api.dart';
import '../../api.mocks.dart';
import '../../service.mocks.dart';
class MockHttpClient extends Mock implements http.Client {}
class MockApiClient extends Mock implements ApiClient {}
class MockStreamedResponse extends Mock implements http.StreamedResponse {}
class FakeBaseRequest extends Fake implements http.BaseRequest {}
String _createJsonLine(String type, Map<String, dynamic> data, String ack) {
return '${jsonEncode({'type': type, 'data': data, 'ack': ack})}\n';
}
void main() {
late SyncApiRepository sut;
late MockApiService mockApiService;
late MockApiClient mockApiClient;
late MockSyncApi mockSyncApi;
late MockHttpClient mockHttpClient;
late MockStreamedResponse mockStreamedResponse;
late StreamController<List<int>> responseStreamController;
late int testBatchSize = 3;
setUp(() {
mockApiService = MockApiService();
mockApiClient = MockApiClient();
mockSyncApi = MockSyncApi();
mockHttpClient = MockHttpClient();
mockStreamedResponse = MockStreamedResponse();
responseStreamController =
StreamController<List<int>>.broadcast(sync: true);
registerFallbackValue(FakeBaseRequest());
when(() => mockApiService.apiClient).thenReturn(mockApiClient);
when(() => mockApiService.syncApi).thenReturn(mockSyncApi);
when(() => mockApiClient.basePath).thenReturn('http://demo.immich.app/api');
when(() => mockApiService.applyToParams(any(), any()))
.thenAnswer((_) async => {});
// Mock HTTP client behavior
when(() => mockHttpClient.send(any()))
.thenAnswer((_) async => mockStreamedResponse);
when(() => mockStreamedResponse.statusCode).thenReturn(200);
when(() => mockStreamedResponse.stream)
.thenAnswer((_) => http.ByteStream(responseStreamController.stream));
when(() => mockHttpClient.close()).thenAnswer((_) => {});
sut = SyncApiRepository(mockApiService);
});
tearDown(() async {
if (!responseStreamController.isClosed) {
await responseStreamController.close();
}
});
Future<void> streamChanges(
Function(List<SyncEvent>, Function() abort) onDataCallback,
) {
return sut.streamChanges(
onDataCallback,
batchSize: testBatchSize,
httpClient: mockHttpClient,
);
}
test('streamChanges stops processing stream when abort is called', () async {
int onDataCallCount = 0;
bool abortWasCalledInCallback = false;
List<SyncEvent> receivedEventsBatch1 = [];
onDataCallback(List<SyncEvent> events, Function() abort) {
onDataCallCount++;
if (onDataCallCount == 1) {
receivedEventsBatch1 = events;
abort();
abortWasCalledInCallback = true;
} else {
fail("onData called more than once after abort was invoked");
}
}
final streamChangesFuture = streamChanges(onDataCallback);
await pumpEventQueue();
for (int i = 0; i < testBatchSize; i++) {
responseStreamController.add(
utf8.encode(
_createJsonLine(
SyncEntityType.userDeleteV1.toString(),
SyncUserDeleteV1(userId: "user$i").toJson(),
'ack$i',
),
),
);
}
for (int i = testBatchSize; i < testBatchSize * 2; i++) {
responseStreamController.add(
utf8.encode(
_createJsonLine(
SyncEntityType.userDeleteV1.toString(),
SyncUserDeleteV1(userId: "user$i").toJson(),
'ack$i',
),
),
);
}
await responseStreamController.close();
await expectLater(streamChangesFuture, completes);
expect(onDataCallCount, 1);
expect(abortWasCalledInCallback, isTrue);
expect(receivedEventsBatch1.length, testBatchSize);
verify(() => mockHttpClient.close()).called(1);
});
test(
'streamChanges does not process remaining lines in finally block if aborted',
() async {
int onDataCallCount = 0;
bool abortWasCalledInCallback = false;
onDataCallback(List<SyncEvent> events, Function() abort) {
onDataCallCount++;
if (onDataCallCount == 1) {
abort();
abortWasCalledInCallback = true;
} else {
fail("onData called more than once after abort was invoked");
}
}
final streamChangesFuture = streamChanges(onDataCallback);
await pumpEventQueue();
for (int i = 0; i < testBatchSize; i++) {
responseStreamController.add(
utf8.encode(
_createJsonLine(
SyncEntityType.userDeleteV1.toString(),
SyncUserDeleteV1(userId: "user$i").toJson(),
'ack$i',
),
),
);
}
// emit a single event to skip batching and trigger finally
responseStreamController.add(
utf8.encode(
_createJsonLine(
SyncEntityType.userDeleteV1.toString(),
SyncUserDeleteV1(userId: "user100").toJson(),
'ack100',
),
),
);
await responseStreamController.close();
await expectLater(streamChangesFuture, completes);
expect(onDataCallCount, 1);
expect(abortWasCalledInCallback, isTrue);
verify(() => mockHttpClient.close()).called(1);
},
);
test(
'streamChanges processes remaining lines in finally block if not aborted',
() async {
int onDataCallCount = 0;
List<SyncEvent> receivedEventsBatch1 = [];
List<SyncEvent> receivedEventsBatch2 = [];
onDataCallback(List<SyncEvent> events, Function() _) {
onDataCallCount++;
if (onDataCallCount == 1) {
receivedEventsBatch1 = events;
} else if (onDataCallCount == 2) {
receivedEventsBatch2 = events;
} else {
fail("onData called more than expected");
}
}
final streamChangesFuture = streamChanges(onDataCallback);
await pumpEventQueue();
// Batch 1
for (int i = 0; i < testBatchSize; i++) {
responseStreamController.add(
utf8.encode(
_createJsonLine(
SyncEntityType.userDeleteV1.toString(),
SyncUserDeleteV1(userId: "user$i").toJson(),
'ack$i',
),
),
);
}
// Partial Batch 2
responseStreamController.add(
utf8.encode(
_createJsonLine(
SyncEntityType.userDeleteV1.toString(),
SyncUserDeleteV1(userId: "user100").toJson(),
'ack100',
),
),
);
await responseStreamController.close();
await expectLater(streamChangesFuture, completes);
expect(onDataCallCount, 2);
expect(receivedEventsBatch1.length, testBatchSize);
expect(receivedEventsBatch2.length, 1);
verify(() => mockHttpClient.close()).called(1);
},
);
test('streamChanges handles stream error gracefully', () async {
final streamError = Exception("Network Error");
int onDataCallCount = 0;
onDataCallback(List<SyncEvent> events, Function() _) {
onDataCallCount++;
}
final streamChangesFuture = streamChanges(onDataCallback);
await pumpEventQueue();
responseStreamController.add(
utf8.encode(
_createJsonLine(
SyncEntityType.userDeleteV1.toString(),
SyncUserDeleteV1(userId: "user1").toJson(),
'ack1',
),
),
);
responseStreamController.addError(streamError);
await expectLater(streamChangesFuture, throwsA(streamError));
expect(onDataCallCount, 0);
verify(() => mockHttpClient.close()).called(1);
});
test('streamChanges throws ApiException on non-200 status code', () async {
when(() => mockStreamedResponse.statusCode).thenReturn(401);
final errorBodyController = StreamController<List<int>>(sync: true);
when(() => mockStreamedResponse.stream)
.thenAnswer((_) => http.ByteStream(errorBodyController.stream));
int onDataCallCount = 0;
onDataCallback(List<SyncEvent> events, Function() _) {
onDataCallCount++;
}
final future = streamChanges(onDataCallback);
errorBodyController.add(utf8.encode('{"error":"Unauthorized"}'));
await errorBodyController.close();
await expectLater(
future,
throwsA(
isA<ApiException>()
.having((e) => e.code, 'code', 401)
.having((e) => e.message, 'message', contains('Unauthorized')),
),
);
expect(onDataCallCount, 0);
verify(() => mockHttpClient.close()).called(1);
});
}
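
These repository tests assume streamChanges reads a newline-delimited JSON body, buffers complete lines into fixed-size batches, honours an abort callback, and flushes any trailing partial batch only when not aborted. A hedged TypeScript sketch of that batching loop over an async iterable of text chunks (all names illustrative):

type SyncEvent = { type: string; data: unknown; ack: string };

async function streamChanges(
  chunks: AsyncIterable<string>,
  batchSize: number,
  onData: (events: SyncEvent[], abort: () => void) => void,
): Promise<void> {
  let buffer = '';
  let batch: SyncEvent[] = [];
  let aborted = false;
  const abort = () => { aborted = true; };

  for await (const chunk of chunks) {
    buffer += chunk;
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? ''; // keep the trailing partial line for the next chunk
    for (const line of lines) {
      if (line.trim() === '') continue;
      batch.push(JSON.parse(line) as SyncEvent);
      if (batch.length === batchSize) {
        onData(batch, abort);
        batch = [];
        if (aborted) return; // "stops processing stream when abort is called"
      }
    }
  }
  if (!aborted && batch.length > 0) onData(batch, abort); // final partial batch
}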

View File

@ -26,9 +26,8 @@
"migrations:generate": "node ./dist/bin/migrations.js generate",
"migrations:create": "node ./dist/bin/migrations.js create",
"migrations:run": "node ./dist/bin/migrations.js run",
"typeorm:migrations:revert": "typeorm migration:revert -d ./dist/bin/database.js",
"typeorm:schema:drop": "typeorm query -d ./dist/bin/database.js 'DROP schema public cascade; CREATE schema public;'",
"typeorm:schema:reset": "npm run typeorm:schema:drop && npm run migrations:run",
"schema:drop": "node ./dist/bin/migrations.js query 'DROP schema public cascade; CREATE schema public;'",
"schema:reset": "npm run schema:drop && npm run migrations:run",
"kysely:codegen": "npx kysely-codegen --include-pattern=\"(public|vectors).*\" --dialect postgres --url postgres://postgres:postgres@localhost/immich --log-level debug --out-file=./src/db.d.ts",
"sync:open-api": "node ./dist/bin/sync-open-api.js",
"sync:sql": "node ./dist/bin/sync-sql.js",

View File

@ -1,11 +0,0 @@
import { ConfigRepository } from 'src/repositories/config.repository';
import { DataSource } from 'typeorm';
const { database } = new ConfigRepository().getEnv();
/**
* @deprecated - DO NOT USE THIS
*
* this export is ONLY to be used for TypeORM commands in package.json#scripts
*/
export const dataSource = new DataSource({ ...database.config.typeorm, host: 'localhost' });

View File

@ -1,8 +1,8 @@
#!/usr/bin/env node
process.env.DB_URL = process.env.DB_URL || 'postgres://postgres:postgres@localhost:5432/immich';
import { Kysely } from 'kysely';
import { writeFileSync } from 'node:fs';
import { Kysely, sql } from 'kysely';
import { mkdirSync, writeFileSync } from 'node:fs';
import { basename, dirname, extname, join } from 'node:path';
import postgres from 'postgres';
import { ConfigRepository } from 'src/repositories/config.repository';
@ -23,8 +23,13 @@ const main = async () => {
}
case 'run': {
const only = process.argv[3] as 'kysely' | 'typeorm' | undefined;
await run(only);
await runMigrations();
return;
}
case 'query': {
const query = process.argv[3];
await runQuery(query);
return;
}
@ -48,14 +53,25 @@ const main = async () => {
}
};
const run = async (only?: 'kysely' | 'typeorm') => {
const getDatabaseClient = () => {
const configRepository = new ConfigRepository();
const { database } = configRepository.getEnv();
const logger = new LoggingRepository(undefined, configRepository);
const db = new Kysely<any>(getKyselyConfig(database.config.kysely));
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
return new Kysely<any>(getKyselyConfig(database.config.kysely));
};
await databaseRepository.runMigrations({ only });
const runQuery = async (query: string) => {
const db = getDatabaseClient();
await sql.raw(query).execute(db);
await db.destroy();
};
const runMigrations = async () => {
const configRepository = new ConfigRepository();
const logger = new LoggingRepository(undefined, configRepository);
const db = getDatabaseClient();
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
await databaseRepository.runMigrations();
await db.destroy();
};
const debug = async () => {
@ -81,7 +97,8 @@ const create = (path: string, up: string[], down: string[]) => {
const filename = `${timestamp}-${name}.ts`;
const folder = dirname(path);
const fullPath = join(folder, filename);
writeFileSync(fullPath, asMigration('typeorm', { name, timestamp, up, down }));
mkdirSync(folder, { recursive: true });
writeFileSync(fullPath, asMigration('kysely', { name, timestamp, up, down }));
console.log(`Wrote ${fullPath}`);
};
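
The new query subcommand routes a raw SQL string through Kysely instead of the removed TypeORM data source. A hedged sketch of that pattern with a throwaway client (node-postgres is used here for illustration; the actual script wires its connection through getKyselyConfig):

import { Kysely, PostgresDialect, sql } from 'kysely';
import pg from 'pg';

// Run one raw statement and always dispose of the client, as the query case above does.
const runRawQuery = async (query: string): Promise<void> => {
  const db = new Kysely<any>({
    dialect: new PostgresDialect({ pool: new pg.Pool({ connectionString: process.env.DB_URL }) }),
  });
  try {
    await sql.raw(query).execute(db);
  } finally {
    await db.destroy();
  }
};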

View File

@ -1,13 +1,13 @@
import { Selectable } from 'kysely';
import { AssetJobStatus as DatabaseAssetJobStatus, Exif as DatabaseExif } from 'src/db';
import { AssetEntity } from 'src/entities/asset.entity';
import { Albums, Exif as DatabaseExif } from 'src/db';
import { MapAsset } from 'src/dtos/asset-response.dto';
import {
AlbumUserRole,
AssetFileType,
AssetStatus,
AssetType,
MemoryType,
Permission,
SharedLinkType,
SourceType,
UserStatus,
} from 'src/enum';
@ -44,7 +44,7 @@ export type Library = {
exclusionPatterns: string[];
deletedAt: Date | null;
refreshedAt: Date | null;
assets?: Asset[];
assets?: MapAsset[];
};
export type AuthApiKey = {
@ -96,7 +96,26 @@ export type Memory = {
data: OnThisDayData;
ownerId: string;
isSaved: boolean;
assets: Asset[];
assets: MapAsset[];
};
export type Asset = {
id: string;
checksum: Buffer<ArrayBufferLike>;
deviceAssetId: string;
deviceId: string;
fileCreatedAt: Date;
fileModifiedAt: Date;
isExternal: boolean;
isVisible: boolean;
libraryId: string | null;
livePhotoVideoId: string | null;
localDateTime: Date;
originalFileName: string;
originalPath: string;
ownerId: string;
sidecarPath: string | null;
type: AssetType;
};
export type User = {
@ -128,39 +147,6 @@ export type StorageAsset = {
encodedVideoPath: string | null;
};
export type Asset = {
createdAt: Date;
updatedAt: Date;
deletedAt: Date | null;
id: string;
updateId: string;
status: AssetStatus;
checksum: Buffer<ArrayBufferLike>;
deviceAssetId: string;
deviceId: string;
duplicateId: string | null;
duration: string | null;
encodedVideoPath: string | null;
fileCreatedAt: Date | null;
fileModifiedAt: Date | null;
isArchived: boolean;
isExternal: boolean;
isFavorite: boolean;
isOffline: boolean;
isVisible: boolean;
libraryId: string | null;
livePhotoVideoId: string | null;
localDateTime: Date | null;
originalFileName: string;
originalPath: string;
ownerId: string;
sidecarPath: string | null;
stack?: Stack | null;
stackId: string | null;
thumbhash: Buffer<ArrayBufferLike> | null;
type: AssetType;
};
export type SidecarWriteAsset = {
id: string;
sidecarPath: string | null;
@ -173,7 +159,7 @@ export type Stack = {
primaryAssetId: string;
owner?: User;
ownerId: string;
assets: AssetEntity[];
assets: MapAsset[];
assetCount?: number;
};
@ -187,6 +173,28 @@ export type AuthSharedLink = {
password: string | null;
};
export type SharedLink = {
id: string;
album?: Album | null;
albumId: string | null;
allowDownload: boolean;
allowUpload: boolean;
assets: MapAsset[];
createdAt: Date;
description: string | null;
expiresAt: Date | null;
key: Buffer;
password: string | null;
showExif: boolean;
type: SharedLinkType;
userId: string;
};
export type Album = Selectable<Albums> & {
owner: User;
assets: MapAsset[];
};
export type AuthSession = {
id: string;
};
@ -256,10 +264,6 @@ export type AssetFace = {
person?: Person | null;
};
export type AssetJobStatus = Selectable<DatabaseAssetJobStatus> & {
asset: AssetEntity;
};
const userColumns = ['id', 'name', 'email', 'profileImagePath', 'profileChangedAt'] as const;
export const columns = {

server/src/db.d.ts
View File

@ -143,8 +143,8 @@ export interface Assets {
duplicateId: string | null;
duration: string | null;
encodedVideoPath: Generated<string | null>;
fileCreatedAt: Timestamp | null;
fileModifiedAt: Timestamp | null;
fileCreatedAt: Timestamp;
fileModifiedAt: Timestamp;
id: Generated<string>;
isArchived: Generated<boolean>;
isExternal: Generated<boolean>;
@ -153,7 +153,7 @@ export interface Assets {
isVisible: Generated<boolean>;
libraryId: string | null;
livePhotoVideoId: string | null;
localDateTime: Timestamp | null;
localDateTime: Timestamp;
originalFileName: string;
originalPath: string;
ownerId: string;
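
Dropping null from fileCreatedAt, fileModifiedAt, and localDateTime tightens every row type Kysely derives from this interface. A small TypeScript sketch of how Generated columns feed Selectable and Insertable (table shape abbreviated, not the full Assets interface):

import { Generated, Insertable, Selectable } from 'kysely';

interface AssetsDemo {
  id: Generated<string>; // generated: optional on insert, present on select
  fileCreatedAt: Date;   // non-null after this change
}

type AssetRow = Selectable<AssetsDemo>; // { id: string; fileCreatedAt: Date }
type NewAsset = Insertable<AssetsDemo>; // { id?: string; fileCreatedAt: Date }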

View File

@ -2,10 +2,10 @@ import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { ArrayNotEmpty, IsArray, IsEnum, IsString, ValidateNested } from 'class-validator';
import _ from 'lodash';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AlbumUser, AuthSharedLink, User } from 'src/database';
import { AssetResponseDto, MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
import { AlbumEntity } from 'src/entities/album.entity';
import { AlbumUserRole, AssetOrder } from 'src/enum';
import { Optional, ValidateBoolean, ValidateUUID } from 'src/validation';
@ -142,7 +142,23 @@ export class AlbumResponseDto {
order?: AssetOrder;
}
export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDto): AlbumResponseDto => {
export type MapAlbumDto = {
albumUsers?: AlbumUser[];
assets?: MapAsset[];
sharedLinks?: AuthSharedLink[];
albumName: string;
description: string;
albumThumbnailAssetId: string | null;
createdAt: Date;
updatedAt: Date;
id: string;
ownerId: string;
owner: User;
isActivityEnabled: boolean;
order: AssetOrder;
};
export const mapAlbum = (entity: MapAlbumDto, withAssets: boolean, auth?: AuthDto): AlbumResponseDto => {
const albumUsers: AlbumUserResponseDto[] = [];
if (entity.albumUsers) {
@ -159,7 +175,7 @@ export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDt
const assets = entity.assets || [];
const hasSharedLink = entity.sharedLinks?.length > 0;
const hasSharedLink = !!entity.sharedLinks && entity.sharedLinks.length > 0;
const hasSharedUser = albumUsers.length > 0;
let startDate = assets.at(0)?.localDateTime;
@ -190,5 +206,5 @@ export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDt
};
};
export const mapAlbumWithAssets = (entity: AlbumEntity) => mapAlbum(entity, true);
export const mapAlbumWithoutAssets = (entity: AlbumEntity) => mapAlbum(entity, false);
export const mapAlbumWithAssets = (entity: MapAlbumDto) => mapAlbum(entity, true);
export const mapAlbumWithoutAssets = (entity: MapAlbumDto) => mapAlbum(entity, false);

View File

@ -1,5 +1,6 @@
import { ApiProperty } from '@nestjs/swagger';
import { AssetFace } from 'src/database';
import { Selectable } from 'kysely';
import { AssetFace, AssetFile, Exif, Stack, Tag, User } from 'src/database';
import { PropertyLifecycle } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { ExifResponseDto, mapExif } from 'src/dtos/exif.dto';
@ -11,8 +12,7 @@ import {
} from 'src/dtos/person.dto';
import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetType } from 'src/enum';
import { AssetStatus, AssetType } from 'src/enum';
import { mimeTypes } from 'src/utils/mime-types';
export class SanitizedAssetResponseDto {
@ -56,6 +56,44 @@ export class AssetResponseDto extends SanitizedAssetResponseDto {
resized?: boolean;
}
export type MapAsset = {
createdAt: Date;
updatedAt: Date;
deletedAt: Date | null;
id: string;
updateId: string;
status: AssetStatus;
checksum: Buffer<ArrayBufferLike>;
deviceAssetId: string;
deviceId: string;
duplicateId: string | null;
duration: string | null;
encodedVideoPath: string | null;
exifInfo?: Selectable<Exif> | null;
faces?: AssetFace[];
fileCreatedAt: Date;
fileModifiedAt: Date;
files?: AssetFile[];
isArchived: boolean;
isExternal: boolean;
isFavorite: boolean;
isOffline: boolean;
isVisible: boolean;
libraryId: string | null;
livePhotoVideoId: string | null;
localDateTime: Date;
originalFileName: string;
originalPath: string;
owner?: User | null;
ownerId: string;
sidecarPath: string | null;
stack?: Stack | null;
stackId: string | null;
tags?: Tag[];
thumbhash: Buffer<ArrayBufferLike> | null;
type: AssetType;
};
export class AssetStackResponseDto {
id!: string;
@ -72,7 +110,7 @@ export type AssetMapOptions = {
};
// TODO: this is inefficient
const peopleWithFaces = (faces: AssetFace[]): PersonWithFacesResponseDto[] => {
const peopleWithFaces = (faces?: AssetFace[]): PersonWithFacesResponseDto[] => {
const result: PersonWithFacesResponseDto[] = [];
if (faces) {
for (const face of faces) {
@ -90,7 +128,7 @@ const peopleWithFaces = (faces: AssetFace[]): PersonWithFacesResponseDto[] => {
return result;
};
const mapStack = (entity: AssetEntity) => {
const mapStack = (entity: { stack?: Stack | null }) => {
if (!entity.stack) {
return null;
}
@ -111,7 +149,7 @@ export const hexOrBufferToBase64 = (encoded: string | Buffer) => {
return encoded.toString('base64');
};
export function mapAsset(entity: AssetEntity, options: AssetMapOptions = {}): AssetResponseDto {
export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
const { stripMetadata = false, withStack = false } = options;
if (stripMetadata) {
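
hexOrBufferToBase64 appears only partially in the hunk above (the Buffer branch). A hedged reconstruction of the full shape, assuming the string branch carries Postgres bytea text output with a leading \x prefix; this is an illustration, not the confirmed implementation:

// Accepts either a hex string (e.g. '\\xdeadbeef' from Postgres) or a raw Buffer.
export const hexOrBufferToBase64 = (encoded: string | Buffer): string => {
  if (typeof encoded === 'string') {
    return Buffer.from(encoded.slice(2), 'hex').toString('base64'); // drop the '\\x' prefix (assumed)
  }
  return encoded.toString('base64');
};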

View File

@ -4,7 +4,6 @@ import { IsEnum, IsInt, IsObject, IsPositive, ValidateNested } from 'class-valid
import { Memory } from 'src/database';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { MemoryType } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';
@ -103,6 +102,6 @@ export const mapMemory = (entity: Memory, auth: AuthDto): MemoryResponseDto => {
type: entity.type as MemoryType,
data: entity.data as unknown as MemoryData,
isSaved: entity.isSaved,
assets: ('assets' in entity ? entity.assets : []).map((asset) => mapAsset(asset as AssetEntity, { auth })),
assets: ('assets' in entity ? entity.assets : []).map((asset) => mapAsset(asset, { auth })),
};
};

View File

@ -1,9 +1,9 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsEnum, IsString } from 'class-validator';
import _ from 'lodash';
import { SharedLink } from 'src/database';
import { AlbumResponseDto, mapAlbumWithoutAssets } from 'src/dtos/album.dto';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { SharedLinkType } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';
@ -102,7 +102,7 @@ export class SharedLinkResponseDto {
showMetadata!: boolean;
}
export function mapSharedLink(sharedLink: SharedLinkEntity): SharedLinkResponseDto {
export function mapSharedLink(sharedLink: SharedLink): SharedLinkResponseDto {
const linkAssets = sharedLink.assets || [];
return {
@ -122,7 +122,7 @@ export function mapSharedLink(sharedLink: SharedLinkEntity): SharedLinkResponseD
};
}
export function mapSharedLinkWithoutMetadata(sharedLink: SharedLinkEntity): SharedLinkResponseDto {
export function mapSharedLinkWithoutMetadata(sharedLink: SharedLink): SharedLinkResponseDto {
const linkAssets = sharedLink.assets || [];
const albumAssets = (sharedLink?.album?.assets || []).map((asset) => asset);
@ -137,7 +137,7 @@ export function mapSharedLinkWithoutMetadata(sharedLink: SharedLinkEntity): Shar
type: sharedLink.type,
createdAt: sharedLink.createdAt,
expiresAt: sharedLink.expiresAt,
assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })) as AssetResponseDto[],
assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })),
album: sharedLink.album ? mapAlbumWithoutAssets(sharedLink.album) : undefined,
allowUpload: sharedLink.allowUpload,
allowDownload: sharedLink.allowDownload,

View File

@ -1,23 +0,0 @@
import { AlbumUser, User } from 'src/database';
import { AssetEntity } from 'src/entities/asset.entity';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { AssetOrder } from 'src/enum';
export class AlbumEntity {
id!: string;
owner!: User;
ownerId!: string;
albumName!: string;
description!: string;
createdAt!: Date;
updatedAt!: Date;
updateId?: string;
deletedAt!: Date | null;
albumThumbnailAsset!: AssetEntity | null;
albumThumbnailAssetId!: string | null;
albumUsers!: AlbumUser[];
assets!: AssetEntity[];
sharedLinks!: SharedLinkEntity[];
isActivityEnabled!: boolean;
order!: AssetOrder;
}

View File

@ -1,270 +0,0 @@
import { DeduplicateJoinsPlugin, ExpressionBuilder, Kysely, SelectQueryBuilder, sql } from 'kysely';
import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
import { AssetFace, AssetFile, AssetJobStatus, columns, Exif, Stack, Tag, User } from 'src/database';
import { DB } from 'src/db';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { AssetFileType, AssetStatus, AssetType } from 'src/enum';
import { TimeBucketSize } from 'src/repositories/asset.repository';
import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
import { anyUuid, asUuid } from 'src/utils/database';
export const ASSET_CHECKSUM_CONSTRAINT = 'UQ_assets_owner_checksum';
export class AssetEntity {
id!: string;
deviceAssetId!: string;
owner!: User;
ownerId!: string;
libraryId?: string | null;
deviceId!: string;
type!: AssetType;
status!: AssetStatus;
originalPath!: string;
files!: AssetFile[];
thumbhash!: Buffer | null;
encodedVideoPath!: string | null;
createdAt!: Date;
updatedAt!: Date;
updateId?: string;
deletedAt!: Date | null;
fileCreatedAt!: Date;
localDateTime!: Date;
fileModifiedAt!: Date;
isFavorite!: boolean;
isArchived!: boolean;
isExternal!: boolean;
isOffline!: boolean;
checksum!: Buffer; // sha1 checksum
duration!: string | null;
isVisible!: boolean;
livePhotoVideo!: AssetEntity | null;
livePhotoVideoId!: string | null;
originalFileName!: string;
sidecarPath!: string | null;
exifInfo?: Exif;
tags?: Tag[];
sharedLinks!: SharedLinkEntity[];
faces!: AssetFace[];
stackId?: string | null;
stack?: Stack | null;
jobStatus?: AssetJobStatus;
duplicateId!: string | null;
}
export function withExif<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
return qb
.leftJoin('exif', 'assets.id', 'exif.assetId')
.select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif | null>().as('exifInfo'));
}
export function withExifInner<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
return qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif>().as('exifInfo'));
}
export function withSmartSearch<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
return qb
.leftJoin('smart_search', 'assets.id', 'smart_search.assetId')
.select((eb) => eb.fn.toJson(eb.table('smart_search')).as('smartSearch'));
}
export function withFaces(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
return jsonArrayFrom(
eb
.selectFrom('asset_faces')
.selectAll('asset_faces')
.whereRef('asset_faces.assetId', '=', 'assets.id')
.$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
).as('faces');
}
export function withFiles(eb: ExpressionBuilder<DB, 'assets'>, type?: AssetFileType) {
return jsonArrayFrom(
eb
.selectFrom('asset_files')
.select(columns.assetFiles)
.whereRef('asset_files.assetId', '=', 'assets.id')
.$if(!!type, (qb) => qb.where('asset_files.type', '=', type!)),
).as('files');
}
export function withFacesAndPeople(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
return jsonArrayFrom(
eb
.selectFrom('asset_faces')
.leftJoinLateral(
(eb) =>
eb.selectFrom('person').selectAll('person').whereRef('asset_faces.personId', '=', 'person.id').as('person'),
(join) => join.onTrue(),
)
.selectAll('asset_faces')
.select((eb) => eb.table('person').as('person'))
.whereRef('asset_faces.assetId', '=', 'assets.id')
.$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
).as('faces');
}
export function hasPeople<O>(qb: SelectQueryBuilder<DB, 'assets', O>, personIds: string[]) {
return qb.innerJoin(
(eb) =>
eb
.selectFrom('asset_faces')
.select('assetId')
.where('personId', '=', anyUuid(personIds!))
.where('deletedAt', 'is', null)
.groupBy('assetId')
.having((eb) => eb.fn.count('personId').distinct(), '=', personIds.length)
.as('has_people'),
(join) => join.onRef('has_people.assetId', '=', 'assets.id'),
);
}
export function hasTags<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagIds: string[]) {
return qb.innerJoin(
(eb) =>
eb
.selectFrom('tag_asset')
.select('assetsId')
.innerJoin('tags_closure', 'tag_asset.tagsId', 'tags_closure.id_descendant')
.where('tags_closure.id_ancestor', '=', anyUuid(tagIds))
.groupBy('assetsId')
.having((eb) => eb.fn.count('tags_closure.id_ancestor').distinct(), '>=', tagIds.length)
.as('has_tags'),
(join) => join.onRef('has_tags.assetsId', '=', 'assets.id'),
);
}
export function withOwner(eb: ExpressionBuilder<DB, 'assets'>) {
return jsonObjectFrom(eb.selectFrom('users').selectAll().whereRef('users.id', '=', 'assets.ownerId')).as('owner');
}
export function withLibrary(eb: ExpressionBuilder<DB, 'assets'>) {
return jsonObjectFrom(eb.selectFrom('libraries').selectAll().whereRef('libraries.id', '=', 'assets.libraryId')).as(
'library',
);
}
export function withTags(eb: ExpressionBuilder<DB, 'assets'>) {
return jsonArrayFrom(
eb
.selectFrom('tags')
.select(columns.tag)
.innerJoin('tag_asset', 'tags.id', 'tag_asset.tagsId')
.whereRef('assets.id', '=', 'tag_asset.assetsId'),
).as('tags');
}
export function truncatedDate<O>(size: TimeBucketSize) {
return sql<O>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
}
export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: string) {
return qb.where((eb) =>
eb.exists(
eb
.selectFrom('tags_closure')
.innerJoin('tag_asset', 'tag_asset.tagsId', 'tags_closure.id_descendant')
.whereRef('tag_asset.assetsId', '=', 'assets.id')
.where('tags_closure.id_ancestor', '=', tagId),
),
);
}
const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
/** TODO: This should only be used for search-related queries, not as a general purpose query builder */
export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuilderOptions) {
options.isArchived ??= options.withArchived ? undefined : false;
options.withDeleted ||= !!(options.trashedAfter || options.trashedBefore || options.isOffline);
return kysely
.withPlugin(joinDeduplicationPlugin)
.selectFrom('assets')
.selectAll('assets')
.$if(!!options.tagIds && options.tagIds.length > 0, (qb) => hasTags(qb, options.tagIds!))
.$if(!!options.personIds && options.personIds.length > 0, (qb) => hasPeople(qb, options.personIds!))
.$if(!!options.createdBefore, (qb) => qb.where('assets.createdAt', '<=', options.createdBefore!))
.$if(!!options.createdAfter, (qb) => qb.where('assets.createdAt', '>=', options.createdAfter!))
.$if(!!options.updatedBefore, (qb) => qb.where('assets.updatedAt', '<=', options.updatedBefore!))
.$if(!!options.updatedAfter, (qb) => qb.where('assets.updatedAt', '>=', options.updatedAfter!))
.$if(!!options.trashedBefore, (qb) => qb.where('assets.deletedAt', '<=', options.trashedBefore!))
.$if(!!options.trashedAfter, (qb) => qb.where('assets.deletedAt', '>=', options.trashedAfter!))
.$if(!!options.takenBefore, (qb) => qb.where('assets.fileCreatedAt', '<=', options.takenBefore!))
.$if(!!options.takenAfter, (qb) => qb.where('assets.fileCreatedAt', '>=', options.takenAfter!))
.$if(options.city !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.city', options.city === null ? 'is' : '=', options.city!),
)
.$if(options.state !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.state', options.state === null ? 'is' : '=', options.state!),
)
.$if(options.country !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.country', options.country === null ? 'is' : '=', options.country!),
)
.$if(options.make !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.make', options.make === null ? 'is' : '=', options.make!),
)
.$if(options.model !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.model', options.model === null ? 'is' : '=', options.model!),
)
.$if(options.lensModel !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.lensModel', options.lensModel === null ? 'is' : '=', options.lensModel!),
)
.$if(options.rating !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.rating', options.rating === null ? 'is' : '=', options.rating!),
)
.$if(!!options.checksum, (qb) => qb.where('assets.checksum', '=', options.checksum!))
.$if(!!options.deviceAssetId, (qb) => qb.where('assets.deviceAssetId', '=', options.deviceAssetId!))
.$if(!!options.deviceId, (qb) => qb.where('assets.deviceId', '=', options.deviceId!))
.$if(!!options.id, (qb) => qb.where('assets.id', '=', asUuid(options.id!)))
.$if(!!options.libraryId, (qb) => qb.where('assets.libraryId', '=', asUuid(options.libraryId!)))
.$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
.$if(!!options.encodedVideoPath, (qb) => qb.where('assets.encodedVideoPath', '=', options.encodedVideoPath!))
.$if(!!options.originalPath, (qb) =>
qb.where(sql`f_unaccent(assets."originalPath")`, 'ilike', sql`'%' || f_unaccent(${options.originalPath}) || '%'`),
)
.$if(!!options.originalFileName, (qb) =>
qb.where(
sql`f_unaccent(assets."originalFileName")`,
'ilike',
sql`'%' || f_unaccent(${options.originalFileName}) || '%'`,
),
)
.$if(!!options.description, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where(sql`f_unaccent(exif.description)`, 'ilike', sql`'%' || f_unaccent(${options.description}) || '%'`),
)
.$if(!!options.type, (qb) => qb.where('assets.type', '=', options.type!))
.$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
.$if(options.isOffline !== undefined, (qb) => qb.where('assets.isOffline', '=', options.isOffline!))
.$if(options.isVisible !== undefined, (qb) => qb.where('assets.isVisible', '=', options.isVisible!))
.$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
.$if(options.isEncoded !== undefined, (qb) =>
qb.where('assets.encodedVideoPath', options.isEncoded ? 'is not' : 'is', null),
)
.$if(options.isMotion !== undefined, (qb) =>
qb.where('assets.livePhotoVideoId', options.isMotion ? 'is not' : 'is', null),
)
.$if(!!options.isNotInAlbum, (qb) =>
qb.where((eb) =>
eb.not(eb.exists((eb) => eb.selectFrom('albums_assets_assets').whereRef('assetsId', '=', 'assets.id'))),
),
)
.$if(!!options.withExif, withExifInner)
.$if(!!(options.withFaces || options.withPeople || options.personIds), (qb) => qb.select(withFacesAndPeople))
.$if(!options.withDeleted, (qb) => qb.where('assets.deletedAt', 'is', null));
}
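
searchAssetBuilder composes dozens of optional filters with Kysely's $if, which applies a clause only when its guard is truthy, so unset options add nothing to the SQL. A minimal standalone TypeScript sketch of the pattern (toy schema, not the Immich DB types):

import { Kysely } from 'kysely';

interface DemoDB {
  assets: { id: string; ownerId: string; deletedAt: Date | null };
}

// Each $if adds its where-clause only when the guard is true.
function buildSearch(db: Kysely<DemoDB>, options: { ownerId?: string; withDeleted?: boolean }) {
  return db
    .selectFrom('assets')
    .selectAll('assets')
    .$if(!!options.ownerId, (qb) => qb.where('ownerId', '=', options.ownerId!))
    .$if(!options.withDeleted, (qb) => qb.where('deletedAt', 'is', null));
}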

View File

@ -1,20 +0,0 @@
import { AlbumEntity } from 'src/entities/album.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { SharedLinkType } from 'src/enum';
export class SharedLinkEntity {
id!: string;
description!: string | null;
password!: string | null;
userId!: string;
key!: Buffer; // used to access the individual asset
type!: SharedLinkType;
createdAt!: Date;
expiresAt!: Date | null;
allowUpload!: boolean;
allowDownload!: boolean;
showExif!: boolean;
assets!: AssetEntity[];
album?: AlbumEntity;
albumId!: string | null;
}

View File

@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
export class AddMissingIndex1744910873956 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`CREATE INDEX IF NOT EXISTS "IDX_geodata_gist_earthcoord" ON "geodata_places" (ll_to_earth_public(latitude, longitude))`,
);
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP INDEX "IDX_geodata_gist_earthcoord";`);
}
}

View File

@ -82,7 +82,7 @@ from
where
"assets"."id" = any ($1::uuid[])
-- AssetRepository.getByIdsWithAllRelations
-- AssetRepository.getByIdsWithAllRelationsButStacks
select
"assets".*,
(
@ -127,28 +127,13 @@ select
"assets"."id" = "tag_asset"."assetsId"
) as agg
) as "tags",
to_json("exif") as "exifInfo",
to_json("stacked_assets") as "stack"
to_json("exif") as "exifInfo"
from
"assets"
left join "exif" on "assets"."id" = "exif"."assetId"
left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
left join lateral (
select
"asset_stack".*,
array_agg("stacked") as "assets"
from
"assets" as "stacked"
where
"stacked"."stackId" = "asset_stack"."id"
and "stacked"."id" != "asset_stack"."primaryAssetId"
and "stacked"."deletedAt" is null
and "stacked"."isArchived" = $1
group by
"asset_stack"."id"
) as "stacked_assets" on "asset_stack"."id" is not null
where
"assets"."id" = any ($2::uuid[])
"assets"."id" = any ($1::uuid[])
-- AssetRepository.deleteAll
delete from "assets"

View File

@ -1,12 +1,11 @@
import { Injectable } from '@nestjs/common';
import { ExpressionBuilder, Insertable, Kysely, sql, Updateable } from 'kysely';
import { ExpressionBuilder, Insertable, Kysely, NotNull, sql, Updateable } from 'kysely';
import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { columns, Exif } from 'src/database';
import { Albums, DB } from 'src/db';
import { Chunked, ChunkedArray, ChunkedSet, DummyValue, GenerateSql } from 'src/decorators';
import { AlbumUserCreateDto } from 'src/dtos/album.dto';
import { AlbumEntity } from 'src/entities/album.entity';
export interface AlbumAssetCount {
albumId: string;
@ -21,9 +20,9 @@ export interface AlbumInfoOptions {
}
const withOwner = (eb: ExpressionBuilder<DB, 'albums'>) => {
return jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'albums.ownerId')).as(
'owner',
);
return jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'albums.ownerId'))
.$notNull()
.as('owner');
};
const withAlbumUsers = (eb: ExpressionBuilder<DB, 'albums'>) => {
@ -32,12 +31,14 @@ const withAlbumUsers = (eb: ExpressionBuilder<DB, 'albums'>) => {
.selectFrom('albums_shared_users_users as album_users')
.select('album_users.role')
.select((eb) =>
jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'album_users.usersId')).as(
'user',
),
jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'album_users.usersId'))
.$notNull()
.as('user'),
)
.whereRef('album_users.albumsId', '=', 'albums.id'),
).as('albumUsers');
)
.$notNull()
.as('albumUsers');
};
const withSharedLink = (eb: ExpressionBuilder<DB, 'albums'>) => {
@ -53,7 +54,7 @@ const withAssets = (eb: ExpressionBuilder<DB, 'albums'>) => {
.selectFrom('assets')
.selectAll('assets')
.leftJoin('exif', 'assets.id', 'exif.assetId')
.select((eb) => eb.table('exif').as('exifInfo'))
.select((eb) => eb.table('exif').$castTo<Exif>().as('exifInfo'))
.innerJoin('albums_assets_assets', 'albums_assets_assets.assetsId', 'assets.id')
.whereRef('albums_assets_assets.albumsId', '=', 'albums.id')
.where('assets.deletedAt', 'is', null)
@ -69,7 +70,7 @@ export class AlbumRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID, { withAssets: true }] })
async getById(id: string, options: AlbumInfoOptions): Promise<AlbumEntity | undefined> {
async getById(id: string, options: AlbumInfoOptions) {
return this.db
.selectFrom('albums')
.selectAll('albums')
@ -79,11 +80,12 @@ export class AlbumRepository {
.select(withAlbumUsers)
.select(withSharedLink)
.$if(options.withAssets, (eb) => eb.select(withAssets))
.executeTakeFirst() as Promise<AlbumEntity | undefined>;
.$narrowType<{ assets: NotNull }>()
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
async getByAssetId(ownerId: string, assetId: string): Promise<AlbumEntity[]> {
async getByAssetId(ownerId: string, assetId: string) {
return this.db
.selectFrom('albums')
.selectAll('albums')
@ -105,7 +107,7 @@ export class AlbumRepository {
.select(withOwner)
.select(withAlbumUsers)
.orderBy('albums.createdAt', 'desc')
.execute() as unknown as Promise<AlbumEntity[]>;
.execute();
}
@GenerateSql({ params: [[DummyValue.UUID]] })
@ -134,7 +136,7 @@ export class AlbumRepository {
}
@GenerateSql({ params: [DummyValue.UUID] })
async getOwned(ownerId: string): Promise<AlbumEntity[]> {
async getOwned(ownerId: string) {
return this.db
.selectFrom('albums')
.selectAll('albums')
@ -144,14 +146,14 @@ export class AlbumRepository {
.where('albums.ownerId', '=', ownerId)
.where('albums.deletedAt', 'is', null)
.orderBy('albums.createdAt', 'desc')
.execute() as unknown as Promise<AlbumEntity[]>;
.execute();
}
/**
* Get albums shared with and shared by owner.
*/
@GenerateSql({ params: [DummyValue.UUID] })
async getShared(ownerId: string): Promise<AlbumEntity[]> {
async getShared(ownerId: string) {
return this.db
.selectFrom('albums')
.selectAll('albums')
@ -176,14 +178,14 @@ export class AlbumRepository {
.select(withOwner)
.select(withSharedLink)
.orderBy('albums.createdAt', 'desc')
.execute() as unknown as Promise<AlbumEntity[]>;
.execute();
}
/**
* Get albums of owner that are _not_ shared
*/
@GenerateSql({ params: [DummyValue.UUID] })
async getNotShared(ownerId: string): Promise<AlbumEntity[]> {
async getNotShared(ownerId: string) {
return this.db
.selectFrom('albums')
.selectAll('albums')
@ -203,7 +205,7 @@ export class AlbumRepository {
)
.select(withOwner)
.orderBy('albums.createdAt', 'desc')
.execute() as unknown as Promise<AlbumEntity[]>;
.execute();
}
async restoreAll(userId: string): Promise<void> {
@ -262,7 +264,7 @@ export class AlbumRepository {
await this.addAssets(this.db, albumId, assetIds);
}
create(album: Insertable<Albums>, assetIds: string[], albumUsers: AlbumUserCreateDto[]): Promise<AlbumEntity> {
create(album: Insertable<Albums>, assetIds: string[], albumUsers: AlbumUserCreateDto[]) {
return this.db.transaction().execute(async (tx) => {
const newAlbum = await tx.insertInto('albums').values(album).returning('albums.id').executeTakeFirst();
@ -290,11 +292,12 @@ export class AlbumRepository {
.select(withOwner)
.select(withAssets)
.select(withAlbumUsers)
.executeTakeFirst() as unknown as Promise<AlbumEntity>;
.$narrowType<{ assets: NotNull }>()
.executeTakeFirstOrThrow();
});
}
update(id: string, album: Updateable<Albums>): Promise<AlbumEntity> {
update(id: string, album: Updateable<Albums>) {
return this.db
.updateTable('albums')
.set(album)
@ -303,7 +306,7 @@ export class AlbumRepository {
.returning(withOwner)
.returning(withSharedLink)
.returning(withAlbumUsers)
.executeTakeFirst() as unknown as Promise<AlbumEntity>;
.executeTakeFirstOrThrow();
}
async delete(id: string): Promise<void> {
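
The recurring change in this file is dropping "as unknown as Promise<AlbumEntity>" casts in favor of Kysely's type-level tools: $notNull() on jsonObjectFrom selections and $narrowType on builders. A minimal sketch of what $narrowType does to the inferred row type, under a simplified hypothetical schema:

import { Kysely, NotNull } from 'kysely';

interface DB {
  albums: { id: string; ownerId: string; deletedAt: Date | null };
}

// After filtering out null rows, $narrowType tells the compiler the column
// is non-null, so no result-level cast is needed.
async function activeAlbums(db: Kysely<DB>) {
  return db
    .selectFrom('albums')
    .selectAll()
    .where('deletedAt', 'is not', null)
    .$narrowType<{ deletedAt: NotNull }>()
    .execute(); // inferred as { id: string; ownerId: string; deletedAt: Date }[]
}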

View File

@ -5,10 +5,9 @@ import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { withExifInner, withFaces, withFiles } from 'src/entities/asset.entity';
import { AssetFileType } from 'src/enum';
import { StorageAsset } from 'src/types';
import { asUuid } from 'src/utils/database';
import { anyUuid, asUuid, withExifInner, withFaces, withFiles } from 'src/utils/database';
@Injectable()
export class AssetJobRepository {
@ -149,6 +148,21 @@ export class AssetJobRepository {
.executeTakeFirst();
}
getForSyncAssets(ids: string[]) {
return this.db
.selectFrom('assets')
.select([
'assets.id',
'assets.isOffline',
'assets.libraryId',
'assets.originalPath',
'assets.status',
'assets.fileModifiedAt',
])
.where('assets.id', '=', anyUuid(ids))
.execute();
}
private storageTemplateAssetQuery() {
return this.db
.selectFrom('assets')
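
anyUuid and asUuid now live in src/utils/database; judging from the generated SQL snapshot earlier in this commit ("assets"."id" = any ($1::uuid[])), they exist to bind parameters with explicit uuid casts. A rough sketch of helpers with that shape (assumed for illustration, not the project's actual source):

import { sql } from 'kysely';

// Assumed shape: cast one bound string parameter to uuid.
export function asUuid(id: string) {
  return sql<string>`${id}::uuid`;
}

// Assumed shape: bind a string[] so a comparison renders as
// `"id" = any ($1::uuid[])`, letting one prepared statement cover any id count.
export function anyUuid(ids: string[]) {
  return sql<string>`any(${ids}::uuid[])`;
}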

View File

@ -1,14 +1,21 @@
import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, Selectable, UpdateResult, Updateable, sql } from 'kysely';
import { Insertable, Kysely, NotNull, Selectable, UpdateResult, Updateable, sql } from 'kysely';
import { isEmpty, isUndefined, omitBy } from 'lodash';
import { InjectKysely } from 'nestjs-kysely';
import { Stack } from 'src/database';
import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
import {
AssetEntity,
anyUuid,
asUuid,
hasPeople,
removeUndefinedKeys,
searchAssetBuilder,
truncatedDate,
unnest,
withExif,
withFaces,
withFacesAndPeople,
@ -18,12 +25,9 @@ import {
withSmartSearch,
withTagId,
withTags,
} from 'src/entities/asset.entity';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
import { anyUuid, asUuid, removeUndefinedKeys, unnest } from 'src/utils/database';
} from 'src/utils/database';
import { globToSqlPattern } from 'src/utils/misc';
import { Paginated, PaginationOptions, paginationHelper } from 'src/utils/pagination';
import { PaginationOptions, paginationHelper } from 'src/utils/pagination';
export type AssetStats = Record<AssetType, number>;
@ -126,8 +130,6 @@ export interface AssetGetByChecksumOptions {
libraryId?: string;
}
export type AssetPathEntity = Pick<AssetEntity, 'id' | 'originalPath' | 'isOffline'>;
export interface GetByIdsRelations {
exifInfo?: boolean;
faces?: { person?: boolean; withDeleted?: boolean };
@ -141,12 +143,12 @@ export interface GetByIdsRelations {
export interface DuplicateGroup {
duplicateId: string;
assets: AssetEntity[];
assets: MapAsset[];
}
export interface DayOfYearAssets {
yearsAgo: number;
assets: AssetEntity[];
assets: MapAsset[];
}
@Injectable()
@ -234,12 +236,12 @@ export class AssetRepository {
.execute();
}
create(asset: Insertable<Assets>): Promise<AssetEntity> {
return this.db.insertInto('assets').values(asset).returningAll().executeTakeFirst() as any as Promise<AssetEntity>;
create(asset: Insertable<Assets>) {
return this.db.insertInto('assets').values(asset).returningAll().executeTakeFirstOrThrow();
}
createAll(assets: Insertable<Assets>[]): Promise<AssetEntity[]> {
return this.db.insertInto('assets').values(assets).returningAll().execute() as any as Promise<AssetEntity[]>;
createAll(assets: Insertable<Assets>[]) {
return this.db.insertInto('assets').values(assets).returningAll().execute();
}
@GenerateSql({ params: [DummyValue.UUID, { day: 1, month: 1 }] })
@ -299,56 +301,13 @@ export class AssetRepository {
@GenerateSql({ params: [[DummyValue.UUID]] })
@ChunkedArray()
async getByIds(
ids: string[],
{ exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {},
): Promise<AssetEntity[]> {
const res = await this.db
.selectFrom('assets')
.selectAll('assets')
.where('assets.id', '=', anyUuid(ids))
.$if(!!exifInfo, withExif)
.$if(!!faces, (qb) =>
qb.select((eb) =>
faces?.person ? withFacesAndPeople(eb, faces.withDeleted) : withFaces(eb, faces?.withDeleted),
),
)
.$if(!!files, (qb) => qb.select(withFiles))
.$if(!!library, (qb) => qb.select(withLibrary))
.$if(!!owner, (qb) => qb.select(withOwner))
.$if(!!smartSearch, withSmartSearch)
.$if(!!stack, (qb) =>
qb
.leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
.$if(!stack!.assets, (qb) => qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).as('stack')))
.$if(!!stack!.assets, (qb) =>
qb
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.selectAll('asset_stack')
.select((eb) => eb.fn('array_agg', [eb.table('stacked')]).as('assets'))
.whereRef('stacked.stackId', '=', 'asset_stack.id')
.whereRef('stacked.id', '!=', 'asset_stack.primaryAssetId')
.where('stacked.deletedAt', 'is', null)
.where('stacked.isArchived', '=', false)
.groupBy('asset_stack.id')
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
),
)
.$if(!!tags, (qb) => qb.select(withTags))
.execute();
return res as any as AssetEntity[];
getByIds(ids: string[]) {
return this.db.selectFrom('assets').selectAll('assets').where('assets.id', '=', anyUuid(ids)).execute();
}
@GenerateSql({ params: [[DummyValue.UUID]] })
@ChunkedArray()
getByIdsWithAllRelations(ids: string[]): Promise<AssetEntity[]> {
getByIdsWithAllRelationsButStacks(ids: string[]) {
return this.db
.selectFrom('assets')
.selectAll('assets')
@ -356,23 +315,8 @@ export class AssetRepository {
.select(withTags)
.$call(withExif)
.leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.selectAll('asset_stack')
.select((eb) => eb.fn('array_agg', [eb.table('stacked')]).as('assets'))
.whereRef('stacked.stackId', '=', 'asset_stack.id')
.whereRef('stacked.id', '!=', 'asset_stack.primaryAssetId')
.where('stacked.deletedAt', 'is', null)
.where('stacked.isArchived', '=', false)
.groupBy('asset_stack.id')
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
.where('assets.id', '=', anyUuid(ids))
.execute() as any as Promise<AssetEntity[]>;
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
@ -392,36 +336,29 @@ export class AssetRepository {
return assets.map((asset) => asset.deviceAssetId);
}
getByUserId(
pagination: PaginationOptions,
userId: string,
options: Omit<AssetSearchOptions, 'userIds'> = {},
): Paginated<AssetEntity> {
getByUserId(pagination: PaginationOptions, userId: string, options: Omit<AssetSearchOptions, 'userIds'> = {}) {
return this.getAll(pagination, { ...options, userIds: [userId] });
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string): Promise<AssetEntity | undefined> {
getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string) {
return this.db
.selectFrom('assets')
.selectAll('assets')
.where('libraryId', '=', asUuid(libraryId))
.where('originalPath', '=', originalPath)
.limit(1)
.executeTakeFirst() as any as Promise<AssetEntity | undefined>;
.executeTakeFirst();
}
async getAll(
pagination: PaginationOptions,
{ orderDirection, ...options }: AssetSearchOptions = {},
): Paginated<AssetEntity> {
async getAll(pagination: PaginationOptions, { orderDirection, ...options }: AssetSearchOptions = {}) {
const builder = searchAssetBuilder(this.db, options)
.select(withFiles)
.orderBy('assets.createdAt', orderDirection ?? 'asc')
.limit(pagination.take + 1)
.offset(pagination.skip ?? 0);
const items = await builder.execute();
return paginationHelper(items as any as AssetEntity[], pagination.take);
return paginationHelper(items, pagination.take);
}
/**
@ -456,23 +393,22 @@ export class AssetRepository {
}
@GenerateSql({ params: [DummyValue.UUID] })
getById(
id: string,
{ exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {},
): Promise<AssetEntity | undefined> {
getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
return this.db
.selectFrom('assets')
.selectAll('assets')
.where('assets.id', '=', asUuid(id))
.$if(!!exifInfo, withExif)
.$if(!!faces, (qb) => qb.select(faces?.person ? withFacesAndPeople : withFaces))
.$if(!!faces, (qb) => qb.select(faces?.person ? withFacesAndPeople : withFaces).$narrowType<{ faces: NotNull }>())
.$if(!!library, (qb) => qb.select(withLibrary))
.$if(!!owner, (qb) => qb.select(withOwner))
.$if(!!smartSearch, withSmartSearch)
.$if(!!stack, (qb) =>
qb
.leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
.$if(!stack!.assets, (qb) => qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).as('stack')))
.$if(!stack!.assets, (qb) =>
qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).$castTo<Stack | null>().as('stack')),
)
.$if(!!stack!.assets, (qb) =>
qb
.leftJoinLateral(
@ -489,13 +425,13 @@ export class AssetRepository {
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
.select((eb) => eb.fn.toJson(eb.table('stacked_assets')).$castTo<Stack | null>().as('stack')),
),
)
.$if(!!files, (qb) => qb.select(withFiles))
.$if(!!tags, (qb) => qb.select(withTags))
.limit(1)
.executeTakeFirst() as any as Promise<AssetEntity | undefined>;
.executeTakeFirst();
}
@GenerateSql({ params: [[DummyValue.UUID], { deviceId: DummyValue.STRING }] })
@ -524,7 +460,7 @@ export class AssetRepository {
.execute();
}
async update(asset: Updateable<Assets> & { id: string }): Promise<AssetEntity> {
async update(asset: Updateable<Assets> & { id: string }) {
const value = omitBy(asset, isUndefined);
delete value.id;
if (!isEmpty(value)) {
@ -534,10 +470,10 @@ export class AssetRepository {
.selectAll('assets')
.$call(withExif)
.$call((qb) => qb.select(withFacesAndPeople))
.executeTakeFirst() as Promise<AssetEntity>;
.executeTakeFirst();
}
return this.getById(asset.id, { exifInfo: true, faces: { person: true } }) as Promise<AssetEntity>;
return this.getById(asset.id, { exifInfo: true, faces: { person: true } });
}
async remove(asset: { id: string }): Promise<void> {
@ -545,7 +481,7 @@ export class AssetRepository {
}
@GenerateSql({ params: [{ ownerId: DummyValue.UUID, libraryId: DummyValue.UUID, checksum: DummyValue.BUFFER }] })
getByChecksum({ ownerId, libraryId, checksum }: AssetGetByChecksumOptions): Promise<AssetEntity | undefined> {
getByChecksum({ ownerId, libraryId, checksum }: AssetGetByChecksumOptions) {
return this.db
.selectFrom('assets')
.selectAll('assets')
@ -553,17 +489,17 @@ export class AssetRepository {
.where('checksum', '=', checksum)
.$call((qb) => (libraryId ? qb.where('libraryId', '=', asUuid(libraryId)) : qb.where('libraryId', 'is', null)))
.limit(1)
.executeTakeFirst() as Promise<AssetEntity | undefined>;
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID, [DummyValue.BUFFER]] })
getByChecksums(userId: string, checksums: Buffer[]): Promise<AssetEntity[]> {
getByChecksums(userId: string, checksums: Buffer[]) {
return this.db
.selectFrom('assets')
.select(['id', 'checksum', 'deletedAt'])
.where('ownerId', '=', asUuid(userId))
.where('checksum', 'in', checksums)
.execute() as any as Promise<AssetEntity[]>;
.execute();
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.BUFFER] })
@ -580,7 +516,7 @@ export class AssetRepository {
return asset?.id;
}
findLivePhotoMatch(options: LivePhotoSearchOptions): Promise<AssetEntity | undefined> {
findLivePhotoMatch(options: LivePhotoSearchOptions) {
const { ownerId, otherAssetId, livePhotoCID, type } = options;
return this.db
.selectFrom('assets')
@ -591,7 +527,7 @@ export class AssetRepository {
.where('type', '=', type)
.where('exif.livePhotoCID', '=', livePhotoCID)
.limit(1)
.executeTakeFirst() as Promise<AssetEntity | undefined>;
.executeTakeFirst();
}
@GenerateSql(
@ -600,7 +536,7 @@ export class AssetRepository {
params: [DummyValue.PAGINATION, property],
})),
)
async getWithout(pagination: PaginationOptions, property: WithoutProperty): Paginated<AssetEntity> {
async getWithout(pagination: PaginationOptions, property: WithoutProperty) {
const items = await this.db
.selectFrom('assets')
.selectAll('assets')
@ -662,7 +598,7 @@ export class AssetRepository {
.orderBy('createdAt')
.execute();
return paginationHelper(items as any as AssetEntity[], pagination.take);
return paginationHelper(items, pagination.take);
}
getStatistics(ownerId: string, { isArchived, isFavorite, isTrashed }: AssetStatsOptions): Promise<AssetStats> {
@ -681,7 +617,7 @@ export class AssetRepository {
.executeTakeFirstOrThrow();
}
getRandom(userIds: string[], take: number): Promise<AssetEntity[]> {
getRandom(userIds: string[], take: number) {
return this.db
.selectFrom('assets')
.selectAll('assets')
@ -691,7 +627,7 @@ export class AssetRepository {
.where('deletedAt', 'is', null)
.orderBy((eb) => eb.fn('random'))
.limit(take)
.execute() as any as Promise<AssetEntity[]>;
.execute();
}
@GenerateSql({ params: [{ size: TimeBucketSize.MONTH }] })
@ -744,7 +680,7 @@ export class AssetRepository {
}
@GenerateSql({ params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }] })
async getTimeBucket(timeBucket: string, options: TimeBucketOptions): Promise<AssetEntity[]> {
async getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
return this.db
.selectFrom('assets')
.selectAll('assets')
@ -777,7 +713,7 @@ export class AssetRepository {
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
.select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack')),
)
.$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
.$if(options.isDuplicate !== undefined, (qb) =>
@ -789,11 +725,11 @@ export class AssetRepository {
.where('assets.isVisible', '=', true)
.where(truncatedDate(options.size), '=', timeBucket.replace(/^[+-]/, ''))
.orderBy('assets.localDateTime', options.order ?? 'desc')
.execute() as any as Promise<AssetEntity[]>;
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
getDuplicates(userId: string): Promise<DuplicateGroup[]> {
getDuplicates(userId: string) {
return (
this.db
.with('duplicates', (qb) =>
@ -810,9 +746,15 @@ export class AssetRepository {
(join) => join.onTrue(),
)
.select('assets.duplicateId')
.select((eb) => eb.fn('jsonb_agg', [eb.table('asset')]).as('assets'))
.select((eb) =>
eb
.fn('jsonb_agg', [eb.table('asset')])
.$castTo<MapAsset[]>()
.as('assets'),
)
.where('assets.ownerId', '=', asUuid(userId))
.where('assets.duplicateId', 'is not', null)
.$narrowType<{ duplicateId: NotNull }>()
.where('assets.deletedAt', 'is', null)
.where('assets.isVisible', '=', true)
.where('assets.stackId', 'is', null)
@ -837,7 +779,7 @@ export class AssetRepository {
.where(({ not, exists }) =>
not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
)
.execute() as any as Promise<DuplicateGroup[]>
.execute()
);
}
@ -881,7 +823,7 @@ export class AssetRepository {
},
],
})
getAllForUserFullSync(options: AssetFullSyncOptions): Promise<AssetEntity[]> {
getAllForUserFullSync(options: AssetFullSyncOptions) {
const { ownerId, lastId, updatedUntil, limit } = options;
return this.db
.selectFrom('assets')
@ -899,18 +841,18 @@ export class AssetRepository {
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
.select((eb) => eb.fn.toJson(eb.table('stacked_assets')).$castTo<Stack | null>().as('stack'))
.where('assets.ownerId', '=', asUuid(ownerId))
.where('assets.isVisible', '=', true)
.where('assets.updatedAt', '<=', updatedUntil)
.$if(!!lastId, (qb) => qb.where('assets.id', '>', lastId!))
.orderBy('assets.id')
.limit(limit)
.execute() as any as Promise<AssetEntity[]>;
.execute();
}
@GenerateSql({ params: [{ userIds: [DummyValue.UUID], updatedAfter: DummyValue.DATE, limit: 100 }] })
async getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise<AssetEntity[]> {
async getChangedDeltaSync(options: AssetDeltaSyncOptions) {
return this.db
.selectFrom('assets')
.selectAll('assets')
@ -927,12 +869,12 @@ export class AssetRepository {
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
.select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack'))
.where('assets.ownerId', '=', anyUuid(options.userIds))
.where('assets.isVisible', '=', true)
.where('assets.updatedAt', '>', options.updatedAfter)
.limit(options.limit)
.execute() as any as Promise<AssetEntity[]>;
.execute();
}
async upsertFile(file: Pick<Insertable<AssetFiles>, 'assetId' | 'path' | 'type'>): Promise<void> {
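
$castTo appears throughout this file wherever to_json/jsonb_agg output loses its element type; it relabels the expression's TypeScript type without changing the emitted SQL. The idiom in isolation, with a hypothetical Stack row shape:

import { ExpressionBuilder } from 'kysely';

interface Stack {
  id: string;
  primaryAssetId: string;
}

interface DB {
  asset_stack: Stack;
}

// to_json() is typed as opaque JSON; $castTo relabels it as the row shape the
// join is known to produce (or null when a left join finds no stack).
const withStack = (eb: ExpressionBuilder<DB, 'asset_stack'>) =>
  eb.fn.toJson(eb.table('asset_stack')).$castTo<Stack | null>().as('stack');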

View File

@ -2,7 +2,6 @@ import { Injectable } from '@nestjs/common';
import AsyncLock from 'async-lock';
import { FileMigrationProvider, Kysely, Migrator, sql, Transaction } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { existsSync } from 'node:fs';
import { readdir } from 'node:fs/promises';
import { join } from 'node:path';
import semver from 'semver';
@ -197,62 +196,54 @@ export class DatabaseRepository {
return dimSize;
}
async runMigrations(options?: { transaction?: 'all' | 'none' | 'each'; only?: 'kysely' | 'typeorm' }): Promise<void> {
async runMigrations(options?: { transaction?: 'all' | 'none' | 'each' }): Promise<void> {
const { database } = this.configRepository.getEnv();
if (options?.only !== 'kysely') {
const dataSource = new DataSource(database.config.typeorm);
this.logger.log('Running migrations, this may take a while');
this.logger.log('Running migrations, this may take a while');
const tableExists = sql<{ result: string | null }>`select to_regclass('migrations') as "result"`;
const { rows } = await tableExists.execute(this.db);
const hasTypeOrmMigrations = !!rows[0]?.result;
if (hasTypeOrmMigrations) {
this.logger.debug('Running typeorm migrations');
const dataSource = new DataSource(database.config.typeorm);
await dataSource.initialize();
await dataSource.runMigrations(options);
await dataSource.destroy();
this.logger.debug('Finished running typeorm migrations');
}
if (options?.only !== 'typeorm') {
// eslint-disable-next-line unicorn/prefer-module
const migrationFolder = join(__dirname, '..', 'schema/migrations');
this.logger.debug('Running kysely migrations');
const migrator = new Migrator({
db: this.db,
migrationLockTableName: 'kysely_migrations_lock',
migrationTableName: 'kysely_migrations',
provider: new FileMigrationProvider({
fs: { readdir },
path: { join },
// eslint-disable-next-line unicorn/prefer-module
migrationFolder: join(__dirname, '..', 'schema/migrations'),
}),
});
// TODO remove after we have at least one kysely migration
if (!existsSync(migrationFolder)) {
return;
const { error, results } = await migrator.migrateToLatest();
for (const result of results ?? []) {
if (result.status === 'Success') {
this.logger.log(`Migration "${result.migrationName}" succeeded`);
}
this.logger.debug('Running kysely migrations');
const migrator = new Migrator({
db: this.db,
migrationLockTableName: 'kysely_migrations_lock',
migrationTableName: 'kysely_migrations',
provider: new FileMigrationProvider({
fs: { readdir },
path: { join },
migrationFolder,
}),
});
const { error, results } = await migrator.migrateToLatest();
for (const result of results ?? []) {
if (result.status === 'Success') {
this.logger.log(`Migration "${result.migrationName}" succeeded`);
}
if (result.status === 'Error') {
this.logger.warn(`Migration "${result.migrationName}" failed`);
}
if (result.status === 'Error') {
this.logger.warn(`Migration "${result.migrationName}" failed`);
}
if (error) {
this.logger.error(`Kysely migrations failed: ${error}`);
throw error;
}
this.logger.debug('Finished running kysely migrations');
}
if (error) {
this.logger.error(`Kysely migrations failed: ${error}`);
throw error;
}
this.logger.debug('Finished running kysely migrations');
}
async withLock<R>(lock: DatabaseLock, callback: () => Promise<R>): Promise<R> {
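
The FileMigrationProvider configured above loads every module in schema/migrations and expects each to export up and down functions. A minimal sketch of such a file, assuming nothing beyond that contract (table and file names are hypothetical):

import { Kysely, sql } from 'kysely';

// schema/migrations/0002-example.ts (hypothetical)
export async function up(db: Kysely<any>): Promise<void> {
  await sql`CREATE TABLE "example" ("id" uuid PRIMARY KEY)`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP TABLE "example"`.execute(db);
}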

View File

@ -74,11 +74,21 @@ export class MyConsoleLogger extends ConsoleLogger {
export class LoggingRepository {
private logger: MyConsoleLogger;
constructor(@Inject(ClsService) cls: ClsService | undefined, configRepository: ConfigRepository) {
const { noColor } = configRepository.getEnv();
constructor(
@Inject(ClsService) cls: ClsService | undefined,
@Inject(ConfigRepository) configRepository: ConfigRepository | undefined,
) {
let noColor = false;
if (configRepository) {
noColor = configRepository.getEnv().noColor;
}
this.logger = new MyConsoleLogger(cls, { context: LoggingRepository.name, color: !noColor });
}
static create() {
return new LoggingRepository(undefined, undefined);
}
setAppName(name: string): void {
appName = name.charAt(0).toUpperCase() + name.slice(1);
}
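
Both constructor dependencies are now injectable-or-undefined so that the new static create() can build a logger outside Nest's DI container; the initial Kysely migration further down uses exactly this (LoggingRepository.create()). A usage sketch:

import { LoggingRepository } from 'src/repositories/logging.repository';

// No ClsService or ConfigRepository is available outside a Nest context,
// so the factory constructs the logger with both dependencies undefined.
const logger = LoggingRepository.create();
logger.log('running standalone task');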

View File

@ -1,13 +1,12 @@
import { Injectable } from '@nestjs/common';
import { Kysely, OrderByDirection, sql } from 'kysely';
import { Kysely, OrderByDirection, Selectable, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { randomUUID } from 'node:crypto';
import { DB } from 'src/db';
import { DB, Exif } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetEntity, searchAssetBuilder } from 'src/entities/asset.entity';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetStatus, AssetType } from 'src/enum';
import { anyUuid, asUuid } from 'src/utils/database';
import { Paginated } from 'src/utils/pagination';
import { anyUuid, asUuid, searchAssetBuilder } from 'src/utils/database';
import { isValidInteger } from 'src/validation';
export interface SearchResult<T> {
@ -216,7 +215,7 @@ export class SearchRepository {
},
],
})
async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions): Paginated<AssetEntity> {
async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions) {
const orderDirection = (options.orderDirection?.toLowerCase() || 'desc') as OrderByDirection;
const items = await searchAssetBuilder(this.db, options)
.orderBy('assets.fileCreatedAt', orderDirection)
@ -225,7 +224,7 @@ export class SearchRepository {
.execute();
const hasNextPage = items.length > pagination.size;
items.splice(pagination.size);
return { items: items as any as AssetEntity[], hasNextPage };
return { items, hasNextPage };
}
@GenerateSql({
@ -240,7 +239,7 @@ export class SearchRepository {
},
],
})
async searchRandom(size: number, options: AssetSearchOptions): Promise<AssetEntity[]> {
async searchRandom(size: number, options: AssetSearchOptions) {
const uuid = randomUUID();
const builder = searchAssetBuilder(this.db, options);
const lessThan = builder
@ -251,8 +250,8 @@ export class SearchRepository {
.where('assets.id', '>', uuid)
.orderBy(sql`random()`)
.limit(size);
const { rows } = await sql`${lessThan} union all ${greaterThan} limit ${size}`.execute(this.db);
return rows as any as AssetEntity[];
const { rows } = await sql<MapAsset>`${lessThan} union all ${greaterThan} limit ${size}`.execute(this.db);
return rows;
}
@GenerateSql({
@ -268,17 +267,17 @@ export class SearchRepository {
},
],
})
async searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions): Paginated<AssetEntity> {
async searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions) {
if (!isValidInteger(pagination.size, { min: 1, max: 1000 })) {
throw new Error(`Invalid value for 'size': ${pagination.size}`);
}
const items = (await searchAssetBuilder(this.db, options)
const items = await searchAssetBuilder(this.db, options)
.innerJoin('smart_search', 'assets.id', 'smart_search.assetId')
.orderBy(sql`smart_search.embedding <=> ${options.embedding}`)
.limit(pagination.size + 1)
.offset((pagination.page - 1) * pagination.size)
.execute()) as any as AssetEntity[];
.execute();
const hasNextPage = items.length > pagination.size;
items.splice(pagination.size);
@ -392,7 +391,7 @@ export class SearchRepository {
}
@GenerateSql({ params: [[DummyValue.UUID]] })
getAssetsByCity(userIds: string[]): Promise<AssetEntity[]> {
getAssetsByCity(userIds: string[]) {
return this.db
.withRecursive('cte', (qb) => {
const base = qb
@ -434,9 +433,14 @@ export class SearchRepository {
.innerJoin('exif', 'assets.id', 'exif.assetId')
.innerJoin('cte', 'assets.id', 'cte.assetId')
.selectAll('assets')
.select((eb) => eb.fn('to_jsonb', [eb.table('exif')]).as('exifInfo'))
.select((eb) =>
eb
.fn('to_jsonb', [eb.table('exif')])
.$castTo<Selectable<Exif>>()
.as('exifInfo'),
)
.orderBy('exif.city')
.execute() as any as Promise<AssetEntity[]>;
.execute();
}
async upsert(assetId: string, embedding: string): Promise<void> {
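
Two things are going on in searchRandom above: the query picks a random uuid pivot and unions the keyset halves on either side of it (cheaper than ordering the whole table by random()), and typing the raw query as sql<MapAsset> is what lets the old "as any as AssetEntity[]" cast go. The typed-raw-sql half in isolation, against a hypothetical table:

import { Kysely, sql } from 'kysely';

interface Row {
  id: string;
  name: string;
}

// sql<Row> types each returned row, so `rows` is Row[] with no cast.
async function sampleRows(db: Kysely<any>, size: number) {
  const { rows } = await sql<Row>`SELECT id, name FROM places ORDER BY random() LIMIT ${size}`.execute(db);
  return rows;
}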

View File

@ -1,12 +1,12 @@
import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, sql, Updateable } from 'kysely';
import { Insertable, Kysely, NotNull, sql, Updateable } from 'kysely';
import { jsonObjectFrom } from 'kysely/helpers/postgres';
import _ from 'lodash';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { Album, columns } from 'src/database';
import { DB, SharedLinks } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { SharedLinkType } from 'src/enum';
export type SharedLinkSearchOptions = {
@ -19,7 +19,7 @@ export class SharedLinkRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
get(userId: string, id: string): Promise<SharedLinkEntity | undefined> {
get(userId: string, id: string) {
return this.db
.selectFrom('shared_links')
.selectAll('shared_links')
@ -87,18 +87,23 @@ export class SharedLinkRepository {
.as('album'),
(join) => join.onTrue(),
)
.select((eb) => eb.fn.coalesce(eb.fn.jsonAgg('a').filterWhere('a.id', 'is not', null), sql`'[]'`).as('assets'))
.select((eb) =>
eb.fn
.coalesce(eb.fn.jsonAgg('a').filterWhere('a.id', 'is not', null), sql`'[]'`)
.$castTo<MapAsset[]>()
.as('assets'),
)
.groupBy(['shared_links.id', sql`"album".*`])
.select((eb) => eb.fn.toJson('album').as('album'))
.select((eb) => eb.fn.toJson('album').$castTo<Album | null>().as('album'))
.where('shared_links.id', '=', id)
.where('shared_links.userId', '=', userId)
.where((eb) => eb.or([eb('shared_links.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)]))
.orderBy('shared_links.createdAt', 'desc')
.executeTakeFirst() as Promise<SharedLinkEntity | undefined>;
.executeTakeFirst();
}
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
getAll({ userId, albumId }: SharedLinkSearchOptions): Promise<SharedLinkEntity[]> {
getAll({ userId, albumId }: SharedLinkSearchOptions) {
return this.db
.selectFrom('shared_links')
.selectAll('shared_links')
@ -115,6 +120,7 @@ export class SharedLinkRepository {
(join) => join.onTrue(),
)
.select('assets.assets')
.$narrowType<{ assets: NotNull }>()
.leftJoinLateral(
(eb) =>
eb
@ -152,12 +158,12 @@ export class SharedLinkRepository {
.as('album'),
(join) => join.onTrue(),
)
.select((eb) => eb.fn.toJson('album').as('album'))
.select((eb) => eb.fn.toJson('album').$castTo<Album | null>().as('album'))
.where((eb) => eb.or([eb('shared_links.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)]))
.$if(!!albumId, (eb) => eb.where('shared_links.albumId', '=', albumId!))
.orderBy('shared_links.createdAt', 'desc')
.distinctOn(['shared_links.createdAt'])
.execute() as unknown as Promise<SharedLinkEntity[]>;
.execute();
}
@GenerateSql({ params: [DummyValue.BUFFER] })
@ -177,7 +183,7 @@ export class SharedLinkRepository {
.executeTakeFirst();
}
async create(entity: Insertable<SharedLinks> & { assetIds?: string[] }): Promise<SharedLinkEntity> {
async create(entity: Insertable<SharedLinks> & { assetIds?: string[] }) {
const { id } = await this.db
.insertInto('shared_links')
.values(_.omit(entity, 'assetIds'))
@ -194,7 +200,7 @@ export class SharedLinkRepository {
return this.getSharedLinks(id);
}
async update(entity: Updateable<SharedLinks> & { id: string; assetIds?: string[] }): Promise<SharedLinkEntity> {
async update(entity: Updateable<SharedLinks> & { id: string; assetIds?: string[] }) {
const { id } = await this.db
.updateTable('shared_links')
.set(_.omit(entity, 'assets', 'album', 'assetIds'))
@ -212,8 +218,8 @@ export class SharedLinkRepository {
return this.getSharedLinks(id);
}
async remove(entity: SharedLinkEntity): Promise<void> {
await this.db.deleteFrom('shared_links').where('shared_links.id', '=', entity.id).execute();
async remove(id: string): Promise<void> {
await this.db.deleteFrom('shared_links').where('shared_links.id', '=', id).execute();
}
private getSharedLinks(id: string) {
@ -236,9 +242,12 @@ export class SharedLinkRepository {
(join) => join.onTrue(),
)
.select((eb) =>
eb.fn.coalesce(eb.fn.jsonAgg('assets').filterWhere('assets.id', 'is not', null), sql`'[]'`).as('assets'),
eb.fn
.coalesce(eb.fn.jsonAgg('assets').filterWhere('assets.id', 'is not', null), sql`'[]'`)
.$castTo<MapAsset[]>()
.as('assets'),
)
.groupBy('shared_links.id')
.executeTakeFirstOrThrow() as Promise<SharedLinkEntity>;
.executeTakeFirstOrThrow();
}
}
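
The coalesce(jsonAgg(...).filterWhere(...), sql`'[]'`) idiom used in both queries above guarantees an empty JSON array, rather than SQL null or a [null] aggregate, when the left join matches nothing; $castTo then labels the result. The same idiom reduced to a hypothetical parent/child pair:

import { ExpressionBuilder, sql } from 'kysely';

interface DB {
  owners: { id: string };
  pets: { id: string; ownerId: string | null };
}

// jsonAgg over a left-joined table would emit [null] for unmatched owners;
// filterWhere drops those rows and coalesce turns the empty aggregate into '[]'.
const withPets = (eb: ExpressionBuilder<DB, 'owners' | 'pets'>) =>
  eb.fn
    .coalesce(eb.fn.jsonAgg('pets').filterWhere('pets.id', 'is not', null), sql`'[]'`)
    .$castTo<{ id: string; ownerId: string | null }[]>()
    .as('pets');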

View File

@ -5,7 +5,6 @@ import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { AssetStack, DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetEntity } from 'src/entities/asset.entity';
import { asUuid } from 'src/utils/database';
export interface StackSearch {
@ -36,9 +35,7 @@ const withAssets = (eb: ExpressionBuilder<DB, 'asset_stack'>, withTags = false)
.select((eb) => eb.fn.toJson('exifInfo').as('exifInfo'))
.where('assets.deletedAt', 'is', null)
.whereRef('assets.stackId', '=', 'asset_stack.id'),
)
.$castTo<AssetEntity[]>()
.as('assets');
).as('assets');
};
@Injectable()

View File

@ -2,8 +2,7 @@ import { Kysely } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { withExif } from 'src/entities/asset.entity';
import { asUuid } from 'src/utils/database';
import { asUuid, withExif } from 'src/utils/database';
export class ViewRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}

View File

@ -0,0 +1,410 @@
import { Kysely, sql } from 'kysely';
import { DatabaseExtension } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension;
const lastMigrationSql = sql<{ name: string }>`SELECT "name" FROM "migrations" ORDER BY "timestamp" DESC LIMIT 1;`;
const tableExists = sql<{ result: string | null }>`select to_regclass('migrations') as "result"`;
const logger = LoggingRepository.create();
export async function up(db: Kysely<any>): Promise<void> {
const { rows } = await tableExists.execute(db);
const hasTypeOrmMigrations = !!rows[0]?.result;
if (hasTypeOrmMigrations) {
const {
rows: [lastMigration],
} = await lastMigrationSql.execute(db);
if (lastMigration?.name !== 'AddMissingIndex1744910873956') {
throw new Error(
'Invalid upgrade path. For more information, see https://immich.app/errors#typeorm-upgrade',
);
}
logger.log('Database has up to date TypeORM migrations, skipping initial Kysely migration');
return;
}
await sql`CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS "unaccent";`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS "cube";`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS "earthdistance";`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS "pg_trgm";`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS "vectors";`.execute(db);
await sql`CREATE OR REPLACE FUNCTION immich_uuid_v7(p_timestamp timestamp with time zone default clock_timestamp())
RETURNS uuid
VOLATILE LANGUAGE SQL
AS $$
select encode(
set_bit(
set_bit(
overlay(uuid_send(gen_random_uuid())
placing substring(int8send(floor(extract(epoch from p_timestamp) * 1000)::bigint) from 3)
from 1 for 6
),
52, 1
),
53, 1
),
'hex')::uuid;
$$;`.execute(db);
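  // Note on immich_uuid_v7 (above): it overlays the low 48 bits of the
  // millisecond timestamp onto the first six bytes of a random v4 UUID, then
  // sets two extra bits so the version nibble reads 0111 (v7) rather than
  // 0100 (v4). Ids generated this way sort by creation time, which keeps the
  // "updateId" indexes append-friendly.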
await sql`CREATE OR REPLACE FUNCTION updated_at()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
DECLARE
clock_timestamp TIMESTAMP := clock_timestamp();
BEGIN
new."updatedAt" = clock_timestamp;
new."updateId" = immich_uuid_v7(clock_timestamp);
return new;
END;
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION f_concat_ws(text, text[])
RETURNS text
PARALLEL SAFE IMMUTABLE LANGUAGE SQL
AS $$SELECT array_to_string($2, $1)$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION f_unaccent(text)
RETURNS text
PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL
RETURN unaccent('unaccent', $1)`.execute(db);
await sql`CREATE OR REPLACE FUNCTION ll_to_earth_public(latitude double precision, longitude double precision)
RETURNS public.earth
PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL
AS $$
SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION users_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO users_audit ("userId")
SELECT "id"
FROM OLD;
RETURN NULL;
END;
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION partners_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO partners_audit ("sharedById", "sharedWithId")
SELECT "sharedById", "sharedWithId"
FROM OLD;
RETURN NULL;
END;
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION assets_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO assets_audit ("assetId", "ownerId")
SELECT "id", "ownerId"
FROM OLD;
RETURN NULL;
END;
$$;`.execute(db);
if (vectorExtension === DatabaseExtension.VECTORS) {
await sql`SET search_path TO "$user", public, vectors`.execute(db);
await sql`SET vectors.pgvector_compatibility=on`.execute(db);
}
await sql`CREATE TYPE "assets_status_enum" AS ENUM ('active','trashed','deleted');`.execute(db);
await sql`CREATE TYPE "sourcetype" AS ENUM ('machine-learning','exif','manual');`.execute(db);
await sql`CREATE TABLE "users" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying NOT NULL, "password" character varying NOT NULL DEFAULT '', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "profileImagePath" character varying NOT NULL DEFAULT '', "isAdmin" boolean NOT NULL DEFAULT false, "shouldChangePassword" boolean NOT NULL DEFAULT true, "deletedAt" timestamp with time zone, "oauthId" character varying NOT NULL DEFAULT '', "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "storageLabel" character varying, "name" character varying NOT NULL DEFAULT '', "quotaSizeInBytes" bigint, "quotaUsageInBytes" bigint NOT NULL DEFAULT 0, "status" character varying NOT NULL DEFAULT 'active', "profileChangedAt" timestamp with time zone NOT NULL DEFAULT now(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "libraries" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "name" character varying NOT NULL, "ownerId" uuid NOT NULL, "importPaths" text[] NOT NULL, "exclusionPatterns" text[] NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "refreshedAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "asset_stack" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "primaryAssetId" uuid NOT NULL, "ownerId" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "assets" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "deviceAssetId" character varying NOT NULL, "ownerId" uuid NOT NULL, "deviceId" character varying NOT NULL, "type" character varying NOT NULL, "originalPath" character varying NOT NULL, "fileCreatedAt" timestamp with time zone NOT NULL, "fileModifiedAt" timestamp with time zone NOT NULL, "isFavorite" boolean NOT NULL DEFAULT false, "duration" character varying, "encodedVideoPath" character varying DEFAULT '', "checksum" bytea NOT NULL, "isVisible" boolean NOT NULL DEFAULT true, "livePhotoVideoId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "isArchived" boolean NOT NULL DEFAULT false, "originalFileName" character varying NOT NULL, "sidecarPath" character varying, "thumbhash" bytea, "isOffline" boolean NOT NULL DEFAULT false, "libraryId" uuid, "isExternal" boolean NOT NULL DEFAULT false, "deletedAt" timestamp with time zone, "localDateTime" timestamp with time zone NOT NULL, "stackId" uuid, "duplicateId" uuid, "status" assets_status_enum NOT NULL DEFAULT 'active', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "albums" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "ownerId" uuid NOT NULL, "albumName" character varying NOT NULL DEFAULT 'Untitled Album', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "albumThumbnailAssetId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "description" text NOT NULL DEFAULT '', "deletedAt" timestamp with time zone, "isActivityEnabled" boolean NOT NULL DEFAULT true, "order" character varying NOT NULL DEFAULT 'desc', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`COMMENT ON COLUMN "albums"."albumThumbnailAssetId" IS 'Asset ID to be used as thumbnail';`.execute(db);
await sql`CREATE TABLE "activity" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "albumId" uuid NOT NULL, "userId" uuid NOT NULL, "assetId" uuid, "comment" text, "isLiked" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "albums_assets_assets" ("albumsId" uuid NOT NULL, "assetsId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute(db);
await sql`CREATE TABLE "albums_shared_users_users" ("albumsId" uuid NOT NULL, "usersId" uuid NOT NULL, "role" character varying NOT NULL DEFAULT 'editor');`.execute(db);
await sql`CREATE TABLE "api_keys" ("name" character varying NOT NULL, "key" character varying NOT NULL, "userId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "permissions" character varying[] NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "assets_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "assetId" uuid NOT NULL, "ownerId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db);
await sql`CREATE TABLE "person" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ownerId" uuid NOT NULL, "name" character varying NOT NULL DEFAULT '', "thumbnailPath" character varying NOT NULL DEFAULT '', "isHidden" boolean NOT NULL DEFAULT false, "birthDate" date, "faceAssetId" uuid, "isFavorite" boolean NOT NULL DEFAULT false, "color" character varying, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "asset_faces" ("assetId" uuid NOT NULL, "personId" uuid, "imageWidth" integer NOT NULL DEFAULT 0, "imageHeight" integer NOT NULL DEFAULT 0, "boundingBoxX1" integer NOT NULL DEFAULT 0, "boundingBoxY1" integer NOT NULL DEFAULT 0, "boundingBoxX2" integer NOT NULL DEFAULT 0, "boundingBoxY2" integer NOT NULL DEFAULT 0, "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "sourceType" sourcetype NOT NULL DEFAULT 'machine-learning', "deletedAt" timestamp with time zone);`.execute(db);
await sql`CREATE TABLE "asset_files" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "assetId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "type" character varying NOT NULL, "path" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "asset_job_status" ("assetId" uuid NOT NULL, "facesRecognizedAt" timestamp with time zone, "metadataExtractedAt" timestamp with time zone, "duplicatesDetectedAt" timestamp with time zone, "previewAt" timestamp with time zone, "thumbnailAt" timestamp with time zone);`.execute(db);
await sql`CREATE TABLE "audit" ("id" serial NOT NULL, "entityType" character varying NOT NULL, "entityId" uuid NOT NULL, "action" character varying NOT NULL, "ownerId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute(db);
await sql`CREATE TABLE "exif" ("assetId" uuid NOT NULL, "make" character varying, "model" character varying, "exifImageWidth" integer, "exifImageHeight" integer, "fileSizeInByte" bigint, "orientation" character varying, "dateTimeOriginal" timestamp with time zone, "modifyDate" timestamp with time zone, "lensModel" character varying, "fNumber" double precision, "focalLength" double precision, "iso" integer, "latitude" double precision, "longitude" double precision, "city" character varying, "state" character varying, "country" character varying, "description" text NOT NULL DEFAULT '', "fps" double precision, "exposureTime" character varying, "livePhotoCID" character varying, "timeZone" character varying, "projectionType" character varying, "profileDescription" character varying, "colorspace" character varying, "bitsPerSample" integer, "autoStackId" character varying, "rating" integer, "updatedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "face_search" ("faceId" uuid NOT NULL, "embedding" vector(512) NOT NULL);`.execute(db);
await sql`CREATE TABLE "geodata_places" ("id" integer NOT NULL, "name" character varying(200) NOT NULL, "longitude" double precision NOT NULL, "latitude" double precision NOT NULL, "countryCode" character(2) NOT NULL, "admin1Code" character varying(20), "admin2Code" character varying(80), "modificationDate" date NOT NULL, "admin1Name" character varying, "admin2Name" character varying, "alternateNames" character varying);`.execute(db);
await sql`CREATE TABLE "memories" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "ownerId" uuid NOT NULL, "type" character varying NOT NULL, "data" jsonb NOT NULL, "isSaved" boolean NOT NULL DEFAULT false, "memoryAt" timestamp with time zone NOT NULL, "seenAt" timestamp with time zone, "showAt" timestamp with time zone, "hideAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "memories_assets_assets" ("memoriesId" uuid NOT NULL, "assetsId" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "move_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "entityId" uuid NOT NULL, "pathType" character varying NOT NULL, "oldPath" character varying NOT NULL, "newPath" character varying NOT NULL);`.execute(db);
await sql`CREATE TABLE "naturalearth_countries" ("id" integer NOT NULL GENERATED ALWAYS AS IDENTITY, "admin" character varying(50) NOT NULL, "admin_a3" character varying(3) NOT NULL, "type" character varying(50) NOT NULL, "coordinates" polygon NOT NULL);`.execute(db);
await sql`CREATE TABLE "partners_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db);
await sql`CREATE TABLE "partners" ("sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "inTimeline" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "sessions" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "token" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "userId" uuid NOT NULL, "deviceType" character varying NOT NULL DEFAULT '', "deviceOS" character varying NOT NULL DEFAULT '', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "shared_links" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "description" character varying, "userId" uuid NOT NULL, "key" bytea NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "expiresAt" timestamp with time zone, "allowUpload" boolean NOT NULL DEFAULT false, "albumId" uuid, "allowDownload" boolean NOT NULL DEFAULT true, "showExif" boolean NOT NULL DEFAULT true, "password" character varying);`.execute(db);
await sql`CREATE TABLE "shared_link__asset" ("assetsId" uuid NOT NULL, "sharedLinksId" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "smart_search" ("assetId" uuid NOT NULL, "embedding" vector(512) NOT NULL);`.execute(db);
await sql`ALTER TABLE "smart_search" ALTER COLUMN "embedding" SET STORAGE EXTERNAL;`.execute(db);
await sql`CREATE TABLE "session_sync_checkpoints" ("sessionId" uuid NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ack" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "system_metadata" ("key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
await sql`CREATE TABLE "tags" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "userId" uuid NOT NULL, "value" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "color" character varying, "parentId" uuid, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "tag_asset" ("assetsId" uuid NOT NULL, "tagsId" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "tags_closure" ("id_ancestor" uuid NOT NULL, "id_descendant" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "users_audit" ("userId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "id" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "user_metadata" ("userId" uuid NOT NULL, "key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
await sql`CREATE TABLE "version_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "version" character varying NOT NULL);`.execute(db);
await sql`ALTER TABLE "users" ADD CONSTRAINT "PK_a3ffb1c0c8416b9fc6f907b7433" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "libraries" ADD CONSTRAINT "PK_505fedfcad00a09b3734b4223de" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "PK_74a27e7fcbd5852463d0af3034b" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "assets" ADD CONSTRAINT "PK_da96729a8b113377cfb6a62439c" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "albums" ADD CONSTRAINT "PK_7f71c7b5bc7c87b8f94c9a93a00" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "activity" ADD CONSTRAINT "PK_24625a1d6b1b089c8ae206fe467" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "PK_c67bc36fa845fb7b18e0e398180" PRIMARY KEY ("albumsId", "assetsId");`.execute(db);
await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "PK_7df55657e0b2e8b626330a0ebc8" PRIMARY KEY ("albumsId", "usersId");`.execute(db);
await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "PK_5c8a79801b44bd27b79228e1dad" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "assets_audit" ADD CONSTRAINT "PK_99bd5c015f81a641927a32b4212" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "person" ADD CONSTRAINT "PK_5fdaf670315c4b7e70cce85daa3" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "PK_6df76ab2eb6f5b57b7c2f1fc684" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "PK_c41dc3e9ef5e1c57ca5a08a0004" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "PK_420bec36fc02813bddf5c8b73d4" PRIMARY KEY ("assetId");`.execute(db);
await sql`ALTER TABLE "audit" ADD CONSTRAINT "PK_1d3d120ddaf7bc9b1ed68ed463a" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "exif" ADD CONSTRAINT "PK_c0117fdbc50b917ef9067740c44" PRIMARY KEY ("assetId");`.execute(db);
await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_pkey" PRIMARY KEY ("faceId");`.execute(db);
await sql`ALTER TABLE "geodata_places" ADD CONSTRAINT "PK_c29918988912ef4036f3d7fbff4" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "memories" ADD CONSTRAINT "PK_aaa0692d9496fe827b0568612f8" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "PK_fcaf7112a013d1703c011c6793d" PRIMARY KEY ("memoriesId", "assetsId");`.execute(db);
await sql`ALTER TABLE "move_history" ADD CONSTRAINT "PK_af608f132233acf123f2949678d" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "naturalearth_countries" ADD CONSTRAINT "PK_21a6d86d1ab5d841648212e5353" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "partners_audit" ADD CONSTRAINT "PK_952b50217ff78198a7e380f0359" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "partners" ADD CONSTRAINT "PK_f1cc8f73d16b367f426261a8736" PRIMARY KEY ("sharedById", "sharedWithId");`.execute(db);
await sql`ALTER TABLE "sessions" ADD CONSTRAINT "PK_48cb6b5c20faa63157b3c1baf7f" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "PK_642e2b0f619e4876e5f90a43465" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "PK_9b4f3687f9b31d1e311336b05e3" PRIMARY KEY ("assetsId", "sharedLinksId");`.execute(db);
await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_pkey" PRIMARY KEY ("assetId");`.execute(db);
await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "PK_b846ab547a702863ef7cd9412fb" PRIMARY KEY ("sessionId", "type");`.execute(db);
await sql`ALTER TABLE "system_metadata" ADD CONSTRAINT "PK_fa94f6857470fb5b81ec6084465" PRIMARY KEY ("key");`.execute(db);
await sql`ALTER TABLE "tags" ADD CONSTRAINT "PK_e7dc17249a1148a1970748eda99" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "PK_ef5346fe522b5fb3bc96454747e" PRIMARY KEY ("assetsId", "tagsId");`.execute(db);
await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "PK_eab38eb12a3ec6df8376c95477c" PRIMARY KEY ("id_ancestor", "id_descendant");`.execute(db);
await sql`ALTER TABLE "users_audit" ADD CONSTRAINT "PK_e9b2bdfd90e7eb5961091175180" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "PK_5931462150b3438cbc83277fe5a" PRIMARY KEY ("userId", "key");`.execute(db);
await sql`ALTER TABLE "version_history" ADD CONSTRAINT "PK_5db259cbb09ce82c0d13cfd1b23" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "libraries" ADD CONSTRAINT "FK_0f6fc2fb195f24d19b0fb0d57c1" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_91704e101438fd0653f582426dc" FOREIGN KEY ("primaryAssetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION;`.execute(db);
await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_c05079e542fd74de3b5ecb5c1c8" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_2c5ac0d6fb58b238fd2068de67d" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_16294b83fa8c0149719a1f631ef" FOREIGN KEY ("livePhotoVideoId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_9977c3c1de01c3d848039a6b90c" FOREIGN KEY ("libraryId") REFERENCES "libraries" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_f15d48fa3ea5e4bda05ca8ab207" FOREIGN KEY ("stackId") REFERENCES "asset_stack" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_b22c53f35ef20c28c21637c85f4" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_05895aa505a670300d4816debce" FOREIGN KEY ("albumThumbnailAssetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_1af8519996fbfb3684b58df280b" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_3571467bcbe021f66e2bdce96ea" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_8091ea76b12338cb4428d33d782" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_e590fa396c6898fcd4a50e40927" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_4bd1303d199f4e72ccdf998c621" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_427c350ad49bd3935a50baab737" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_f48513bf9bccefd6ff3ad30bd06" FOREIGN KEY ("usersId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "FK_6c2e267ae764a9413b863a29342" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_5527cc99f530a547093f9e577b6" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_2bbabe31656b6778c6b87b61023" FOREIGN KEY ("faceAssetId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE SET NULL;`.execute(db);
await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_02a43fd0b3c50fb6d7f0cb7282c" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_95ad7106dd7b484275443f580f9" FOREIGN KEY ("personId") REFERENCES "person" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "FK_e3e103a5f1d8bc8402999286040" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "FK_420bec36fc02813bddf5c8b73d4" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "exif" ADD CONSTRAINT "FK_c0117fdbc50b917ef9067740c44" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_faceId_fkey" FOREIGN KEY ("faceId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "memories" ADD CONSTRAINT "FK_575842846f0c28fa5da46c99b19" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_984e5c9ab1f04d34538cd32334e" FOREIGN KEY ("memoriesId") REFERENCES "memories" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_6942ecf52d75d4273de19d2c16f" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_7e077a8b70b3530138610ff5e04" FOREIGN KEY ("sharedById") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_d7e875c6c60e661723dbf372fd3" FOREIGN KEY ("sharedWithId") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "sessions" ADD CONSTRAINT "FK_57de40bc620f456c7311aa3a1e6" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_66fe3837414c5a9f1c33ca49340" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_0c6ce9058c29f07cdf7014eac66" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_5b7decce6c8d3db9593d6111a66" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_c9fab4aa97ffd1b034f3d6581ab" FOREIGN KEY ("sharedLinksId") REFERENCES "shared_links" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "FK_d8ddd9d687816cc490432b3d4bc" FOREIGN KEY ("sessionId") REFERENCES "sessions" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_92e67dc508c705dd66c94615576" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_9f9590cc11561f1f48ff034ef99" FOREIGN KEY ("parentId") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_f8e8a9e893cb5c54907f1b798e9" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_e99f31ea4cdf3a2c35c7287eb42" FOREIGN KEY ("tagsId") REFERENCES "tags" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_15fbcbc67663c6bfc07b354c22c" FOREIGN KEY ("id_ancestor") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_b1a2a7ed45c29179b5ad51548a1" FOREIGN KEY ("id_descendant") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "FK_6afb43681a21cf7815932bc38ac" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`ALTER TABLE "users" ADD CONSTRAINT "UQ_97672ac88f789774dd47f7c8be3" UNIQUE ("email");`.execute(db);
await sql`ALTER TABLE "users" ADD CONSTRAINT "UQ_b309cf34fa58137c416b32cea3a" UNIQUE ("storageLabel");`.execute(db);
await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "REL_91704e101438fd0653f582426d" UNIQUE ("primaryAssetId");`.execute(db);
await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "UQ_assetId_type" UNIQUE ("assetId", "type");`.execute(db);
await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_newPath" UNIQUE ("newPath");`.execute(db);
await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_entityId_pathType" UNIQUE ("entityId", "pathType");`.execute(db);
await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "UQ_sharedlink_key" UNIQUE ("key");`.execute(db);
await sql`ALTER TABLE "tags" ADD CONSTRAINT "UQ_79d6f16e52bb2c7130375246793" UNIQUE ("userId", "value");`.execute(db);
await sql`ALTER TABLE "activity" ADD CONSTRAINT "CHK_2ab1e70f113f450eb40c1e3ec8" CHECK (("comment" IS NULL AND "isLiked" = true) OR ("comment" IS NOT NULL AND "isLiked" = false));`.execute(db);
await sql`ALTER TABLE "person" ADD CONSTRAINT "CHK_b0f82b0ed662bfc24fbb58bb45" CHECK ("birthDate" <= CURRENT_DATE);`.execute(db);
await sql`CREATE INDEX "IDX_users_updated_at_asc_id_asc" ON "users" ("updatedAt", "id")`.execute(db);
await sql`CREATE INDEX "IDX_users_update_id" ON "users" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_0f6fc2fb195f24d19b0fb0d57c" ON "libraries" ("ownerId")`.execute(db);
await sql`CREATE INDEX "IDX_libraries_update_id" ON "libraries" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_91704e101438fd0653f582426d" ON "asset_stack" ("primaryAssetId")`.execute(db);
await sql`CREATE INDEX "IDX_c05079e542fd74de3b5ecb5c1c" ON "asset_stack" ("ownerId")`.execute(db);
await sql`CREATE INDEX "idx_originalfilename_trigram" ON "assets" USING gin (f_unaccent("originalFileName") gin_trgm_ops)`.execute(db);
await sql`CREATE INDEX "IDX_asset_id_stackId" ON "assets" ("id", "stackId")`.execute(db);
await sql`CREATE INDEX "IDX_originalPath_libraryId" ON "assets" ("originalPath", "libraryId")`.execute(db);
await sql`CREATE INDEX "idx_local_date_time_month" ON "assets" ((date_trunc('MONTH'::text, ("localDateTime" AT TIME ZONE 'UTC'::text)) AT TIME ZONE 'UTC'::text))`.execute(db);
await sql`CREATE INDEX "idx_local_date_time" ON "assets" ((("localDateTime" at time zone 'UTC')::date))`.execute(db);
await sql`CREATE UNIQUE INDEX "UQ_assets_owner_library_checksum" ON "assets" ("ownerId", "libraryId", "checksum") WHERE ("libraryId" IS NOT NULL)`.execute(db);
await sql`CREATE UNIQUE INDEX "UQ_assets_owner_checksum" ON "assets" ("ownerId", "checksum") WHERE ("libraryId" IS NULL)`.execute(db);
await sql`CREATE INDEX "IDX_2c5ac0d6fb58b238fd2068de67" ON "assets" ("ownerId")`.execute(db);
await sql`CREATE INDEX "idx_asset_file_created_at" ON "assets" ("fileCreatedAt")`.execute(db);
await sql`CREATE INDEX "IDX_8d3efe36c0755849395e6ea866" ON "assets" ("checksum")`.execute(db);
await sql`CREATE INDEX "IDX_16294b83fa8c0149719a1f631e" ON "assets" ("livePhotoVideoId")`.execute(db);
await sql`CREATE INDEX "IDX_4d66e76dada1ca180f67a205dc" ON "assets" ("originalFileName")`.execute(db);
await sql`CREATE INDEX "IDX_9977c3c1de01c3d848039a6b90" ON "assets" ("libraryId")`.execute(db);
await sql`CREATE INDEX "IDX_f15d48fa3ea5e4bda05ca8ab20" ON "assets" ("stackId")`.execute(db);
await sql`CREATE INDEX "IDX_assets_duplicateId" ON "assets" ("duplicateId")`.execute(db);
await sql`CREATE INDEX "IDX_assets_update_id" ON "assets" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_b22c53f35ef20c28c21637c85f" ON "albums" ("ownerId")`.execute(db);
await sql`CREATE INDEX "IDX_05895aa505a670300d4816debc" ON "albums" ("albumThumbnailAssetId")`.execute(db);
await sql`CREATE INDEX "IDX_albums_update_id" ON "albums" ("updateId")`.execute(db);
await sql`CREATE UNIQUE INDEX "IDX_activity_like" ON "activity" ("assetId", "userId", "albumId") WHERE ("isLiked" = true)`.execute(db);
await sql`CREATE INDEX "IDX_1af8519996fbfb3684b58df280" ON "activity" ("albumId")`.execute(db);
await sql`CREATE INDEX "IDX_3571467bcbe021f66e2bdce96e" ON "activity" ("userId")`.execute(db);
await sql`CREATE INDEX "IDX_8091ea76b12338cb4428d33d78" ON "activity" ("assetId")`.execute(db);
await sql`CREATE INDEX "IDX_activity_update_id" ON "activity" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_e590fa396c6898fcd4a50e4092" ON "albums_assets_assets" ("albumsId")`.execute(db);
await sql`CREATE INDEX "IDX_4bd1303d199f4e72ccdf998c62" ON "albums_assets_assets" ("assetsId")`.execute(db);
await sql`CREATE INDEX "IDX_f48513bf9bccefd6ff3ad30bd0" ON "albums_shared_users_users" ("usersId")`.execute(db);
await sql`CREATE INDEX "IDX_427c350ad49bd3935a50baab73" ON "albums_shared_users_users" ("albumsId")`.execute(db);
await sql`CREATE INDEX "IDX_6c2e267ae764a9413b863a2934" ON "api_keys" ("userId")`.execute(db);
await sql`CREATE INDEX "IDX_api_keys_update_id" ON "api_keys" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_assets_audit_asset_id" ON "assets_audit" ("assetId")`.execute(db);
await sql`CREATE INDEX "IDX_assets_audit_owner_id" ON "assets_audit" ("ownerId")`.execute(db);
await sql`CREATE INDEX "IDX_assets_audit_deleted_at" ON "assets_audit" ("deletedAt")`.execute(db);
await sql`CREATE INDEX "IDX_5527cc99f530a547093f9e577b" ON "person" ("ownerId")`.execute(db);
await sql`CREATE INDEX "IDX_2bbabe31656b6778c6b87b6102" ON "person" ("faceAssetId")`.execute(db);
await sql`CREATE INDEX "IDX_person_update_id" ON "person" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_bf339a24070dac7e71304ec530" ON "asset_faces" ("personId", "assetId")`.execute(db);
await sql`CREATE INDEX "IDX_asset_faces_assetId_personId" ON "asset_faces" ("assetId", "personId")`.execute(db);
await sql`CREATE INDEX "IDX_02a43fd0b3c50fb6d7f0cb7282" ON "asset_faces" ("assetId")`.execute(db);
await sql`CREATE INDEX "IDX_95ad7106dd7b484275443f580f" ON "asset_faces" ("personId")`.execute(db);
await sql`CREATE INDEX "IDX_asset_files_assetId" ON "asset_files" ("assetId")`.execute(db);
await sql`CREATE INDEX "IDX_asset_files_update_id" ON "asset_files" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_ownerId_createdAt" ON "audit" ("ownerId", "createdAt")`.execute(db);
await sql`CREATE INDEX "exif_city" ON "exif" ("city")`.execute(db);
await sql`CREATE INDEX "IDX_live_photo_cid" ON "exif" ("livePhotoCID")`.execute(db);
await sql`CREATE INDEX "IDX_auto_stack_id" ON "exif" ("autoStackId")`.execute(db);
await sql`CREATE INDEX "IDX_asset_exif_update_id" ON "exif" ("updateId")`.execute(db);
await sql`CREATE INDEX "face_index" ON "face_search" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)`.execute(db);
await sql`CREATE INDEX "IDX_geodata_gist_earthcoord" ON "geodata_places" (ll_to_earth_public(latitude, longitude))`.execute(db);
await sql`CREATE INDEX "idx_geodata_places_name" ON "geodata_places" USING gin (f_unaccent("name") gin_trgm_ops)`.execute(db);
await sql`CREATE INDEX "idx_geodata_places_admin2_name" ON "geodata_places" USING gin (f_unaccent("admin2Name") gin_trgm_ops)`.execute(db);
await sql`CREATE INDEX "idx_geodata_places_admin1_name" ON "geodata_places" USING gin (f_unaccent("admin1Name") gin_trgm_ops)`.execute(db);
await sql`CREATE INDEX "idx_geodata_places_alternate_names" ON "geodata_places" USING gin (f_unaccent("alternateNames") gin_trgm_ops)`.execute(db);
await sql`CREATE INDEX "IDX_575842846f0c28fa5da46c99b1" ON "memories" ("ownerId")`.execute(db);
await sql`CREATE INDEX "IDX_memories_update_id" ON "memories" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_984e5c9ab1f04d34538cd32334" ON "memories_assets_assets" ("memoriesId")`.execute(db);
await sql`CREATE INDEX "IDX_6942ecf52d75d4273de19d2c16" ON "memories_assets_assets" ("assetsId")`.execute(db);
await sql`CREATE INDEX "IDX_partners_audit_shared_by_id" ON "partners_audit" ("sharedById")`.execute(db);
await sql`CREATE INDEX "IDX_partners_audit_shared_with_id" ON "partners_audit" ("sharedWithId")`.execute(db);
await sql`CREATE INDEX "IDX_partners_audit_deleted_at" ON "partners_audit" ("deletedAt")`.execute(db);
await sql`CREATE INDEX "IDX_7e077a8b70b3530138610ff5e0" ON "partners" ("sharedById")`.execute(db);
await sql`CREATE INDEX "IDX_d7e875c6c60e661723dbf372fd" ON "partners" ("sharedWithId")`.execute(db);
await sql`CREATE INDEX "IDX_partners_update_id" ON "partners" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_57de40bc620f456c7311aa3a1e" ON "sessions" ("userId")`.execute(db);
await sql`CREATE INDEX "IDX_sessions_update_id" ON "sessions" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_66fe3837414c5a9f1c33ca4934" ON "shared_links" ("userId")`.execute(db);
await sql`CREATE INDEX "IDX_sharedlink_key" ON "shared_links" ("key")`.execute(db);
await sql`CREATE INDEX "IDX_sharedlink_albumId" ON "shared_links" ("albumId")`.execute(db);
await sql`CREATE INDEX "IDX_5b7decce6c8d3db9593d6111a6" ON "shared_link__asset" ("assetsId")`.execute(db);
await sql`CREATE INDEX "IDX_c9fab4aa97ffd1b034f3d6581a" ON "shared_link__asset" ("sharedLinksId")`.execute(db);
await sql`CREATE INDEX "clip_index" ON "smart_search" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)`.execute(db);
await sql`CREATE INDEX "IDX_d8ddd9d687816cc490432b3d4b" ON "session_sync_checkpoints" ("sessionId")`.execute(db);
await sql`CREATE INDEX "IDX_session_sync_checkpoints_update_id" ON "session_sync_checkpoints" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_92e67dc508c705dd66c9461557" ON "tags" ("userId")`.execute(db);
await sql`CREATE INDEX "IDX_9f9590cc11561f1f48ff034ef9" ON "tags" ("parentId")`.execute(db);
await sql`CREATE INDEX "IDX_tags_update_id" ON "tags" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_tag_asset_assetsId_tagsId" ON "tag_asset" ("assetsId", "tagsId")`.execute(db);
await sql`CREATE INDEX "IDX_f8e8a9e893cb5c54907f1b798e" ON "tag_asset" ("assetsId")`.execute(db);
await sql`CREATE INDEX "IDX_e99f31ea4cdf3a2c35c7287eb4" ON "tag_asset" ("tagsId")`.execute(db);
await sql`CREATE INDEX "IDX_15fbcbc67663c6bfc07b354c22" ON "tags_closure" ("id_ancestor")`.execute(db);
await sql`CREATE INDEX "IDX_b1a2a7ed45c29179b5ad51548a" ON "tags_closure" ("id_descendant")`.execute(db);
await sql`CREATE INDEX "IDX_users_audit_deleted_at" ON "users_audit" ("deletedAt")`.execute(db);
await sql`CREATE INDEX "IDX_6afb43681a21cf7815932bc38a" ON "user_metadata" ("userId")`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "users_delete_audit"
AFTER DELETE ON "users"
REFERENCING OLD TABLE AS "old"
FOR EACH STATEMENT
WHEN (pg_trigger_depth() = 0)
EXECUTE FUNCTION users_delete_audit();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "users_updated_at"
BEFORE UPDATE ON "users"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "libraries_updated_at"
BEFORE UPDATE ON "libraries"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "assets_delete_audit"
AFTER DELETE ON "assets"
REFERENCING OLD TABLE AS "old"
FOR EACH STATEMENT
WHEN (pg_trigger_depth() = 0)
EXECUTE FUNCTION assets_delete_audit();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "assets_updated_at"
BEFORE UPDATE ON "assets"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "albums_updated_at"
BEFORE UPDATE ON "albums"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "activity_updated_at"
BEFORE UPDATE ON "activity"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "api_keys_updated_at"
BEFORE UPDATE ON "api_keys"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "person_updated_at"
BEFORE UPDATE ON "person"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "asset_files_updated_at"
BEFORE UPDATE ON "asset_files"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "asset_exif_updated_at"
BEFORE UPDATE ON "exif"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "memories_updated_at"
BEFORE UPDATE ON "memories"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "partners_delete_audit"
AFTER DELETE ON "partners"
REFERENCING OLD TABLE AS "old"
FOR EACH STATEMENT
WHEN (pg_trigger_depth() = 0)
EXECUTE FUNCTION partners_delete_audit();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "partners_updated_at"
BEFORE UPDATE ON "partners"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "sessions_updated_at"
BEFORE UPDATE ON "sessions"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "session_sync_checkpoints_updated_at"
BEFORE UPDATE ON "session_sync_checkpoints"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "tags_updated_at"
BEFORE UPDATE ON "tags"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
}
export async function down(): Promise<void> {
// not implemented
}
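The initial schema migration is intentionally one-way: `down()` is a no-op. A reversal would have to drop everything created above in reverse dependency order (triggers, then indexes, then constraints, then tables). A minimal hedged sketch of that shape, assuming `down` received the same Kysely handle that `up` does; the object names are taken from the statements above:

export async function down(db: Kysely<any>): Promise<void> {
  // Reverse order of creation: triggers first, then indexes, then
  // constraints, finishing with a DROP TABLE per table.
  await sql`DROP TRIGGER IF EXISTS "tags_updated_at" ON "tags";`.execute(db);
  await sql`DROP INDEX IF EXISTS "IDX_users_update_id";`.execute(db);
  await sql`ALTER TABLE "tags" DROP CONSTRAINT IF EXISTS "UQ_79d6f16e52bb2c7130375246793";`.execute(db);
  // ...one DROP per object created in up()
}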

View File

@ -1,5 +1,4 @@
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/entities/asset.entity';
import { AssetStatus, AssetType } from 'src/enum';
import { assets_status_enum } from 'src/schema/enums';
import { assets_delete_audit } from 'src/schema/functions';
@ -17,6 +16,7 @@ import {
Table,
UpdateDateColumn,
} from 'src/sql-tools';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
@Table('assets')
@UpdatedAtTrigger('assets_updated_at')

View File

@ -1,6 +1,6 @@
import { Column, Index, PrimaryColumn, Table } from 'src/sql-tools';
@Table({ name: 'geodata_places' })
@Table({ name: 'geodata_places', synchronize: false })
@Index({
name: 'idx_geodata_places_alternate_names',
using: 'gin',
@ -26,11 +26,10 @@ import { Column, Index, PrimaryColumn, Table } from 'src/sql-tools';
synchronize: false,
})
@Index({
name: 'idx_geodata_places_gist_earthcoord',
name: 'IDX_geodata_gist_earthcoord',
expression: 'll_to_earth_public(latitude, longitude)',
synchronize: false,
})
@Table({ name: 'idx_geodata_places', synchronize: false })
export class GeodataPlacesTable {
@PrimaryColumn({ type: 'integer' })
id!: number;

View File

@ -6,15 +6,15 @@ import {
AlbumStatisticsResponseDto,
CreateAlbumDto,
GetAlbumsDto,
UpdateAlbumDto,
UpdateAlbumUserDto,
mapAlbum,
MapAlbumDto,
mapAlbumWithAssets,
mapAlbumWithoutAssets,
UpdateAlbumDto,
UpdateAlbumUserDto,
} from 'src/dtos/album.dto';
import { BulkIdResponseDto, BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AlbumEntity } from 'src/entities/album.entity';
import { Permission } from 'src/enum';
import { AlbumAssetCount, AlbumInfoOptions } from 'src/repositories/album.repository';
import { BaseService } from 'src/services/base.service';
@ -39,7 +39,7 @@ export class AlbumService extends BaseService {
async getAll({ user: { id: ownerId } }: AuthDto, { assetId, shared }: GetAlbumsDto): Promise<AlbumResponseDto[]> {
await this.albumRepository.updateThumbnails();
let albums: AlbumEntity[];
let albums: MapAlbumDto[];
if (assetId) {
albums = await this.albumRepository.getByAssetId(ownerId, assetId);
} else if (shared === true) {

View File

@ -8,10 +8,11 @@ import { Stats } from 'node:fs';
import { AssetFile } from 'src/database';
import { AssetMediaStatus, AssetRejectReason, AssetUploadAction } from 'src/dtos/asset-media-response.dto';
import { AssetMediaCreateDto, AssetMediaReplaceDto, AssetMediaSize, UploadFieldName } from 'src/dtos/asset-media.dto';
import { ASSET_CHECKSUM_CONSTRAINT, AssetEntity } from 'src/entities/asset.entity';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetFileType, AssetStatus, AssetType, CacheControl, JobName } from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { AssetMediaService } from 'src/services/asset-media.service';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
import { ImmichFileResponse } from 'src/utils/file';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
@ -173,7 +174,7 @@ const assetEntity = Object.freeze({
},
livePhotoVideoId: null,
sidecarPath: null,
}) as AssetEntity;
} as MapAsset);
const existingAsset = Object.freeze({
...assetEntity,
@ -182,18 +183,18 @@ const existingAsset = Object.freeze({
checksum: Buffer.from('_getExistingAsset', 'utf8'),
libraryId: 'libraryId',
originalFileName: 'existing-filename.jpeg',
}) as AssetEntity;
}) as MapAsset;
const sidecarAsset = Object.freeze({
...existingAsset,
sidecarPath: 'sidecar-path',
checksum: Buffer.from('_getExistingAssetWithSideCar', 'utf8'),
}) as AssetEntity;
}) as MapAsset;
const copiedAsset = Object.freeze({
id: 'copied-asset',
originalPath: 'copied-path',
}) as AssetEntity;
}) as MapAsset;
describe(AssetMediaService.name, () => {
let sut: AssetMediaService;
@ -819,8 +820,8 @@ describe(AssetMediaService.name, () => {
const file2 = Buffer.from('53be335e99f18a66ff12e9a901c7a6171dd76573', 'hex');
mocks.asset.getByChecksums.mockResolvedValue([
{ id: 'asset-1', checksum: file1 } as AssetEntity,
{ id: 'asset-2', checksum: file2 } as AssetEntity,
{ id: 'asset-1', checksum: file1, deletedAt: null },
{ id: 'asset-2', checksum: file2, deletedAt: null },
]);
await expect(
@ -856,7 +857,7 @@ describe(AssetMediaService.name, () => {
const file1 = Buffer.from('d2947b871a706081be194569951b7db246907957', 'hex');
const file2 = Buffer.from('53be335e99f18a66ff12e9a901c7a6171dd76573', 'hex');
mocks.asset.getByChecksums.mockResolvedValue([{ id: 'asset-1', checksum: file1 } as AssetEntity]);
mocks.asset.getByChecksums.mockResolvedValue([{ id: 'asset-1', checksum: file1, deletedAt: null }]);
await expect(
sut.bulkUploadCheck(authStub.admin, {

View File

@ -2,6 +2,7 @@ import { BadRequestException, Injectable, InternalServerErrorException, NotFound
import { extname } from 'node:path';
import sanitize from 'sanitize-filename';
import { StorageCore } from 'src/cores/storage.core';
import { Asset } from 'src/database';
import {
AssetBulkUploadCheckResponseDto,
AssetMediaResponseDto,
@ -20,13 +21,13 @@ import {
UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ASSET_CHECKSUM_CONSTRAINT, AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus, AssetType, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { UploadFile } from 'src/types';
import { requireUploadAccess } from 'src/utils/access';
import { asRequest, getAssetFiles, onBeforeLink } from 'src/utils/asset.util';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
import { getFilenameExtension, getFileNameWithoutExtension, ImmichFileResponse } from 'src/utils/file';
import { mimeTypes } from 'src/utils/mime-types';
import { fromChecksum } from 'src/utils/request';
@ -212,7 +213,7 @@ export class AssetMediaService extends BaseService {
const asset = await this.findOrFail(id);
const size = dto.size ?? AssetMediaSize.THUMBNAIL;
const { thumbnailFile, previewFile, fullsizeFile } = getAssetFiles(asset.files);
const { thumbnailFile, previewFile, fullsizeFile } = getAssetFiles(asset.files ?? []);
let filepath = previewFile?.path;
if (size === AssetMediaSize.THUMBNAIL && thumbnailFile) {
filepath = thumbnailFile.path;
@ -375,7 +376,7 @@ export class AssetMediaService extends BaseService {
* Uses only vital properties, excluding things like stacks, faces, smart search info, etc.,
* and then queues a METADATA_EXTRACTION job.
*/
private async createCopy(asset: AssetEntity): Promise<AssetEntity> {
private async createCopy(asset: Omit<Asset, 'id'>) {
const created = await this.assetRepository.create({
ownerId: asset.ownerId,
originalPath: asset.originalPath,
@ -398,12 +399,7 @@ export class AssetMediaService extends BaseService {
return created;
}
private async create(
ownerId: string,
dto: AssetMediaCreateDto,
file: UploadFile,
sidecarFile?: UploadFile,
): Promise<AssetEntity> {
private async create(ownerId: string, dto: AssetMediaCreateDto, file: UploadFile, sidecarFile?: UploadFile) {
const asset = await this.assetRepository.create({
ownerId,
libraryId: null,
@ -444,7 +440,7 @@ export class AssetMediaService extends BaseService {
}
}
private async findOrFail(id: string): Promise<AssetEntity> {
private async findOrFail(id: string) {
const asset = await this.assetRepository.getById(id, { files: true });
if (!asset) {
throw new NotFoundException('Asset not found');

View File

@ -1,8 +1,7 @@
import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
import { AssetJobName, AssetStatsResponseDto } from 'src/dtos/asset.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus, AssetType, JobName, JobStatus } from 'src/enum';
import { AssetStats } from 'src/repositories/asset.repository';
import { AssetService } from 'src/services/asset.service';
@ -35,7 +34,7 @@ describe(AssetService.name, () => {
expect(sut).toBeDefined();
});
const mockGetById = (assets: AssetEntity[]) => {
const mockGetById = (assets: MapAsset[]) => {
mocks.asset.getById.mockImplementation((assetId) => Promise.resolve(assets.find((asset) => asset.id === assetId)));
};
@ -608,7 +607,7 @@ describe(AssetService.name, () => {
mocks.asset.getById.mockResolvedValue({
...assetStub.primaryImage,
stack: { ...assetStub.primaryImage.stack, assets: assetStub.primaryImage.stack!.assets.slice(0, 2) },
} as AssetEntity);
});
await sut.handleAssetDeletion({ id: assetStub.primaryImage.id, deleteOnDisk: true });

View File

@ -5,6 +5,7 @@ import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { OnJob } from 'src/decorators';
import {
AssetResponseDto,
MapAsset,
MemoryLaneResponseDto,
SanitizedAssetResponseDto,
mapAsset,
@ -20,7 +21,6 @@ import {
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { MemoryLaneDto } from 'src/dtos/search.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus, JobName, JobStatus, Permission, QueueName } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
@ -43,7 +43,7 @@ export class AssetService extends BaseService {
yearsAgo,
// TODO move this to clients
title: `${yearsAgo} year${yearsAgo > 1 ? 's' : ''} ago`,
assets: assets.map((asset) => mapAsset(asset as unknown as AssetEntity, { auth })),
assets: assets.map((asset) => mapAsset(asset, { auth })),
};
});
}
@ -105,7 +105,7 @@ export class AssetService extends BaseService {
const { description, dateTimeOriginal, latitude, longitude, rating, ...rest } = dto;
const repos = { asset: this.assetRepository, event: this.eventRepository };
let previousMotion: AssetEntity | null = null;
let previousMotion: MapAsset | null = null;
if (rest.livePhotoVideoId) {
await onBeforeLink(repos, { userId: auth.user.id, livePhotoVideoId: rest.livePhotoVideoId });
} else if (rest.livePhotoVideoId === null) {
@ -233,7 +233,7 @@ export class AssetService extends BaseService {
}
}
const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files);
const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files ?? []);
const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];
if (deleteOnDisk) {

View File

@ -68,7 +68,7 @@ export class DuplicateService extends BaseService {
return JobStatus.SKIPPED;
}
const previewFile = getAssetFile(asset.files, AssetFileType.PREVIEW);
const previewFile = getAssetFile(asset.files || [], AssetFileType.PREVIEW);
if (!previewFile) {
this.logger.warn(`Asset ${id} is missing preview image`);
return JobStatus.FAILED;

View File

@ -285,9 +285,9 @@ describe(JobService.name, () => {
it(`should queue ${jobs.length} jobs when a ${item.name} job finishes successfully`, async () => {
if (item.name === JobName.GENERATE_THUMBNAILS && item.data.source === 'upload') {
if (item.data.id === 'asset-live-image') {
mocks.asset.getByIdsWithAllRelations.mockResolvedValue([assetStub.livePhotoStillAsset]);
mocks.asset.getByIdsWithAllRelationsButStacks.mockResolvedValue([assetStub.livePhotoStillAsset as any]);
} else {
mocks.asset.getByIdsWithAllRelations.mockResolvedValue([assetStub.livePhotoMotionAsset]);
mocks.asset.getByIdsWithAllRelationsButStacks.mockResolvedValue([assetStub.livePhotoMotionAsset as any]);
}
}

View File

@ -254,7 +254,7 @@ export class JobService extends BaseService {
case JobName.METADATA_EXTRACTION: {
if (item.data.source === 'sidecar-write') {
const [asset] = await this.assetRepository.getByIdsWithAllRelations([item.data.id]);
const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([item.data.id]);
if (asset) {
this.eventRepository.clientSend('on_asset_update', asset.ownerId, mapAsset(asset));
}
@ -284,7 +284,7 @@ export class JobService extends BaseService {
break;
}
const [asset] = await this.assetRepository.getByIdsWithAllRelations([item.data.id]);
const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([item.data.id]);
if (!asset) {
this.logger.warn(`Could not find asset ${item.data.id} after generating thumbnails`);
break;

View File

@ -350,7 +350,7 @@ describe(LibraryService.name, () => {
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockRejectedValue(new Error('ENOENT, no such file or directory'));
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
@ -371,7 +371,7 @@ describe(LibraryService.name, () => {
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
@ -392,7 +392,7 @@ describe(LibraryService.name, () => {
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
@ -410,7 +410,7 @@ describe(LibraryService.name, () => {
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
@ -431,7 +431,7 @@ describe(LibraryService.name, () => {
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
@ -451,7 +451,7 @@ describe(LibraryService.name, () => {
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
@ -471,7 +471,7 @@ describe(LibraryService.name, () => {
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
@ -489,7 +489,7 @@ describe(LibraryService.name, () => {
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
@ -518,7 +518,7 @@ describe(LibraryService.name, () => {
const mtime = new Date(assetStub.external.fileModifiedAt.getDate() + 1);
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockResolvedValue({ mtime } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

View File

@ -18,7 +18,6 @@ import {
ValidateLibraryImportPathResponseDto,
ValidateLibraryResponseDto,
} from 'src/dtos/library.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus, AssetType, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { AssetSyncResult } from 'src/repositories/library.repository';
@ -467,7 +466,7 @@ export class LibraryService extends BaseService {
@OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY })
async handleSyncAssets(job: JobOf<JobName.LIBRARY_SYNC_ASSETS>): Promise<JobStatus> {
const assets = await this.assetRepository.getByIds(job.assetIds);
const assets = await this.assetJobRepository.getForSyncAssets(job.assetIds);
const assetIdsToOffline: string[] = [];
const trashedAssetIdsToOffline: string[] = [];
@ -561,7 +560,16 @@ export class LibraryService extends BaseService {
return JobStatus.SUCCESS;
}
private checkExistingAsset(asset: AssetEntity, stat: Stats | null): AssetSyncResult {
private checkExistingAsset(
asset: {
isOffline: boolean;
libraryId: string | null;
originalPath: string;
status: AssetStatus;
fileModifiedAt: Date;
},
stat: Stats | null,
): AssetSyncResult {
if (!stat) {
// File not found on disk or permission error
if (asset.isOffline) {

View File

@ -15,6 +15,14 @@ describe(MemoryService.name, () => {
expect(sut).toBeDefined();
});
describe('onMemoryCleanup', () => {
it('should clean up memories', async () => {
mocks.memory.cleanup.mockResolvedValue([]);
await sut.onMemoriesCleanup();
expect(mocks.memory.cleanup).toHaveBeenCalled();
});
});
describe('search', () => {
it('should search memories', async () => {
const [userId] = newUuids();

View File

@ -3,7 +3,7 @@ import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { defaults } from 'src/config';
import { AssetEntity } from 'src/entities/asset.entity';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { ImmichTags } from 'src/repositories/metadata.repository';
@ -549,7 +549,6 @@ describe(MetadataService.name, () => {
livePhotoVideoId: null,
libraryId: null,
});
mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
@ -719,7 +718,7 @@ describe(MetadataService.name, () => {
});
mocks.crypto.hashSha1.mockReturnValue(randomBytes(512));
mocks.asset.create.mockImplementation(
(asset) => Promise.resolve({ ...assetStub.livePhotoMotionAsset, ...asset }) as Promise<AssetEntity>,
(asset) => Promise.resolve({ ...assetStub.livePhotoMotionAsset, ...asset }) as Promise<MapAsset>,
);
const video = randomBytes(512);
mocks.storage.readFile.mockResolvedValue(video);
@ -1394,7 +1393,7 @@ describe(MetadataService.name, () => {
});
it('should set sidecar path if exists (sidecar named photo.xmp)', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecarWithoutExt]);
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecarWithoutExt as any]);
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
@ -1446,7 +1445,7 @@ describe(MetadataService.name, () => {
describe('handleSidecarDiscovery', () => {
it('should skip hidden assets', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset]);
mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset as any]);
await sut.handleSidecarDiscovery({ id: assetStub.livePhotoMotionAsset.id });
expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
});

View File

@ -271,7 +271,7 @@ export class MetadataService extends BaseService {
];
if (this.isMotionPhoto(asset, exifTags)) {
promises.push(this.applyMotionPhotos(asset as unknown as Asset, exifTags, dates, stats));
promises.push(this.applyMotionPhotos(asset, exifTags, dates, stats));
}
if (isFaceImportEnabled(metadata) && this.hasTaggedFaces(exifTags)) {

View File

@ -2,7 +2,8 @@ import { BadRequestException, Injectable, NotFoundException } from '@nestjs/comm
import { Insertable, Updateable } from 'kysely';
import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { AssetFaces, FaceSearch, Person } from 'src/db';
import { Person } from 'src/database';
import { AssetFaces, FaceSearch } from 'src/db';
import { Chunked, OnJob } from 'src/decorators';
import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
@ -315,6 +316,7 @@ export class PersonService extends BaseService {
const facesToAdd: (Insertable<AssetFaces> & { id: string })[] = [];
const embeddings: FaceSearch[] = [];
const mlFaceIds = new Set<string>();
for (const face of asset.faces) {
if (face.sourceType === SourceType.MACHINE_LEARNING) {
mlFaceIds.add(face.id);
@ -477,7 +479,7 @@ export class PersonService extends BaseService {
embedding: face.faceSearch.embedding,
maxDistance: machineLearning.facialRecognition.maxDistance,
numResults: machineLearning.facialRecognition.minFaces,
minBirthDate: face.asset.fileCreatedAt,
minBirthDate: face.asset.fileCreatedAt ?? undefined,
});
// `matches` also includes the face itself
@ -503,7 +505,7 @@ export class PersonService extends BaseService {
maxDistance: machineLearning.facialRecognition.maxDistance,
numResults: 1,
hasPerson: true,
minBirthDate: face.asset.fileCreatedAt,
minBirthDate: face.asset.fileCreatedAt ?? undefined,
});
if (matchWithPerson.length > 0) {

View File

@ -39,13 +39,36 @@ describe(SearchService.name, () => {
});
});
describe('searchPlaces', () => {
it('should search places', async () => {
mocks.search.searchPlaces.mockResolvedValue([
{
id: 42,
name: 'my place',
latitude: 420,
longitude: 69,
admin1Code: null,
admin1Name: null,
admin2Code: null,
admin2Name: null,
alternateNames: null,
countryCode: 'US',
modificationDate: new Date(),
},
]);
await sut.searchPlaces({ name: 'place' });
expect(mocks.search.searchPlaces).toHaveBeenCalledWith('place');
});
});
describe('getExploreData', () => {
it('should get assets by city and tag', async () => {
mocks.asset.getAssetIdByCity.mockResolvedValue({
fieldName: 'exifInfo.city',
items: [{ value: 'test-city', data: assetStub.withLocation.id }],
});
mocks.asset.getByIdsWithAllRelations.mockResolvedValue([assetStub.withLocation]);
mocks.asset.getByIdsWithAllRelationsButStacks.mockResolvedValue([assetStub.withLocation]);
const expectedResponse = [
{ fieldName: 'exifInfo.city', items: [{ value: 'test-city', data: mapAsset(assetStub.withLocation) }] },
];

View File

@ -1,5 +1,5 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { AssetMapOptions, AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AssetMapOptions, AssetResponseDto, MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
import {
@ -14,7 +14,6 @@ import {
SearchSuggestionType,
SmartSearchDto,
} from 'src/dtos/search.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetOrder } from 'src/enum';
import { SearchExploreItem } from 'src/repositories/search.repository';
import { BaseService } from 'src/services/base.service';
@ -36,7 +35,7 @@ export class SearchService extends BaseService {
async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
const options = { maxFields: 12, minAssetsPerField: 5 };
const cities = await this.assetRepository.getAssetIdByCity(auth.user.id, options);
const assets = await this.assetRepository.getByIdsWithAllRelations(cities.items.map(({ data }) => data));
const assets = await this.assetRepository.getByIdsWithAllRelationsButStacks(cities.items.map(({ data }) => data));
const items = assets.map((asset) => ({ value: asset.exifInfo!.city!, data: mapAsset(asset, { auth }) }));
return [{ fieldName: cities.fieldName, items }];
}
@ -139,7 +138,7 @@ export class SearchService extends BaseService {
return [auth.user.id, ...partnerIds];
}
private mapResponse(assets: AssetEntity[], nextPage: string | null, options: AssetMapOptions): SearchResponseDto {
private mapResponse(assets: MapAsset[], nextPage: string | null, options: AssetMapOptions): SearchResponseDto {
return {
albums: { total: 0, count: 0, items: [], facets: [] },
assets: {

View File

@ -244,7 +244,7 @@ describe(SharedLinkService.name, () => {
await sut.remove(authStub.user1, sharedLinkStub.valid.id);
expect(mocks.sharedLink.get).toHaveBeenCalledWith(authStub.user1.user.id, sharedLinkStub.valid.id);
expect(mocks.sharedLink.remove).toHaveBeenCalledWith(sharedLinkStub.valid);
expect(mocks.sharedLink.remove).toHaveBeenCalledWith(sharedLinkStub.valid.id);
});
});
@ -333,8 +333,7 @@ describe(SharedLinkService.name, () => {
});
it('should return metadata tags with a default image path if the asset id is not set', async () => {
mocks.sharedLink.get.mockResolvedValue({ ...sharedLinkStub.individual, album: undefined, assets: [] });
mocks.sharedLink.get.mockResolvedValue({ ...sharedLinkStub.individual, album: null, assets: [] });
await expect(sut.getMetadataTags(authStub.adminSharedLink)).resolves.toEqual({
description: '0 shared photos & videos',
imageUrl: `https://my.immich.app/feature-panel.png`,

View File

@ -1,4 +1,5 @@
import { BadRequestException, ForbiddenException, Injectable, UnauthorizedException } from '@nestjs/common';
import { SharedLink } from 'src/database';
import { AssetIdErrorReason, AssetIdsResponseDto } from 'src/dtos/asset-ids.response.dto';
import { AssetIdsDto } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
@ -11,7 +12,6 @@ import {
SharedLinkResponseDto,
SharedLinkSearchDto,
} from 'src/dtos/shared-link.dto';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { Permission, SharedLinkType } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { getExternalDomain, OpenGraphTags } from 'src/utils/misc';
@ -98,7 +98,7 @@ export class SharedLinkService extends BaseService {
async remove(auth: AuthDto, id: string): Promise<void> {
const sharedLink = await this.findOrFail(auth.user.id, id);
await this.sharedLinkRepository.remove(sharedLink);
await this.sharedLinkRepository.remove(sharedLink.id);
}
// TODO: replace `userId` with permissions and access control checks
@ -182,7 +182,7 @@ export class SharedLinkService extends BaseService {
const config = await this.getConfig({ withCache: true });
const sharedLink = await this.findOrFail(auth.sharedLink.userId, auth.sharedLink.id);
const assetId = sharedLink.album?.albumThumbnailAssetId || sharedLink.assets[0]?.id;
const assetCount = sharedLink.assets.length > 0 ? sharedLink.assets.length : sharedLink.album?.assets.length || 0;
const assetCount = sharedLink.assets.length > 0 ? sharedLink.assets.length : sharedLink.album?.assets?.length || 0;
const imagePath = assetId
? `/api/assets/${assetId}/thumbnail?key=${sharedLink.key.toString('base64url')}`
: '/feature-panel.png';
@ -194,11 +194,11 @@ export class SharedLinkService extends BaseService {
};
}
private mapToSharedLink(sharedLink: SharedLinkEntity, { withExif }: { withExif: boolean }) {
private mapToSharedLink(sharedLink: SharedLink, { withExif }: { withExif: boolean }) {
return withExif ? mapSharedLink(sharedLink) : mapSharedLinkWithoutMetadata(sharedLink);
}
private validateAndRefreshToken(sharedLink: SharedLinkEntity, dto: SharedLinkPasswordDto): string {
private validateAndRefreshToken(sharedLink: SharedLink, dto: SharedLinkPasswordDto): string {
const token = this.cryptoRepository.hashSha256(`${sharedLink.id}-${sharedLink.password}`);
const sharedLinkTokens = dto.token?.split(',') || [];
if (sharedLink.password !== dto.password && !sharedLinkTokens.includes(token)) {

View File

@ -1,5 +1,4 @@
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { SyncService } from 'src/services/sync.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
@ -63,7 +62,7 @@ describe(SyncService.name, () => {
it('should return a response requiring a full sync when there are too many changes', async () => {
mocks.partner.getAll.mockResolvedValue([]);
mocks.asset.getChangedDeltaSync.mockResolvedValue(
Array.from<AssetEntity>({ length: 10_000 }).fill(assetStub.image),
Array.from<typeof assetStub.image>({ length: 10_000 }).fill(assetStub.image),
);
await expect(
sut.getDeltaSync(authStub.user1, { updatedAfter: new Date(), userIds: [authStub.user1.user.id] }),

View File

@ -1,7 +1,6 @@
import { Injectable } from '@nestjs/common';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { BaseService } from 'src/services/base.service';
@Injectable()
@ -12,6 +11,6 @@ export class ViewService extends BaseService {
async getAssetsByOriginalPath(auth: AuthDto, path: string): Promise<AssetResponseDto[]> {
const assets = await this.viewRepository.getAssetsByOriginalPath(auth.user.id, path);
return assets.map((asset) => mapAsset(asset as unknown as AssetEntity, { auth }));
return assets.map((asset) => mapAsset(asset, { auth }));
}
}

View File

@ -6,6 +6,13 @@ export const processTables: Processor = (builder, items) => {
for (const {
item: { options, object },
} of items.filter((item) => item.type === 'table')) {
const test = readMetadata(object);
if (test) {
throw new Error(
`Table ${test.name} has already been registered. Does ${object.name} have two @Table() decorators?`,
);
}
const tableName = options.name || asSnakeCase(object.name);
writeMetadata(object, { name: tableName, options });
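The guard above makes double registration fail loudly instead of silently overwriting table metadata. A hedged sketch of the case it now rejects, modeled on the duplicate `@Table()` decorators visible on `GeodataPlacesTable` earlier in this diff:

// With two @Table() registrations for one class, processTables writes
// metadata for the first and throws on the second.
@Table({ name: 'geodata_places', synchronize: false })
@Table({ name: 'idx_geodata_places', synchronize: false })
export class GeodataPlacesTable {}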

View File

@ -50,12 +50,8 @@ const asFunctionExpression = (options: FunctionOptions) => {
}
if ('body' in options) {
sql.push(
//
`AS $$`,
' ' + options.body.trim(),
`$$;`,
);
const body = options.body;
sql.push(...(body.includes('\n') ? [`AS $$`, ' ' + body.trim(), `$$;`] : [`AS $$${body}$$;`]));
}
return sql.join('\n ').trim();

View File

@ -13,11 +13,8 @@ import { PartnerRepository } from 'src/repositories/partner.repository';
import { IBulkAsset, ImmichFile, UploadFile } from 'src/types';
import { checkAccess } from 'src/utils/access';
export const getAssetFile = <T extends { type: AssetFileType }>(
files: T[],
type: AssetFileType | GeneratedImageType,
) => {
return (files || []).find((file) => file.type === type);
export const getAssetFile = (files: AssetFile[], type: AssetFileType | GeneratedImageType) => {
return files.find((file) => file.type === type);
};
export const getAssetFiles = (files: AssetFile[]) => ({

View File

@ -1,15 +1,24 @@
import {
DeduplicateJoinsPlugin,
Expression,
ExpressionBuilder,
ExpressionWrapper,
Kysely,
KyselyConfig,
Nullable,
Selectable,
SelectQueryBuilder,
Simplify,
sql,
} from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
import postgres, { Notice } from 'postgres';
import { columns, Exif, Person } from 'src/database';
import { DB } from 'src/db';
import { AssetFileType } from 'src/enum';
import { TimeBucketSize } from 'src/repositories/asset.repository';
import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
type Ssl = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;
@ -112,3 +121,225 @@ export function toJson<DB, TB extends keyof DB & string, T extends TB | Expressi
>
>;
}
export const ASSET_CHECKSUM_CONSTRAINT = 'UQ_assets_owner_checksum';
// TODO come up with a better query that only selects the fields we need
export function withExif<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
return qb
.leftJoin('exif', 'assets.id', 'exif.assetId')
.select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif | null>().as('exifInfo'));
}
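// Hypothetical usage sketch (not part of this commit): these helpers compose
// onto a Kysely select builder via $call, e.g.
//   db.selectFrom('assets').selectAll('assets').$call(withExif)
//     .where('assets.id', '=', assetId).executeTakeFirst();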
export function withExifInner<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
return qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif>().as('exifInfo'));
}
export function withSmartSearch<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
return qb
.leftJoin('smart_search', 'assets.id', 'smart_search.assetId')
.select((eb) => toJson(eb, 'smart_search').as('smartSearch'));
}
export function withFaces(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
return jsonArrayFrom(
eb
.selectFrom('asset_faces')
.selectAll('asset_faces')
.whereRef('asset_faces.assetId', '=', 'assets.id')
.$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
).as('faces');
}
export function withFiles(eb: ExpressionBuilder<DB, 'assets'>, type?: AssetFileType) {
return jsonArrayFrom(
eb
.selectFrom('asset_files')
.select(columns.assetFiles)
.whereRef('asset_files.assetId', '=', 'assets.id')
.$if(!!type, (qb) => qb.where('asset_files.type', '=', type!)),
).as('files');
}
export function withFacesAndPeople(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
return jsonArrayFrom(
eb
.selectFrom('asset_faces')
.leftJoinLateral(
(eb) =>
eb.selectFrom('person').selectAll('person').whereRef('asset_faces.personId', '=', 'person.id').as('person'),
(join) => join.onTrue(),
)
.selectAll('asset_faces')
.select((eb) => eb.table('person').$castTo<Person>().as('person'))
.whereRef('asset_faces.assetId', '=', 'assets.id')
.$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
).as('faces');
}
export function hasPeople<O>(qb: SelectQueryBuilder<DB, 'assets', O>, personIds: string[]) {
return qb.innerJoin(
(eb) =>
eb
.selectFrom('asset_faces')
.select('assetId')
.where('personId', '=', anyUuid(personIds!))
.where('deletedAt', 'is', null)
.groupBy('assetId')
.having((eb) => eb.fn.count('personId').distinct(), '=', personIds.length)
.as('has_people'),
(join) => join.onRef('has_people.assetId', '=', 'assets.id'),
);
}
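// The DISTINCT count turns the join into an "all of" filter: an asset passes
// only when every id in personIds has at least one non-deleted face row.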
export function hasTags<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagIds: string[]) {
return qb.innerJoin(
(eb) =>
eb
.selectFrom('tag_asset')
.select('assetsId')
.innerJoin('tags_closure', 'tag_asset.tagsId', 'tags_closure.id_descendant')
.where('tags_closure.id_ancestor', '=', anyUuid(tagIds))
.groupBy('assetsId')
.having((eb) => eb.fn.count('tags_closure.id_ancestor').distinct(), '>=', tagIds.length)
.as('has_tags'),
(join) => join.onRef('has_tags.assetsId', '=', 'assets.id'),
);
}
export function withOwner(eb: ExpressionBuilder<DB, 'assets'>) {
return jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'assets.ownerId')).as(
'owner',
);
}
export function withLibrary(eb: ExpressionBuilder<DB, 'assets'>) {
return jsonObjectFrom(
eb.selectFrom('libraries').selectAll('libraries').whereRef('libraries.id', '=', 'assets.libraryId'),
).as('library');
}
export function withTags(eb: ExpressionBuilder<DB, 'assets'>) {
return jsonArrayFrom(
eb
.selectFrom('tags')
.select(columns.tag)
.innerJoin('tag_asset', 'tags.id', 'tag_asset.tagsId')
.whereRef('assets.id', '=', 'tag_asset.assetsId'),
).as('tags');
}
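
// Editorial note: the double `at time zone 'UTC'` round-trip makes date_trunc
// operate on UTC wall-clock values, so bucket boundaries do not shift with the
// session's TimeZone setting.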
export function truncatedDate<O>(size: TimeBucketSize) {
return sql<O>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
}
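
// Editorial note: unlike hasTags, this filters on a single tag via an EXISTS
// subquery, so no grouping or join deduplication is needed.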
export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: string) {
return qb.where((eb) =>
eb.exists(
eb
.selectFrom('tags_closure')
.innerJoin('tag_asset', 'tag_asset.tagsId', 'tags_closure.id_descendant')
.whereRef('tag_asset.assetsId', '=', 'assets.id')
.where('tags_closure.id_ancestor', '=', tagId),
),
);
}
const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
/** TODO: This should only be used for search-related queries, not as a general-purpose query builder */
export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuilderOptions) {
options.isArchived ??= options.withArchived ? undefined : false;
options.withDeleted ||= !!(options.trashedAfter || options.trashedBefore || options.isOffline);
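  // Editorial note: archived assets are excluded unless the caller opts in via
  // withArchived, and any trash-related filter implicitly enables withDeleted so
  // that soft-deleted rows stay visible to those filters.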
return kysely
.withPlugin(joinDeduplicationPlugin)
.selectFrom('assets')
.selectAll('assets')
.$if(!!options.tagIds && options.tagIds.length > 0, (qb) => hasTags(qb, options.tagIds!))
.$if(!!options.personIds && options.personIds.length > 0, (qb) => hasPeople(qb, options.personIds!))
.$if(!!options.createdBefore, (qb) => qb.where('assets.createdAt', '<=', options.createdBefore!))
.$if(!!options.createdAfter, (qb) => qb.where('assets.createdAt', '>=', options.createdAfter!))
.$if(!!options.updatedBefore, (qb) => qb.where('assets.updatedAt', '<=', options.updatedBefore!))
.$if(!!options.updatedAfter, (qb) => qb.where('assets.updatedAt', '>=', options.updatedAfter!))
.$if(!!options.trashedBefore, (qb) => qb.where('assets.deletedAt', '<=', options.trashedBefore!))
.$if(!!options.trashedAfter, (qb) => qb.where('assets.deletedAt', '>=', options.trashedAfter!))
.$if(!!options.takenBefore, (qb) => qb.where('assets.fileCreatedAt', '<=', options.takenBefore!))
.$if(!!options.takenAfter, (qb) => qb.where('assets.fileCreatedAt', '>=', options.takenAfter!))
.$if(options.city !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.city', options.city === null ? 'is' : '=', options.city!),
)
.$if(options.state !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.state', options.state === null ? 'is' : '=', options.state!),
)
.$if(options.country !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.country', options.country === null ? 'is' : '=', options.country!),
)
.$if(options.make !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.make', options.make === null ? 'is' : '=', options.make!),
)
.$if(options.model !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.model', options.model === null ? 'is' : '=', options.model!),
)
.$if(options.lensModel !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.lensModel', options.lensModel === null ? 'is' : '=', options.lensModel!),
)
.$if(options.rating !== undefined, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where('exif.rating', options.rating === null ? 'is' : '=', options.rating!),
)
.$if(!!options.checksum, (qb) => qb.where('assets.checksum', '=', options.checksum!))
.$if(!!options.deviceAssetId, (qb) => qb.where('assets.deviceAssetId', '=', options.deviceAssetId!))
.$if(!!options.deviceId, (qb) => qb.where('assets.deviceId', '=', options.deviceId!))
.$if(!!options.id, (qb) => qb.where('assets.id', '=', asUuid(options.id!)))
.$if(!!options.libraryId, (qb) => qb.where('assets.libraryId', '=', asUuid(options.libraryId!)))
.$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
.$if(!!options.encodedVideoPath, (qb) => qb.where('assets.encodedVideoPath', '=', options.encodedVideoPath!))
.$if(!!options.originalPath, (qb) =>
qb.where(sql`f_unaccent(assets."originalPath")`, 'ilike', sql`'%' || f_unaccent(${options.originalPath}) || '%'`),
)
.$if(!!options.originalFileName, (qb) =>
qb.where(
sql`f_unaccent(assets."originalFileName")`,
'ilike',
sql`'%' || f_unaccent(${options.originalFileName}) || '%'`,
),
)
.$if(!!options.description, (qb) =>
qb
.innerJoin('exif', 'assets.id', 'exif.assetId')
.where(sql`f_unaccent(exif.description)`, 'ilike', sql`'%' || f_unaccent(${options.description}) || '%'`),
)
.$if(!!options.type, (qb) => qb.where('assets.type', '=', options.type!))
.$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
.$if(options.isOffline !== undefined, (qb) => qb.where('assets.isOffline', '=', options.isOffline!))
.$if(options.isVisible !== undefined, (qb) => qb.where('assets.isVisible', '=', options.isVisible!))
.$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
.$if(options.isEncoded !== undefined, (qb) =>
qb.where('assets.encodedVideoPath', options.isEncoded ? 'is not' : 'is', null),
)
.$if(options.isMotion !== undefined, (qb) =>
qb.where('assets.livePhotoVideoId', options.isMotion ? 'is not' : 'is', null),
)
.$if(!!options.isNotInAlbum, (qb) =>
qb.where((eb) =>
eb.not(eb.exists((eb) => eb.selectFrom('albums_assets_assets').whereRef('assetsId', '=', 'assets.id'))),
),
)
.$if(!!options.withExif, withExifInner)
.$if(!!(options.withFaces || options.withPeople || options.personIds), (qb) => qb.select(withFacesAndPeople))
.$if(!options.withDeleted, (qb) => qb.where('assets.deletedAt', 'is', null));
}
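
// Editor's sketch (not part of the original file): a paginated favorites search
// for one user with EXIF attached; the option names are those defined on
// AssetSearchBuilderOptions above.
export function exampleFavoriteSearch(db: Kysely<DB>, userId: string) {
  return searchAssetBuilder(db, { userIds: [userId], isFavorite: true, withExif: true })
    .orderBy('assets.fileCreatedAt', 'desc') // newest first
    .limit(100)
    .execute();
}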

View File

@ -1,11 +1,10 @@
import { AlbumEntity } from 'src/entities/album.entity';
import { AlbumUserRole, AssetOrder } from 'src/enum';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { userStub } from 'test/fixtures/user.stub';
export const albumStub = {
empty: Object.freeze<AlbumEntity>({
empty: Object.freeze({
id: 'album-1',
albumName: 'Empty album',
description: '',
@ -21,8 +20,9 @@ export const albumStub = {
albumUsers: [],
isActivityEnabled: true,
order: AssetOrder.DESC,
updateId: '42',
}),
sharedWithUser: Object.freeze<AlbumEntity>({
sharedWithUser: Object.freeze({
id: 'album-2',
albumName: 'Empty album shared with user',
description: '',
@ -43,8 +43,9 @@ export const albumStub = {
],
isActivityEnabled: true,
order: AssetOrder.DESC,
updateId: '42',
}),
sharedWithMultiple: Object.freeze<AlbumEntity>({
sharedWithMultiple: Object.freeze({
id: 'album-3',
albumName: 'Empty album shared with users',
description: '',
@ -69,8 +70,9 @@ export const albumStub = {
],
isActivityEnabled: true,
order: AssetOrder.DESC,
updateId: '42',
}),
sharedWithAdmin: Object.freeze<AlbumEntity>({
sharedWithAdmin: Object.freeze({
id: 'album-3',
albumName: 'Empty album shared with admin',
description: '',
@ -91,8 +93,9 @@ export const albumStub = {
],
isActivityEnabled: true,
order: AssetOrder.DESC,
updateId: '42',
}),
oneAsset: Object.freeze<AlbumEntity>({
oneAsset: Object.freeze({
id: 'album-4',
albumName: 'Album with one asset',
description: '',
@ -108,8 +111,9 @@ export const albumStub = {
albumUsers: [],
isActivityEnabled: true,
order: AssetOrder.DESC,
updateId: '42',
}),
twoAssets: Object.freeze<AlbumEntity>({
twoAssets: Object.freeze({
id: 'album-4a',
albumName: 'Album with two assets',
description: '',
@ -125,8 +129,9 @@ export const albumStub = {
albumUsers: [],
isActivityEnabled: true,
order: AssetOrder.DESC,
updateId: '42',
}),
emptyWithValidThumbnail: Object.freeze<AlbumEntity>({
emptyWithValidThumbnail: Object.freeze({
id: 'album-5',
albumName: 'Empty album with valid thumbnail',
description: '',
@ -142,5 +147,6 @@ export const albumStub = {
albumUsers: [],
isActivityEnabled: true,
order: AssetOrder.DESC,
updateId: '42',
}),
};

View File

@ -1,5 +1,5 @@
import { AssetFile, Exif } from 'src/database';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetFace, AssetFile, Exif } from 'src/database';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetFileType, AssetStatus, AssetType } from 'src/enum';
import { StorageAsset } from 'src/types';
import { authStub } from 'test/fixtures/auth.stub';
@ -26,13 +26,15 @@ const fullsizeFile: AssetFile = {
const files: AssetFile[] = [fullsizeFile, previewFile, thumbnailFile];
export const stackStub = (stackId: string, assets: AssetEntity[]) => {
export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif })[]) => {
return {
id: stackId,
assets,
ownerId: assets[0].ownerId,
primaryAsset: assets[0],
primaryAssetId: assets[0].id,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
};
};
@ -85,9 +87,12 @@ export const assetStub = {
isExternal: false,
duplicateId: null,
isOffline: false,
libraryId: null,
stackId: null,
updateId: '42',
}),
noWebpPath: Object.freeze<AssetEntity>({
noWebpPath: Object.freeze({
id: 'asset-id',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -122,9 +127,12 @@ export const assetStub = {
deletedAt: null,
duplicateId: null,
isOffline: false,
libraryId: null,
stackId: null,
updateId: '42',
}),
noThumbhash: Object.freeze<AssetEntity>({
noThumbhash: Object.freeze({
id: 'asset-id',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -156,6 +164,9 @@ export const assetStub = {
deletedAt: null,
duplicateId: null,
isOffline: false,
libraryId: null,
stackId: null,
updateId: '42',
}),
primaryImage: Object.freeze({
@ -195,12 +206,13 @@ export const assetStub = {
} as Exif,
stackId: 'stack-1',
stack: stackStub('stack-1', [
{ id: 'primary-asset-id' } as AssetEntity,
{ id: 'stack-child-asset-1' } as AssetEntity,
{ id: 'stack-child-asset-2' } as AssetEntity,
{ id: 'primary-asset-id' } as MapAsset & { exifInfo: Exif },
{ id: 'stack-child-asset-1' } as MapAsset & { exifInfo: Exif },
{ id: 'stack-child-asset-2' } as MapAsset & { exifInfo: Exif },
]),
duplicateId: null,
isOffline: false,
updateId: '42',
libraryId: null,
}),
@ -229,6 +241,9 @@ export const assetStub = {
isExternal: false,
livePhotoVideo: null,
livePhotoVideoId: null,
updateId: 'foo',
libraryId: null,
stackId: null,
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
@ -241,10 +256,10 @@ export const assetStub = {
} as Exif,
duplicateId: null,
isOffline: false,
libraryId: null,
stack: null,
}),
trashed: Object.freeze<AssetEntity>({
trashed: Object.freeze({
id: 'asset-id',
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
@ -281,9 +296,12 @@ export const assetStub = {
duplicateId: null,
isOffline: false,
status: AssetStatus.TRASHED,
libraryId: null,
stackId: null,
updateId: '42',
}),
trashedOffline: Object.freeze<AssetEntity>({
trashedOffline: Object.freeze({
id: 'asset-id',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -321,8 +339,10 @@ export const assetStub = {
} as Exif,
duplicateId: null,
isOffline: true,
stackId: null,
updateId: '42',
}),
archived: Object.freeze<AssetEntity>({
archived: Object.freeze({
id: 'asset-id',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -359,9 +379,12 @@ export const assetStub = {
} as Exif,
duplicateId: null,
isOffline: false,
libraryId: null,
stackId: null,
updateId: '42',
}),
external: Object.freeze<AssetEntity>({
external: Object.freeze({
id: 'asset-id',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -397,9 +420,12 @@ export const assetStub = {
} as Exif,
duplicateId: null,
isOffline: false,
updateId: '42',
stackId: null,
stack: null,
}),
image1: Object.freeze<AssetEntity>({
image1: Object.freeze({
id: 'asset-id-1',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -434,9 +460,13 @@ export const assetStub = {
} as Exif,
duplicateId: null,
isOffline: false,
updateId: '42',
stackId: null,
libraryId: null,
stack: null,
}),
imageFrom2015: Object.freeze<AssetEntity>({
imageFrom2015: Object.freeze({
id: 'asset-id-1',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -510,7 +540,9 @@ export const assetStub = {
deletedAt: null,
duplicateId: null,
isOffline: false,
updateId: '42',
libraryId: null,
stackId: null,
}),
livePhotoMotionAsset: Object.freeze({
@ -527,7 +559,7 @@ export const assetStub = {
timeZone: `America/New_York`,
},
libraryId: null,
} as AssetEntity & { libraryId: string | null; files: AssetFile[]; exifInfo: Exif }),
} as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif }),
livePhotoStillAsset: Object.freeze({
id: 'live-photo-still-asset',
@ -544,7 +576,8 @@ export const assetStub = {
timeZone: `America/New_York`,
},
files,
} as AssetEntity & { libraryId: string | null }),
faces: [] as AssetFace[],
} as MapAsset & { faces: AssetFace[] }),
livePhotoWithOriginalFileName: Object.freeze({
id: 'live-photo-still-asset',
@ -562,7 +595,8 @@ export const assetStub = {
timeZone: `America/New_York`,
},
libraryId: null,
} as AssetEntity & { libraryId: string | null }),
faces: [] as AssetFace[],
} as MapAsset & { faces: AssetFace[] }),
withLocation: Object.freeze({
id: 'asset-with-favorite-id',
@ -590,6 +624,9 @@ export const assetStub = {
isVisible: true,
livePhotoVideo: null,
livePhotoVideoId: null,
updateId: 'foo',
libraryId: null,
stackId: null,
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
@ -604,7 +641,7 @@ export const assetStub = {
deletedAt: null,
duplicateId: null,
isOffline: false,
libraryId: null,
tags: [],
}),
sidecar: Object.freeze({
@ -639,10 +676,12 @@ export const assetStub = {
deletedAt: null,
duplicateId: null,
isOffline: false,
updateId: 'foo',
libraryId: null,
stackId: null,
}),
sidecarWithoutExt: Object.freeze<AssetEntity>({
sidecarWithoutExt: Object.freeze({
id: 'asset-id',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -676,7 +715,7 @@ export const assetStub = {
isOffline: false,
}),
hasEncodedVideo: Object.freeze<AssetEntity>({
hasEncodedVideo: Object.freeze({
id: 'asset-id',
status: AssetStatus.ACTIVE,
originalFileName: 'asset-id.ext',
@ -711,9 +750,13 @@ export const assetStub = {
deletedAt: null,
duplicateId: null,
isOffline: false,
updateId: '42',
libraryId: null,
stackId: null,
stack: null,
}),
hasFileExtension: Object.freeze<AssetEntity>({
hasFileExtension: Object.freeze({
id: 'asset-id',
status: AssetStatus.ACTIVE,
deviceAssetId: 'device-asset-id',
@ -788,6 +831,9 @@ export const assetStub = {
} as Exif,
duplicateId: null,
isOffline: false,
updateId: '42',
libraryId: null,
stackId: null,
}),
imageHif: Object.freeze({
@ -827,5 +873,8 @@ export const assetStub = {
} as Exif,
duplicateId: null,
isOffline: false,
updateId: '42',
libraryId: null,
stackId: null,
}),
};

View File

@ -1,6 +1,5 @@
import { Session } from 'src/database';
import { AuthDto } from 'src/dtos/auth.dto';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
const authUser = {
admin: {
@ -42,14 +41,16 @@ export const authStub = {
id: 'token-id',
} as Session,
}),
adminSharedLink: Object.freeze<AuthDto>({
adminSharedLink: Object.freeze({
user: authUser.admin,
sharedLink: {
id: '123',
showExif: true,
allowDownload: true,
allowUpload: true,
key: Buffer.from('shared-link-key'),
} as SharedLinkEntity,
expiresAt: null,
password: null,
userId: '42',
},
}),
};

View File

@ -1,10 +1,9 @@
import { UserAdmin } from 'src/database';
import { AlbumResponseDto } from 'src/dtos/album.dto';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AssetResponseDto, MapAsset } from 'src/dtos/asset-response.dto';
import { ExifResponseDto } from 'src/dtos/exif.dto';
import { SharedLinkResponseDto } from 'src/dtos/shared-link.dto';
import { mapUser } from 'src/dtos/user.dto';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { AssetOrder, AssetStatus, AssetType, SharedLinkType } from 'src/enum';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
@ -113,12 +112,12 @@ export const sharedLinkStub = {
allowUpload: true,
allowDownload: true,
showExif: true,
album: undefined,
albumId: null,
album: null,
description: null,
assets: [assetStub.image],
password: 'password',
albumId: null,
} as SharedLinkEntity),
}),
valid: Object.freeze({
id: '123',
userId: authStub.admin.user.id,
@ -130,12 +129,12 @@ export const sharedLinkStub = {
allowUpload: true,
allowDownload: true,
showExif: true,
album: undefined,
albumId: null,
description: null,
password: null,
assets: [],
} as SharedLinkEntity),
assets: [] as MapAsset[],
album: null,
}),
expired: Object.freeze({
id: '123',
userId: authStub.admin.user.id,
@ -150,9 +149,10 @@ export const sharedLinkStub = {
description: null,
password: null,
albumId: null,
assets: [],
} as SharedLinkEntity),
readonlyNoExif: Object.freeze<SharedLinkEntity>({
assets: [] as MapAsset[],
album: null,
}),
readonlyNoExif: Object.freeze({
id: '123',
userId: authStub.admin.user.id,
key: sharedLinkBytes,
@ -168,6 +168,7 @@ export const sharedLinkStub = {
albumId: 'album-123',
album: {
id: 'album-123',
updateId: '42',
ownerId: authStub.admin.user.id,
owner: userStub.admin,
albumName: 'Test Album',
@ -239,17 +240,22 @@ export const sharedLinkStub = {
colorspace: 'sRGB',
autoStackId: null,
rating: 3,
updatedAt: today,
updateId: '42',
},
sharedLinks: [],
faces: [],
sidecarPath: null,
deletedAt: null,
duplicateId: null,
updateId: '42',
libraryId: null,
stackId: null,
},
],
},
}),
passwordRequired: Object.freeze<SharedLinkEntity>({
passwordRequired: Object.freeze({
id: '123',
userId: authStub.admin.user.id,
key: sharedLinkBytes,
@ -263,6 +269,7 @@ export const sharedLinkStub = {
password: 'password',
assets: [],
albumId: null,
album: null,
}),
};

View File

@ -1,10 +1,11 @@
import { FileMigrationProvider, Kysely, Migrator } from 'kysely';
import { mkdir, readdir } from 'node:fs/promises';
import { join } from 'node:path';
import { Kysely } from 'kysely';
import { parse } from 'pg-connection-string';
import { DB } from 'src/db';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { getKyselyConfig } from 'src/utils/database';
import { GenericContainer, Wait } from 'testcontainers';
import { DataSource } from 'typeorm';
const globalSetup = async () => {
const postgresContainer = await new GenericContainer('tensorchord/pgvecto-rs:pg14-v0.2.0')
@ -36,66 +37,23 @@ const globalSetup = async () => {
const postgresPort = postgresContainer.getMappedPort(5432);
const postgresUrl = `postgres://postgres:postgres@localhost:${postgresPort}/immich`;
const parsed = parse(postgresUrl);
process.env.IMMICH_TEST_POSTGRES_URL = postgresUrl;
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-expect-error
const modules = import.meta.glob('/src/migrations/*.ts', { eager: true });
const config = {
type: 'postgres' as const,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-expect-error
migrations: Object.values(modules).map((module) => Object.values(module)[0]),
migrationsRun: false,
synchronize: false,
connectTimeoutMS: 10_000, // 10 seconds
parseInt8: true,
url: postgresUrl,
};
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-expect-error
const dataSource = new DataSource(config);
await dataSource.initialize();
await dataSource.runMigrations();
await dataSource.destroy();
// for whatever reason, importing from test/utils causes vitest to crash
// eslint-disable-next-line unicorn/prefer-module
const migrationFolder = join(__dirname, '..', 'schema/migrations');
// TODO remove after we have at least one kysely migration
await mkdir(migrationFolder, { recursive: true });
const parsed = parse(process.env.IMMICH_TEST_POSTGRES_URL!);
const parsedOptions = {
...parsed,
ssl: false,
host: parsed.host ?? undefined,
port: parsed.port ? Number(parsed.port) : undefined,
database: parsed.database ?? undefined,
};
const db = new Kysely(getKyselyConfig(parsedOptions));
// TODO just call `databaseRepository.migrate()` (probably have to wait until TypeOrm is gone)
const migrator = new Migrator({
db,
migrationLockTableName: 'kysely_migrations_lock',
migrationTableName: 'kysely_migrations',
provider: new FileMigrationProvider({
fs: { readdir },
path: { join },
migrationFolder,
const db = new Kysely<DB>(
getKyselyConfig({
...parsed,
ssl: false,
host: parsed.host ?? undefined,
port: parsed.port ? Number(parsed.port) : undefined,
database: parsed.database ?? undefined,
}),
});
);
const { error } = await migrator.migrateToLatest();
if (error) {
console.error('Unable to run kysely migrations', error);
throw error;
}
const configRepository = new ConfigRepository();
const logger = new LoggingRepository(undefined, configRepository);
await new DatabaseRepository(db, logger, configRepository).runMigrations();
await db.destroy();
};
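
// Editorial note: the TypeORM DataSource and the hand-rolled kysely Migrator are
// gone; test setup now runs the same DatabaseRepository.runMigrations() path that
// the server itself uses.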

View File

@ -39,9 +39,12 @@ describe(MemoryService.name, () => {
it('should create a memory from an asset', async () => {
const { sut, repos, getRepository } = createSut();
const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' });
const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' }) as DateTime<true>;
const user = mediumFactory.userInsert();
const asset = mediumFactory.assetInsert({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
const asset = mediumFactory.assetInsert({
ownerId: user.id,
localDateTime: now.minus({ years: 1 }).toISO(),
});
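
// Editorial note: the `as DateTime<true>` cast narrows luxon's validity type
// parameter, so toISO() is typed as string rather than string | null.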
const jobStatus = mediumFactory.assetJobStatusInsert({ assetId: asset.id });
const userRepo = getRepository('user');
@ -86,7 +89,7 @@ describe(MemoryService.name, () => {
it('should not generate a memory twice for the same day', async () => {
const { sut, repos, getRepository } = createSut();
const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' });
const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' }) as DateTime<true>;
const assetRepo = getRepository('asset');
const memoryRepo = getRepository('memory');

View File

@ -118,7 +118,7 @@ describe(MetadataService.name, () => {
process.env.TZ = serverTimeZone ?? undefined;
const { filePath } = await createTestFile(exifData);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ id: 'asset-1', originalPath: filePath } as never);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ id: 'asset-1', originalPath: filePath } as any);
await sut.handleMetadataExtraction({ id: 'asset-1' });

View File

@ -11,7 +11,7 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
upsertJobStatus: vitest.fn(),
getByDayOfYear: vitest.fn(),
getByIds: vitest.fn().mockResolvedValue([]),
getByIdsWithAllRelations: vitest.fn().mockResolvedValue([]),
getByIdsWithAllRelationsButStacks: vitest.fn().mockResolvedValue([]),
getByDeviceIds: vitest.fn(),
getByUserId: vitest.fn(),
getById: vitest.fn(),

View File

@ -2,7 +2,6 @@ import { randomUUID } from 'node:crypto';
import {
Activity,
ApiKey,
Asset,
AuthApiKey,
AuthSharedLink,
AuthUser,
@ -14,6 +13,7 @@ import {
User,
UserAdmin,
} from 'src/database';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetStatus, AssetType, MemoryType, Permission, UserStatus } from 'src/enum';
import { OnThisDayData } from 'src/types';
@ -184,7 +184,7 @@ const userAdminFactory = (user: Partial<UserAdmin> = {}) => {
};
};
const assetFactory = (asset: Partial<Asset> = {}) => ({
const assetFactory = (asset: Partial<MapAsset> = {}) => ({
id: newUuid(),
createdAt: newDate(),
updatedAt: newDate(),