Mirror of https://github.com/immich-app/immich.git (synced 2025-07-09 03:04:16 -04:00)

Commit dccf33af2d: Merge branch 'main' into improve_focus
29  .github/workflows/build-mobile.yml  vendored
@ -7,6 +7,15 @@ on:
|
||||
ref:
|
||||
required: false
|
||||
type: string
|
||||
secrets:
|
||||
KEY_JKS:
|
||||
required: true
|
||||
ALIAS:
|
||||
required: true
|
||||
ANDROID_KEY_PASSWORD:
|
||||
required: true
|
||||
ANDROID_STORE_PASSWORD:
|
||||
required: true
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
@ -15,14 +24,21 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
with:
|
||||
@ -38,22 +54,17 @@ jobs:
|
||||
build-sign-android:
|
||||
name: Build and sign Android
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
# Skip when PR from a fork
|
||||
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
|
||||
runs-on: macos-14
|
||||
|
||||
steps:
|
||||
- name: Determine ref
|
||||
id: get-ref
|
||||
run: |
|
||||
input_ref="${{ inputs.ref }}"
|
||||
github_ref="${{ github.sha }}"
|
||||
ref="${input_ref:-$github_ref}"
|
||||
echo "ref=$ref" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
ref: ${{ steps.get-ref.outputs.ref }}
|
||||
ref: ${{ inputs.ref || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4
|
||||
with:
|
||||
|
17  .github/workflows/cache-cleanup.yml  vendored
@ -8,31 +8,38 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
cleanup:
|
||||
name: Cleanup
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Cleanup
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
REF: ${{ github.ref }}
|
||||
run: |
|
||||
gh extension install actions/gh-actions-cache
|
||||
|
||||
REPO=${{ github.repository }}
|
||||
BRANCH=${{ github.ref }}
|
||||
|
||||
echo "Fetching list of cache keys"
|
||||
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
|
||||
cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
|
||||
|
||||
## Setting this to not fail the workflow while deleting cache keys.
|
||||
set +e
|
||||
echo "Deleting caches..."
|
||||
for cacheKey in $cacheKeysForPR
|
||||
do
|
||||
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
|
||||
gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
|
||||
done
|
||||
echo "Done"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
13  .github/workflows/cli.yml  vendored
@ -16,19 +16,23 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: CLI Publish
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
@ -48,11 +52,16 @@ jobs:
|
||||
docker:
|
||||
name: Docker
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
needs: publish
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
|
4  .github/workflows/codeql-analysis.yml  vendored
@ -24,6 +24,8 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
@ -43,6 +45,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
|
85  .github/workflows/docker.yml  vendored
@ -12,18 +12,21 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
with:
|
||||
@ -45,6 +48,9 @@ jobs:
|
||||
retag_ml:
|
||||
name: Re-Tag ML
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
@ -58,18 +64,22 @@ jobs:
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Re-tag image
|
||||
env:
|
||||
REGISTRY_NAME: 'ghcr.io'
|
||||
REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
|
||||
TAG_OLD: main${{ matrix.suffix }}
|
||||
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
run: |
|
||||
REGISTRY_NAME="ghcr.io"
|
||||
REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
|
||||
TAG_OLD=main${{ matrix.suffix }}
|
||||
TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
|
||||
retag_server:
|
||||
name: Re-Tag Server
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
@ -83,18 +93,22 @@ jobs:
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Re-tag image
|
||||
env:
|
||||
REGISTRY_NAME: 'ghcr.io'
|
||||
REPOSITORY: ${{ github.repository_owner }}/immich-server
|
||||
TAG_OLD: main${{ matrix.suffix }}
|
||||
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
run: |
|
||||
REGISTRY_NAME="ghcr.io"
|
||||
REPOSITORY=${{ github.repository_owner }}/immich-server
|
||||
TAG_OLD=main${{ matrix.suffix }}
|
||||
TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
|
||||
build_and_push_ml:
|
||||
name: Build and Push ML
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
env:
|
||||
@ -148,6 +162,8 @@ jobs:
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
@ -161,11 +177,14 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate cache key suffix
|
||||
env:
|
||||
REF: ${{ github.ref_name }}
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
|
||||
else
|
||||
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
|
||||
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
|
||||
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Generate cache target
|
||||
@ -175,7 +194,7 @@ jobs:
|
||||
# Essentially just ignore the cache output (forks can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${{ matrix.device }}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Generate docker image tags
|
||||
@ -221,6 +240,10 @@ jobs:
|
||||
merge_ml:
|
||||
name: Merge & Push ML
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
|
||||
env:
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
|
||||
@ -308,15 +331,16 @@ jobs:
|
||||
fi
|
||||
|
||||
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
|
||||
|
||||
echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"
|
||||
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
|
||||
|
||||
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
|
||||
|
||||
build_and_push_server:
|
||||
name: Build and Push Server
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
env:
|
||||
@ -340,6 +364,8 @@ jobs:
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
@ -353,11 +379,14 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate cache key suffix
|
||||
env:
|
||||
REF: ${{ github.ref_name }}
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
|
||||
else
|
||||
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
|
||||
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
|
||||
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Generate cache target
|
||||
@ -367,7 +396,7 @@ jobs:
|
||||
# Essentially just ignore the cache output (forks can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Generate docker image tags
|
||||
@ -413,6 +442,10 @@ jobs:
|
||||
merge_server:
|
||||
name: Merge & Push Server
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
|
||||
env:
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
|
||||
@ -486,15 +519,14 @@ jobs:
|
||||
fi
|
||||
|
||||
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
|
||||
|
||||
echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"
|
||||
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
|
||||
|
||||
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
|
||||
|
||||
success-check-server:
|
||||
name: Docker Build & Push Server Success
|
||||
needs: [merge_server, retag_server]
|
||||
permissions: {}
|
||||
runs-on: ubuntu-latest
|
||||
if: always()
|
||||
steps:
|
||||
@ -508,6 +540,7 @@ jobs:
|
||||
success-check-ml:
|
||||
name: Docker Build & Push ML Success
|
||||
needs: [merge_ml, retag_ml]
|
||||
permissions: {}
|
||||
runs-on: ubuntu-latest
|
||||
if: always()
|
||||
steps:
|
||||
|
10  .github/workflows/docs-build.yml  vendored
@ -10,14 +10,20 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
with:
|
||||
@ -33,6 +39,8 @@ jobs:
|
||||
build:
|
||||
name: Docs Build
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
@ -42,6 +50,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
|
20  .github/workflows/docs-deploy.yml  vendored
@ -9,6 +9,9 @@ jobs:
|
||||
checks:
|
||||
name: Docs Deploy Checks
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
pull-requests: read
|
||||
outputs:
|
||||
parameters: ${{ steps.parameters.outputs.result }}
|
||||
artifact: ${{ steps.get-artifact.outputs.result }}
|
||||
@ -36,6 +39,8 @@ jobs:
|
||||
- name: Determine deploy parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
env:
|
||||
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||
with:
|
||||
script: |
|
||||
const eventType = context.payload.workflow_run.event;
|
||||
@ -57,7 +62,8 @@ jobs:
|
||||
} else if (eventType == "pull_request") {
|
||||
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
|
||||
if(!pull_number) {
|
||||
const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
|
||||
const {HEAD_SHA} = process.env;
|
||||
const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
|
||||
const items = response.data.items
|
||||
if (items.length < 1) {
|
||||
throw new Error("No pull request found for the commit")
|
||||
@ -95,10 +101,16 @@ jobs:
|
||||
name: Docs Deploy
|
||||
runs-on: ubuntu-latest
|
||||
needs: checks
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
pull-requests: write
|
||||
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Load parameters
|
||||
id: parameters
|
||||
@ -162,9 +174,11 @@ jobs:
|
||||
|
||||
- name: Output Cleaning
|
||||
id: clean
|
||||
env:
|
||||
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
|
||||
run: |
|
||||
TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
|
||||
echo "output=$TG_OUT" >> $GITHUB_OUTPUT
|
||||
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
|
||||
echo "output=$CLEANED" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish to Cloudflare Pages
|
||||
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1
|
||||
|
7  .github/workflows/docs-destroy.yml  vendored
@ -3,13 +3,20 @@ on:
|
||||
pull_request_target:
|
||||
types: [closed]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Docs Destroy
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Destroy Docs Subdomain
|
||||
env:
|
||||
|
4  .github/workflows/fix-format.yml  vendored
@ -4,11 +4,14 @@ on:
|
||||
pull_request:
|
||||
types: [labeled]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
fix-formatting:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event.label.name == 'fix:formatting' }}
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Generate a token
|
||||
@ -23,6 +26,7 @@ jobs:
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
|
2  .github/workflows/pr-label-validation.yml  vendored
@ -4,6 +4,8 @@ on:
|
||||
pull_request_target:
|
||||
types: [opened, labeled, unlabeled, synchronize]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
validate-release-label:
|
||||
runs-on: ubuntu-latest
|
||||
|
2  .github/workflows/pr-labeler.yml  vendored
@ -2,6 +2,8 @@ name: 'Pull Request Labeler'
|
||||
on:
|
||||
- pull_request_target
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
labeler:
|
||||
permissions:
|
||||
|
@ -4,9 +4,13 @@ on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, edited]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
validate-pr-title:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: PR Conventional Commit Validation
|
||||
uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0
|
||||
|
18  .github/workflows/prepare-release.yml  vendored
@ -21,13 +21,14 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-root
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
bump_version:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
|
||||
|
||||
permissions: {} # No job-level permissions are needed because it uses the app-token
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
@ -40,6 +41,7 @@ jobs:
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
|
||||
@ -59,14 +61,20 @@ jobs:
|
||||
build_mobile:
|
||||
uses: ./.github/workflows/build-mobile.yml
|
||||
needs: bump_version
|
||||
secrets: inherit
|
||||
secrets:
|
||||
KEY_JKS: ${{ secrets.KEY_JKS }}
|
||||
ALIAS: ${{ secrets.ALIAS }}
|
||||
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
|
||||
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
|
||||
with:
|
||||
ref: ${{ needs.bump_version.outputs.ref }}
|
||||
|
||||
prepare_release:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build_mobile
|
||||
|
||||
permissions:
|
||||
actions: read # To download the app artifact
|
||||
# No content permissions are needed because it uses the app-token
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
@ -79,6 +87,7 @@ jobs:
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download APK
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
||||
@ -90,6 +99,7 @@ jobs:
|
||||
with:
|
||||
draft: true
|
||||
tag_name: ${{ env.IMMICH_VERSION }}
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
generate_release_notes: true
|
||||
body_path: misc/release/notes.tmpl
|
||||
files: |
|
||||
|
2  .github/workflows/preview-label.yaml  vendored
@ -4,6 +4,8 @@ on:
|
||||
pull_request:
|
||||
types: [labeled, closed]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
comment-status:
|
||||
runs-on: ubuntu-latest
|
||||
|
8  .github/workflows/sdk.yml  vendored
@ -4,18 +4,22 @@ on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Publish `@immich/sdk`
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
|
16  .github/workflows/static_analysis.yml  vendored
@ -9,14 +9,20 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
with:
|
||||
@ -33,12 +39,14 @@ jobs:
|
||||
name: Run Dart Code Analysis
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||
@ -69,9 +77,11 @@ jobs:
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
|
||||
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
- name: Run dart analyze
|
||||
|
80  .github/workflows/test.yml  vendored
@ -9,9 +9,13 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
@ -25,6 +29,9 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
with:
|
||||
@ -58,6 +65,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./server
|
||||
@ -65,6 +74,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -95,6 +106,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
@ -102,6 +115,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -136,6 +151,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
||||
runs-on: windows-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
@ -143,6 +160,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -170,6 +189,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./web
|
||||
@ -177,6 +198,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -215,6 +238,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
@ -222,6 +247,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -254,6 +281,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./server
|
||||
@ -261,6 +290,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -279,6 +310,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
|
||||
runs-on: mich
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
@ -287,6 +320,7 @@ jobs:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup Node
|
||||
@ -321,6 +355,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
|
||||
runs-on: mich
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
@ -329,6 +365,7 @@ jobs:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup Node
|
||||
@ -362,8 +399,13 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||
with:
|
||||
@ -378,11 +420,16 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./machine-learning
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
|
||||
@ -411,6 +458,8 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./.github
|
||||
@ -418,6 +467,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -434,22 +485,31 @@ jobs:
|
||||
shellcheck:
|
||||
name: ShellCheck
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Run ShellCheck
|
||||
uses: ludeeus/action-shellcheck@master
|
||||
with:
|
||||
ignore_paths: >-
|
||||
**/open-api/**
|
||||
**/openapi/**
|
||||
**/openapi**
|
||||
**/node_modules/**
|
||||
|
||||
generated-api-up-to-date:
|
||||
name: OpenAPI Clients
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -476,14 +536,18 @@ jobs:
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated files not up to date!"
|
||||
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
generated-typeorm-migrations-up-to-date:
|
||||
name: TypeORM Checks
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
services:
|
||||
postgres:
|
||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
@ -505,6 +569,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@ -521,7 +587,7 @@ jobs:
|
||||
run: npm run migrations:run
|
||||
|
||||
- name: Test npm run schema:reset command works
|
||||
run: npm run typeorm:schema:reset
|
||||
run: npm run schema:reset
|
||||
|
||||
- name: Generate new migrations
|
||||
continue-on-error: true
|
||||
@ -535,9 +601,11 @@ jobs:
|
||||
server/src
|
||||
- name: Verify migration files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated migration files not up to date!"
|
||||
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
cat ./src/*-TestMigration.ts
|
||||
exit 1
|
||||
|
||||
@ -555,9 +623,11 @@ jobs:
|
||||
|
||||
- name: Verify SQL files have not changed
|
||||
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated SQL files not up to date!"
|
||||
echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
# mobile-integration-tests:
|
||||
|
13  .github/workflows/weblate-lock.yml  vendored
@ -4,30 +4,32 @@ on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
with:
|
||||
filters: |
|
||||
i18n:
|
||||
- 'i18n/!(en)**\.json'
|
||||
- name: Debug
|
||||
run: |
|
||||
echo "Should run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}"
|
||||
echo "Found i18n paths: ${{ steps.found_paths.outputs.i18n }}"
|
||||
echo "Head ref: ${{ github.head_ref }}"
|
||||
|
||||
enforce-lock:
|
||||
name: Check Weblate Lock
|
||||
needs: [pre-job]
|
||||
runs-on: ubuntu-latest
|
||||
permissions: {}
|
||||
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
|
||||
steps:
|
||||
- name: Check weblate lock
|
||||
@ -47,6 +49,7 @@ jobs:
|
||||
name: Weblate Lock Check Success
|
||||
needs: [enforce-lock]
|
||||
runs-on: ubuntu-latest
|
||||
permissions: {}
|
||||
if: always()
|
||||
steps:
|
||||
- name: Any jobs failed?
|
||||
|
4  cli/package-lock.json  generated
@ -27,7 +27,7 @@
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/micromatch": "^4.0.9",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^22.14.0",
|
||||
"@types/node": "^22.14.1",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"byte-size": "^9.0.0",
|
||||
"cli-progress": "^3.12.0",
|
||||
@ -61,7 +61,7 @@
|
||||
"@oazapfts/runtime": "^1.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.14.0",
|
||||
"@types/node": "^22.14.1",
|
||||
"typescript": "^5.3.3"
|
||||
}
|
||||
},
|
||||
|
@ -21,7 +21,7 @@
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/micromatch": "^4.0.9",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^22.14.0",
|
||||
"@types/node": "^22.14.1",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"byte-size": "^9.0.0",
|
||||
"cli-progress": "^3.12.0",
|
||||
|
@ -23,23 +23,32 @@ Refer to the official [postgres documentation](https://www.postgresql.org/docs/c
 It is not recommended to directly back up the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
 :::

-### Automatic Database Backups
+### Automatic Database Dumps

-For convenience, Immich will automatically create database backups by default. The backups are stored in `UPLOAD_LOCATION/backups`.
-As mentioned above, you should make your own backup of these together with the asset folders as noted below.
-You can adjust the schedule and amount of kept backups in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
-By default, Immich will keep the last 14 backups and create a new backup every day at 2:00 AM.
+:::warning
+The automatic database dumps can be used to restore the database in the event of damage to the Postgres database files.
+There is no monitoring for these dumps and you will not be notified if they are unsuccessful.
+:::

-#### Trigger Backup
+:::caution
+The database dumps do **NOT** contain any pictures or videos, only metadata. They are only usable with a copy of the other files in `UPLOAD_LOCATION` as outlined below.
+:::

-You are able to trigger a backup in the [admin job status page](http://my.immich.app/admin/jobs-status).
-Visit the page, open the "Create job" modal from the top right, select "Backup Database" and click "Confirm".
-A job will run and trigger a backup, you can verify this worked correctly by checking the logs or the backup folder.
-This backup will count towards the last X backups that will be kept based on your settings.
+For disaster-recovery purposes, Immich will automatically create database dumps. The dumps are stored in `UPLOAD_LOCATION/backups`.
+Please be sure to make your own, independent backup of the database together with the asset folders as noted below.
+You can adjust the schedule and amount of kept database dumps in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
+By default, Immich will keep the last 14 database dumps and create a new dump every day at 2:00 AM.

+#### Trigger Dump

+You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/jobs-status).
+Visit the page, open the "Create job" modal from the top right, select "Create Database Dump" and click "Confirm".
+A job will run and trigger a dump; you can verify this worked correctly by checking the logs or the `backups/` folder.
+This dump will count towards the last `X` dumps that will be kept based on your settings.

 #### Restoring

-We hope to make restoring simpler in future versions, for now you can find the backups in the `UPLOAD_LOCATION/backups` folder on your host.
+We hope to make restoring simpler in future versions; for now you can find the database dumps in the `UPLOAD_LOCATION/backups` folder on your host.
 Then please follow the steps in the following section for restoring the database.
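An illustrative aside, not part of the diff above: a restore from one of these dumps can look roughly like the sketch below, assuming a docker-compose deployment with a Postgres service named `database`, the default `postgres` superuser, and a gzipped SQL dump produced by the automatic dump job. The file name, service name, and username are placeholders; check them against your own setup and the official restore instructions.

```bash
# Minimal restore sketch -- every name below is an assumption, verify against your deployment.
docker compose down -v                     # stop Immich and drop the old database volume
docker compose up -d database              # start only the Postgres service
sleep 10                                   # give Postgres a moment to initialize
gunzip < /path/to/UPLOAD_LOCATION/backups/immich-db-backup-<timestamp>.sql.gz \
  | docker compose exec -T database psql --username=postgres
docker compose up -d                       # start the remaining Immich services
```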
### Manual Backup and Restore
@ -1,14 +1,14 @@
 # Database Migrations

-After making any changes in `server/src/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
+After making any changes in `server/src/schema`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.

 1. Run the command

    ```bash
-   npm run typeorm:migrations:generate <migration-name>
+   npm run migrations:generate <migration-name>
    ```

 2. Check if the migration file makes sense.
-3. Move the migration file to the folder `./server/src/migrations` in your code editor.
+3. Move the migration file to the folder `./server/src/schema/migrations` in your code editor.

 The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so they will be applied immediately.
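Purely as illustration, a hypothetical end-to-end run of the steps above might look like this; the migration name, the generated file name, and the path the generator writes to are placeholders rather than values taken from the diff.

```bash
# Hypothetical migration workflow -- names and paths are placeholders.
cd server
npm run migrations:generate AddExampleColumn
# Review the generated file, then move it in with the other migrations
# (adjust the source path to wherever the generator wrote it):
mv ./src/1700000000000-AddExampleColumn.ts ./src/schema/migrations/
# Restart the dev server (or let it hot-restart); pending migrations run at start-up.
```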
@ -42,7 +42,7 @@ You do not need to redo any machine learning jobs after enabling hardware accele

 - The GPU must have compute capability 5.2 or greater.
 - The server must have the official NVIDIA driver installed.
-- The installed driver must be >= 535 (it must support CUDA 12.2).
+- The installed driver must be >= 545 (it must support CUDA 12.3).
 - On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
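As a quick aside, one way to check the first three requirements is with `nvidia-smi`; this assumes the `compute_cap` query field is available in your driver's `nvidia-smi` build (it is in reasonably recent releases).

```bash
# Print the installed driver version and the GPU's compute capability.
nvidia-smi --query-gpu=driver_version,compute_cap --format=csv,noheader
# Illustrative output only: 550.54.15, 8.6
```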
|
5  docs/src/pages/errors.md  (new file)
@ -0,0 +1,5 @@
+# Errors
+
+## TypeORM Upgrade
+
+The upgrade to Immich `v2.x.x` requires passing through `v1.132.0` first: you must start the application at least once on version `1.132.0` (or later), which completes the database schema upgrades required for `v2.0.0`. After Immich has successfully booted on that version, shut the system down and try the `v2.x.x` upgrade again.
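For illustration only, and not part of the new docs page: with a docker-compose setup where the server image tag is selected by an `IMMICH_VERSION` variable (as in the standard compose file), stepping through the intermediate release could look like the sketch below. The variable name and tag format are assumptions; check your own compose file.

```bash
# Hypothetical upgrade-path sketch -- adapt names and tags to your deployment.
IMMICH_VERSION=v1.132.0 docker compose up -d   # boot once so schema upgrades complete
# ...wait for the server to finish starting and settle, then shut it down...
docker compose down
IMMICH_VERSION=v2.0.0 docker compose up -d     # proceed with the v2 upgrade
```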
@ -4,6 +4,7 @@ import Layout from '@theme/Layout';
|
||||
import { discordPath, discordViewBox } from '@site/src/components/svg-paths';
|
||||
import ThemedImage from '@theme/ThemedImage';
|
||||
import Icon from '@mdi/react';
|
||||
import { mdiAndroid } from '@mdi/js';
|
||||
function HomepageHeader() {
|
||||
return (
|
||||
<header>
|
||||
@ -88,11 +89,18 @@ function HomepageHeader() {
|
||||
<img className="h-24" alt="Get it on Google Play" src="/img/google-play-badge.png" />
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div className="h-24">
|
||||
<a href="https://apps.apple.com/sg/app/immich/id1613945652">
|
||||
<img className="h-24 sm:p-3.5 p-3" alt="Download on the App Store" src="/img/ios-app-store-badge.svg" />
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div className="h-24">
|
||||
<a href="https://github.com/immich-app/immich/releases/latest">
|
||||
<img className="h-24 sm:p-3.5 p-3" alt="Download APK" src="/img/download-apk-github.svg" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<ThemedImage
|
||||
sources={{ dark: '/img/app-qr-code-dark.svg', light: '/img/app-qr-code-light.svg' }}
|
||||
|
@ -76,6 +76,7 @@ import {
|
||||
mdiWeb,
|
||||
mdiDatabaseOutline,
|
||||
mdiLinkEdit,
|
||||
mdiTagFaces,
|
||||
mdiMovieOpenPlayOutline,
|
||||
} from '@mdi/js';
|
||||
import Layout from '@theme/Layout';
|
||||
@ -83,6 +84,8 @@ import React from 'react';
|
||||
import { Item, Timeline } from '../components/timeline';
|
||||
|
||||
const releases = {
|
||||
'v1.130.0': new Date(2025, 2, 25),
|
||||
'v1.127.0': new Date(2025, 1, 26),
|
||||
'v1.122.0': new Date(2024, 11, 5),
|
||||
'v1.120.0': new Date(2024, 10, 6),
|
||||
'v1.114.0': new Date(2024, 8, 6),
|
||||
@ -242,6 +245,21 @@ const roadmap: Item[] = [
|
||||
];
|
||||
|
||||
const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiFolderMultiple,
|
||||
iconColor: 'brown',
|
||||
title: 'Folders view in the mobile app',
|
||||
description: 'Browse your photos and videos in their folder structure inside the mobile app',
|
||||
release: 'v1.130.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiTagFaces,
|
||||
iconColor: 'teal',
|
||||
title: 'Manual face tagging',
|
||||
description:
|
||||
'Manually tag or remove faces in photos and videos, even when automatic detection misses or misidentifies them.',
|
||||
release: 'v1.127.0',
|
||||
}),
|
||||
{
|
||||
icon: mdiStar,
|
||||
iconColor: 'gold',
|
||||
@ -266,8 +284,8 @@ const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiDatabaseOutline,
|
||||
iconColor: 'brown',
|
||||
title: 'Automatic database backups',
|
||||
description: 'Database backups are now integrated into the Immich server',
|
||||
title: 'Automatic database dumps',
|
||||
description: 'Database dumps are now integrated into the Immich server',
|
||||
release: 'v1.120.0',
|
||||
}),
|
||||
{
|
||||
@ -300,7 +318,7 @@ const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiFolderMultiple,
|
||||
iconColor: 'brown',
|
||||
title: 'Folders',
|
||||
title: 'Folders view',
|
||||
description: 'Browse your photos and videos in their folder structure',
|
||||
release: 'v1.113.0',
|
||||
}),
|
||||
|
13  docs/static/img/download-apk-github.svg  vendored (new file)
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 14 KiB |
6  e2e/package-lock.json  generated
@ -15,7 +15,7 @@
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@types/luxon": "^3.4.2",
|
||||
"@types/node": "^22.14.0",
|
||||
"@types/node": "^22.14.1",
|
||||
"@types/oidc-provider": "^8.5.1",
|
||||
"@types/pg": "^8.11.0",
|
||||
"@types/pngjs": "^6.0.4",
|
||||
@ -66,7 +66,7 @@
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/micromatch": "^4.0.9",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^22.14.0",
|
||||
"@types/node": "^22.14.1",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"byte-size": "^9.0.0",
|
||||
"cli-progress": "^3.12.0",
|
||||
@ -100,7 +100,7 @@
|
||||
"@oazapfts/runtime": "^1.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.14.0",
|
||||
"@types/node": "^22.14.1",
|
||||
"typescript": "^5.3.3"
|
||||
}
|
||||
},
|
||||
|
@ -25,7 +25,7 @@
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@types/luxon": "^3.4.2",
|
||||
"@types/node": "^22.14.0",
|
||||
"@types/node": "^22.14.1",
|
||||
"@types/oidc-provider": "^8.5.1",
|
||||
"@types/pg": "^8.11.0",
|
||||
"@types/pngjs": "^6.0.4",
|
||||
|
11  i18n/en.json
@ -39,11 +39,11 @@
|
||||
"authentication_settings_disable_all": "Are you sure you want to disable all login methods? Login will be completely disabled.",
|
||||
"authentication_settings_reenable": "To re-enable, use a <link>Server Command</link>.",
|
||||
"background_task_job": "Background Tasks",
|
||||
"backup_database": "Backup Database",
|
||||
"backup_database_enable_description": "Enable database backups",
|
||||
"backup_keep_last_amount": "Amount of previous backups to keep",
|
||||
"backup_settings": "Backup Settings",
|
||||
"backup_settings_description": "Manage database backup settings",
|
||||
"backup_database": "Create Database Dump",
|
||||
"backup_database_enable_description": "Enable database dumps",
|
||||
"backup_keep_last_amount": "Amount of previous dumps to keep",
|
||||
"backup_settings": "Database Dump Settings",
|
||||
"backup_settings_description": "Manage database dump settings. Note: These jobs are not monitored and you will not be notified of failure.",
|
||||
"check_all": "Check All",
|
||||
"cleanup": "Cleanup",
|
||||
"cleared_jobs": "Cleared jobs for: {job}",
|
||||
@ -996,6 +996,7 @@
|
||||
"filetype": "Filetype",
|
||||
"filter": "Filter",
|
||||
"filter_people": "Filter people",
|
||||
"filter_places": "Filter places",
|
||||
"find_them_fast": "Find them fast by name with search",
|
||||
"fix_incorrect_match": "Fix incorrect match",
|
||||
"folder": "Folder",
|
||||
|
@ -35,6 +35,7 @@ linter:
|
||||
analyzer:
|
||||
exclude:
|
||||
- openapi/**
|
||||
- build/**
|
||||
- lib/generated_plugin_registrant.dart
|
||||
- lib/**/*.g.dart
|
||||
- lib/**/*.drift.dart
|
||||
@ -92,6 +93,9 @@ custom_lint:
|
||||
allowed:
|
||||
# required / wanted
|
||||
- lib/repositories/*_api.repository.dart
|
||||
- lib/domain/models/sync_event.model.dart
|
||||
- lib/{domain,infrastructure}/**/sync_stream.*
|
||||
- lib/{domain,infrastructure}/**/sync_api.*
|
||||
- lib/infrastructure/repositories/*_api.repository.dart
|
||||
- lib/infrastructure/utils/*.converter.dart
|
||||
# acceptable exceptions for the time being
|
||||
@ -144,7 +148,9 @@ dart_code_metrics:
|
||||
- avoid-global-state
|
||||
- avoid-inverted-boolean-checks
|
||||
- avoid-late-final-reassignment
|
||||
- avoid-local-functions
|
||||
- avoid-local-functions:
|
||||
exclude:
|
||||
- test/**.dart
|
||||
- avoid-negated-conditions
|
||||
- avoid-nested-streams-and-futures
|
||||
- avoid-referencing-subclasses
|
||||
|
@ -1,3 +1,4 @@
|
||||
description: This file stores settings for Dart & Flutter DevTools.
|
||||
documentation: https://docs.flutter.dev/tools/devtools/extensions#configure-extension-enablement-states
|
||||
extensions:
|
||||
- drift: true
|
@ -1,5 +1,5 @@
|
||||
# Uncomment this line to define a global platform for your project
|
||||
# platform :ios, '12.0'
|
||||
platform :ios, '14.0'
|
||||
|
||||
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
|
||||
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
|
||||
@ -45,7 +45,7 @@ post_install do |installer|
|
||||
installer.generated_projects.each do |project|
|
||||
project.targets.each do |target|
|
||||
target.build_configurations.each do |config|
|
||||
config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = '13.0'
|
||||
config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = '14.0'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -224,7 +224,7 @@ EXTERNAL SOURCES:
|
||||
:path: ".symlinks/plugins/wakelock_plus/ios"
|
||||
|
||||
SPEC CHECKSUMS:
|
||||
background_downloader: b42a56120f5348bff70e74222f0e9e6f7f1a1537
|
||||
background_downloader: 50e91d979067b82081aba359d7d916b3ba5fadad
|
||||
connectivity_plus: cb623214f4e1f6ef8fe7403d580fdad517d2f7dd
|
||||
device_info_plus: 21fcca2080fbcd348be798aa36c3e5ed849eefbe
|
||||
DKImagePickerController: 946cec48c7873164274ecc4624d19e3da4c1ef3c
|
||||
@ -261,6 +261,6 @@ SPEC CHECKSUMS:
|
||||
url_launcher_ios: 694010445543906933d732453a59da0a173ae33d
|
||||
wakelock_plus: 04623e3f525556020ebd4034310f20fe7fda8b49
|
||||
|
||||
PODFILE CHECKSUM: 03b7eead4ee77b9e778179eeb0f3b5513617451c
|
||||
PODFILE CHECKSUM: 7ce312f2beab01395db96f6969d90a447279cf45
|
||||
|
||||
COCOAPODS: 1.16.2
|
||||
|
@ -546,7 +546,7 @@
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_BITCODE = NO;
|
||||
INFOPLIST_FILE = Runner/Info.plist;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
|
||||
LD_RUNPATH_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
"@executable_path/Frameworks",
|
||||
@ -690,7 +690,7 @@
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_BITCODE = NO;
|
||||
INFOPLIST_FILE = Runner/Info.plist;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
|
||||
LD_RUNPATH_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
"@executable_path/Frameworks",
|
||||
@ -720,7 +720,7 @@
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_BITCODE = NO;
|
||||
INFOPLIST_FILE = Runner/Info.plist;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
|
||||
LD_RUNPATH_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
"@executable_path/Frameworks",
|
||||
|
@ -5,5 +5,9 @@ const double downloadFailed = -2;
|
||||
// Number of log entries to retain on app start
|
||||
const int kLogTruncateLimit = 250;
|
||||
|
||||
// Sync
|
||||
const int kSyncEventBatchSize = 5000;
|
||||
|
||||
// Hash batch limits
|
||||
const int kBatchHashFileLimit = 128;
|
||||
const int kBatchHashSizeLimit = 1024 * 1024 * 1024; // 1GB
|
||||
|
@ -1,7 +1,12 @@
|
||||
import 'package:immich_mobile/domain/models/sync/sync_event.model.dart';
|
||||
import 'package:http/http.dart' as http;
|
||||
import 'package:immich_mobile/domain/models/sync_event.model.dart';
|
||||
|
||||
abstract interface class ISyncApiRepository {
|
||||
Future<void> ack(String data);
|
||||
Future<void> ack(List<String> data);
|
||||
|
||||
Stream<List<SyncEvent>> watchUserSyncEvent();
|
||||
Future<void> streamChanges(
|
||||
Function(List<SyncEvent>, Function() abort) onData, {
|
||||
int batchSize,
|
||||
http.Client? httpClient,
|
||||
});
|
||||
}
|
||||
|
18  mobile/lib/domain/interfaces/sync_stream.interface.dart  (new file)
@ -0,0 +1,18 @@
|
||||
import 'package:immich_mobile/domain/interfaces/db.interface.dart';
|
||||
import 'package:openapi/api.dart';
|
||||
|
||||
abstract interface class ISyncStreamRepository implements IDatabaseRepository {
|
||||
Future<void> updateUsersV1(Iterable<SyncUserV1> data);
|
||||
Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data);
|
||||
|
||||
Future<void> updatePartnerV1(Iterable<SyncPartnerV1> data);
|
||||
Future<void> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data);
|
||||
|
||||
Future<void> updateAssetsV1(Iterable<SyncAssetV1> data);
|
||||
Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data);
|
||||
Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data);
|
||||
|
||||
Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data);
|
||||
Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data);
|
||||
Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data);
|
||||
}
|
@ -1,14 +0,0 @@
class SyncEvent {
// dynamic
final dynamic data;

final String ack;

SyncEvent({
required this.data,
required this.ack,
});

@override
String toString() => 'SyncEvent(data: $data, ack: $ack)';
}
13
mobile/lib/domain/models/sync_event.model.dart
Normal file
@ -0,0 +1,13 @@
import 'package:openapi/api.dart';

class SyncEvent {
final SyncEntityType type;
// ignore: avoid-dynamic
final dynamic data;
final String ack;

const SyncEvent({required this.type, required this.data, required this.ack});

@override
String toString() => 'SyncEvent(type: $type, data: $data, ack: $ack)';
}
@ -1,49 +1,92 @@
// ignore_for_file: avoid-passing-async-when-sync-expected

import 'dart:async';

import 'package:flutter/foundation.dart';
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';

class SyncStreamService {
final Logger _logger = Logger('SyncStreamService');

final ISyncApiRepository _syncApiRepository;
final ISyncStreamRepository _syncStreamRepository;
final bool Function()? _cancelChecker;

SyncStreamService(this._syncApiRepository);
SyncStreamService({
required ISyncApiRepository syncApiRepository,
required ISyncStreamRepository syncStreamRepository,
bool Function()? cancelChecker,
}) : _syncApiRepository = syncApiRepository,
_syncStreamRepository = syncStreamRepository,
_cancelChecker = cancelChecker;

StreamSubscription? _userSyncSubscription;
bool get isCancelled => _cancelChecker?.call() ?? false;

void syncUsers() {
_userSyncSubscription =
_syncApiRepository.watchUserSyncEvent().listen((events) async {
for (final event in events) {
if (event.data is SyncUserV1) {
final data = event.data as SyncUserV1;
debugPrint("User Update: $data");
Future<void> sync() => _syncApiRepository.streamChanges(_handleEvents);

// final user = await _userRepository.get(data.id);

// if (user == null) {
// continue;
// }

// user.name = data.name;
// user.email = data.email;
// user.updatedAt = DateTime.now();

// await _userRepository.update(user);
// await _syncApiRepository.ack(event.ack);
}

if (event.data is SyncUserDeleteV1) {
final data = event.data as SyncUserDeleteV1;

debugPrint("User delete: $data");
// await _syncApiRepository.ack(event.ack);
}
Future<void> _handleEvents(List<SyncEvent> events, Function() abort) async {
List<SyncEvent> items = [];
for (final event in events) {
if (isCancelled) {
_logger.warning("Sync stream cancelled");
abort();
return;
}
});

if (event.type != items.firstOrNull?.type) {
await _processBatch(items);
}

items.add(event);
}

await _processBatch(items);
}

Future<void> dispose() async {
await _userSyncSubscription?.cancel();
Future<void> _processBatch(List<SyncEvent> batch) async {
if (batch.isEmpty) {
return;
}

final type = batch.first.type;
await _handleSyncData(type, batch.map((e) => e.data));
await _syncApiRepository.ack([batch.last.ack]);
batch.clear();
}

Future<void> _handleSyncData(
SyncEntityType type,
// ignore: avoid-dynamic
Iterable<dynamic> data,
) async {
_logger.fine("Processing sync data for $type of length ${data.length}");
// ignore: prefer-switch-expression
switch (type) {
case SyncEntityType.userV1:
return _syncStreamRepository.updateUsersV1(data.cast());
case SyncEntityType.userDeleteV1:
return _syncStreamRepository.deleteUsersV1(data.cast());
case SyncEntityType.partnerV1:
return _syncStreamRepository.updatePartnerV1(data.cast());
case SyncEntityType.partnerDeleteV1:
return _syncStreamRepository.deletePartnerV1(data.cast());
case SyncEntityType.assetV1:
return _syncStreamRepository.updateAssetsV1(data.cast());
case SyncEntityType.assetDeleteV1:
return _syncStreamRepository.deleteAssetsV1(data.cast());
case SyncEntityType.assetExifV1:
return _syncStreamRepository.updateAssetsExifV1(data.cast());
case SyncEntityType.partnerAssetV1:
return _syncStreamRepository.updatePartnerAssetsV1(data.cast());
case SyncEntityType.partnerAssetDeleteV1:
return _syncStreamRepository.deletePartnerAssetsV1(data.cast());
case SyncEntityType.partnerAssetExifV1:
return _syncStreamRepository.updatePartnerAssetsExifV1(data.cast());
default:
_logger.warning("Unknown sync data type: $type");
}
}
}
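The new _handleEvents groups consecutive events of the same type, flushes the batch whenever the type changes, and acks only the last event of each batch. A self-contained sketch of that grouping logic, assuming nothing beyond core Dart (the record type and the process/ackLast callbacks are illustrative stand-ins, not the app's APIs):

typedef Event = ({String type, String ack, Object data});

Future<void> handleEvents(
  List<Event> events,
  Future<void> Function(String type, List<Object> data) process,
  Future<void> Function(String ack) ackLast,
) async {
  final batch = <Event>[];

  Future<void> flush() async {
    if (batch.isEmpty) return;
    await process(batch.first.type, batch.map((e) => e.data).toList());
    await ackLast(batch.last.ack); // acking the last item covers the batch
    batch.clear();
  }

  for (final event in events) {
    if (batch.isNotEmpty && event.type != batch.first.type) {
      await flush(); // type changed: process what has accumulated so far
    }
    batch.add(event);
  }
  await flush(); // trailing batch
}

Future<void> main() => handleEvents(
      [
        (type: 'user', ack: 'a1', data: 'u1'),
        (type: 'user', ack: 'a2', data: 'u2'),
        (type: 'asset', ack: 'a3', data: 'x1'),
      ],
      (type, data) async {
        print('process $type: $data');
      },
      (ack) async {
        print('ack $ack');
      },
    );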
39
mobile/lib/domain/utils/background_sync.dart
Normal file
@ -0,0 +1,39 @@
// ignore_for_file: avoid-passing-async-when-sync-expected

import 'dart:async';

import 'package:immich_mobile/providers/infrastructure/sync_stream.provider.dart';
import 'package:immich_mobile/utils/isolate.dart';
import 'package:worker_manager/worker_manager.dart';

class BackgroundSyncManager {
Cancelable<void>? _syncTask;

BackgroundSyncManager();

Future<void> cancel() {
final futures = <Future>[];

if (_syncTask != null) {
futures.add(_syncTask!.future);
}
_syncTask?.cancel();
_syncTask = null;

return Future.wait(futures);
}

Future<void> sync() {
if (_syncTask != null) {
return _syncTask!.future;
}

_syncTask = runInIsolateGentle(
computation: (ref) => ref.read(syncStreamServiceProvider).sync(),
);
_syncTask!.whenComplete(() {
_syncTask = null;
});
return _syncTask!.future;
}
}
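BackgroundSyncManager is a single-flight guard: a second sync() while one is running returns the in-flight future, and the slot is cleared on completion. A minimal sketch of that pattern using plain Futures instead of worker_manager's Cancelable (class and method names are illustrative):

class SingleFlight {
  Future<void>? _inFlight;

  Future<void> run(Future<void> Function() job) {
    // Re-use the in-flight run instead of starting a second one.
    final existing = _inFlight;
    if (existing != null) return existing;

    final task = job().whenComplete(() => _inFlight = null);
    _inFlight = task;
    return task;
  }

  // Wait for the current run to finish (a stand-in for cancel-and-wait).
  Future<void> drain() => _inFlight ?? Future.value();
}

Future<void> main() async {
  final gate = SingleFlight();
  Future<void> job() => Future.delayed(const Duration(milliseconds: 100));
  final a = gate.run(job);
  final b = gate.run(job); // returns the same in-flight future
  print(identical(a, b)); // true
  await gate.drain();
}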
@ -1,3 +1,7 @@
import 'dart:typed_data';

import 'package:uuid/parsing.dart';

extension StringExtension on String {
String capitalize() {
return split(" ")
@ -29,3 +33,8 @@ extension DurationExtension on String {
return int.parse(this);
}
}

extension UUIDExtension on String {
Uint8List toUuidByte({bool shouldValidate = false}) =>
UuidParsing.parseAsByteList(this, validate: shouldValidate);
}
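toUuidByte converts a canonical UUID string into its 16-byte form, which the new Drift entities use as primary keys. A usage sketch, assuming package:uuid exposes UuidParsing.parseAsByteList as imported above:

import 'dart:typed_data';

import 'package:uuid/parsing.dart';

extension UUIDExtension on String {
  Uint8List toUuidByte({bool shouldValidate = false}) =>
      UuidParsing.parseAsByteList(this, validate: shouldValidate);
}

void main() {
  const id = '123e4567-e89b-12d3-a456-426614174000';
  final bytes = id.toUuidByte(shouldValidate: true);
  print(bytes.length); // 16 — the compact form used as a key in Drift
}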
@ -1,55 +1,72 @@
|
||||
import 'dart:async';
|
||||
import 'dart:convert';
|
||||
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
|
||||
import 'package:immich_mobile/domain/models/sync/sync_event.model.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
import 'package:openapi/api.dart';
|
||||
import 'package:http/http.dart' as http;
|
||||
import 'package:immich_mobile/constants/constants.dart';
|
||||
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
|
||||
import 'package:immich_mobile/domain/models/sync_event.model.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:openapi/api.dart';
|
||||
|
||||
class SyncApiRepository implements ISyncApiRepository {
|
||||
final Logger _logger = Logger('SyncApiRepository');
|
||||
final ApiService _api;
|
||||
const SyncApiRepository(this._api);
|
||||
SyncApiRepository(this._api);
|
||||
|
||||
@override
|
||||
Stream<List<SyncEvent>> watchUserSyncEvent() {
|
||||
return _getSyncStream(
|
||||
SyncStreamDto(types: [SyncRequestType.usersV1]),
|
||||
);
|
||||
Future<void> ack(List<String> data) {
|
||||
return _api.syncApi.sendSyncAck(SyncAckSetDto(acks: data));
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> ack(String data) {
|
||||
return _api.syncApi.sendSyncAck(SyncAckSetDto(acks: [data]));
|
||||
}
|
||||
|
||||
Stream<List<SyncEvent>> _getSyncStream(
|
||||
SyncStreamDto dto, {
|
||||
int batchSize = 5000,
|
||||
}) async* {
|
||||
final client = http.Client();
|
||||
Future<void> streamChanges(
|
||||
Function(List<SyncEvent>, Function() abort) onData, {
|
||||
int batchSize = kSyncEventBatchSize,
|
||||
http.Client? httpClient,
|
||||
}) async {
|
||||
// ignore: avoid-unused-assignment
|
||||
final stopwatch = Stopwatch()..start();
|
||||
final client = httpClient ?? http.Client();
|
||||
final endpoint = "${_api.apiClient.basePath}/sync/stream";
|
||||
|
||||
final headers = <String, String>{
|
||||
final headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/jsonlines+json',
|
||||
};
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
await _api.applyToParams(queryParams, headerParams);
|
||||
await _api.applyToParams([], headerParams);
|
||||
headers.addAll(headerParams);
|
||||
|
||||
final request = http.Request('POST', Uri.parse(endpoint));
|
||||
request.headers.addAll(headers);
|
||||
request.body = jsonEncode(dto.toJson());
|
||||
request.body = jsonEncode(
|
||||
SyncStreamDto(
|
||||
types: [
|
||||
SyncRequestType.usersV1,
|
||||
SyncRequestType.partnersV1,
|
||||
SyncRequestType.assetsV1,
|
||||
SyncRequestType.partnerAssetsV1,
|
||||
SyncRequestType.assetExifsV1,
|
||||
SyncRequestType.partnerAssetExifsV1,
|
||||
],
|
||||
).toJson(),
|
||||
);
|
||||
|
||||
String previousChunk = '';
|
||||
List<String> lines = [];
|
||||
|
||||
bool shouldAbort = false;
|
||||
|
||||
void abort() {
|
||||
_logger.warning("Abort requested, stopping sync stream");
|
||||
shouldAbort = true;
|
||||
}
|
||||
|
||||
try {
|
||||
final response = await client.send(request);
|
||||
final response =
|
||||
await client.send(request).timeout(const Duration(seconds: 20));
|
||||
|
||||
if (response.statusCode != 200) {
|
||||
final errorBody = await response.stream.bytesToString();
|
||||
@ -60,8 +77,12 @@ class SyncApiRepository implements ISyncApiRepository {
|
||||
}
|
||||
|
||||
await for (final chunk in response.stream.transform(utf8.decoder)) {
|
||||
if (shouldAbort) {
|
||||
break;
|
||||
}
|
||||
|
||||
previousChunk += chunk;
|
||||
final parts = previousChunk.split('\n');
|
||||
final parts = previousChunk.toString().split('\n');
|
||||
previousChunk = parts.removeLast();
|
||||
lines.addAll(parts);
|
||||
|
||||
@ -69,44 +90,59 @@ class SyncApiRepository implements ISyncApiRepository {
|
||||
continue;
|
||||
}
|
||||
|
||||
yield await compute(_parseSyncResponse, lines);
|
||||
await onData(_parseLines(lines), abort);
|
||||
lines.clear();
|
||||
}
|
||||
} finally {
|
||||
if (lines.isNotEmpty) {
|
||||
yield await compute(_parseSyncResponse, lines);
|
||||
|
||||
if (lines.isNotEmpty && !shouldAbort) {
|
||||
await onData(_parseLines(lines), abort);
|
||||
}
|
||||
} catch (error, stack) {
|
||||
_logger.severe("error processing stream", error, stack);
|
||||
return Future.error(error, stack);
|
||||
} finally {
|
||||
client.close();
|
||||
}
|
||||
stopwatch.stop();
|
||||
_logger
|
||||
.info("Remote Sync completed in ${stopwatch.elapsed.inMilliseconds}ms");
|
||||
}
|
||||
|
||||
List<SyncEvent> _parseLines(List<String> lines) {
|
||||
final List<SyncEvent> data = [];
|
||||
|
||||
for (final line in lines) {
|
||||
try {
|
||||
final jsonData = jsonDecode(line);
|
||||
final type = SyncEntityType.fromJson(jsonData['type'])!;
|
||||
final dataJson = jsonData['data'];
|
||||
final ack = jsonData['ack'];
|
||||
final converter = _kResponseMap[type];
|
||||
if (converter == null) {
|
||||
_logger.warning("[_parseSyncResponse] Unknown type $type");
|
||||
continue;
|
||||
}
|
||||
|
||||
data.add(SyncEvent(type: type, data: converter(dataJson), ack: ack));
|
||||
} catch (error, stack) {
|
||||
_logger.severe("[_parseSyncResponse] Error parsing json", error, stack);
|
||||
}
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
}
|
||||
|
||||
// ignore: avoid-dynamic
|
||||
const _kResponseMap = <SyncEntityType, Function(dynamic)>{
|
||||
SyncEntityType.userV1: SyncUserV1.fromJson,
|
||||
SyncEntityType.userDeleteV1: SyncUserDeleteV1.fromJson,
|
||||
SyncEntityType.partnerV1: SyncPartnerV1.fromJson,
|
||||
SyncEntityType.partnerDeleteV1: SyncPartnerDeleteV1.fromJson,
|
||||
SyncEntityType.assetV1: SyncAssetV1.fromJson,
|
||||
SyncEntityType.assetDeleteV1: SyncAssetDeleteV1.fromJson,
|
||||
SyncEntityType.assetExifV1: SyncAssetExifV1.fromJson,
|
||||
SyncEntityType.partnerAssetV1: SyncAssetV1.fromJson,
|
||||
SyncEntityType.partnerAssetDeleteV1: SyncAssetDeleteV1.fromJson,
|
||||
SyncEntityType.partnerAssetExifV1: SyncAssetExifV1.fromJson,
|
||||
};
|
||||
|
||||
// Need to be outside of the class to be able to use compute
|
||||
List<SyncEvent> _parseSyncResponse(List<String> lines) {
|
||||
final List<SyncEvent> data = [];
|
||||
|
||||
for (var line in lines) {
|
||||
try {
|
||||
final jsonData = jsonDecode(line);
|
||||
final type = SyncEntityType.fromJson(jsonData['type'])!;
|
||||
final dataJson = jsonData['data'];
|
||||
final ack = jsonData['ack'];
|
||||
final converter = _kResponseMap[type];
|
||||
if (converter == null) {
|
||||
debugPrint("[_parseSyncReponse] Unknown type $type");
|
||||
continue;
|
||||
}
|
||||
|
||||
data.add(SyncEvent(data: converter(dataJson), ack: ack));
|
||||
} catch (error, stack) {
|
||||
debugPrint("[_parseSyncReponse] Error parsing json $error $stack");
|
||||
}
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
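streamChanges reads the /sync/stream response as JSON Lines: each chunk is appended to previousChunk, complete lines are split off and parsed, and the trailing partial line waits for the next chunk. A standalone sketch of that buffering, with a Stream<String> standing in for the UTF-8-decoded HTTP body and generic maps instead of the openapi converters:

import 'dart:async';
import 'dart:convert';

Stream<Map<String, dynamic>> parseJsonLines(Stream<String> chunks) async* {
  var previous = '';
  await for (final chunk in chunks) {
    previous += chunk;
    final parts = previous.split('\n');
    previous = parts.removeLast(); // may be an incomplete line
    for (final line in parts.where((l) => l.isNotEmpty)) {
      yield jsonDecode(line) as Map<String, dynamic>;
    }
  }
  if (previous.isNotEmpty) {
    yield jsonDecode(previous) as Map<String, dynamic>;
  }
}

Future<void> main() async {
  final source = Stream.fromIterable([
    '{"type":"UserV1","ack":"1"}\n{"type":"Asse', // split mid-object
    'tV1","ack":"2"}\n',
  ]);
  await for (final event in parseJsonLines(source)) {
    print(event);
  }
}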
@ -0,0 +1,134 @@
|
||||
import 'package:drift/drift.dart';
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
|
||||
import 'package:immich_mobile/extensions/string_extensions.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:openapi/api.dart';
|
||||
|
||||
class DriftSyncStreamRepository extends DriftDatabaseRepository
|
||||
implements ISyncStreamRepository {
|
||||
final Logger _logger = Logger('DriftSyncStreamRepository');
|
||||
final Drift _db;
|
||||
|
||||
DriftSyncStreamRepository(super.db) : _db = db;
|
||||
|
||||
@override
|
||||
Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data) async {
|
||||
try {
|
||||
await _db.batch((batch) {
|
||||
for (final user in data) {
|
||||
batch.delete(
|
||||
_db.userEntity,
|
||||
UserEntityCompanion(id: Value(user.userId.toUuidByte())),
|
||||
);
|
||||
}
|
||||
});
|
||||
} catch (error, stack) {
|
||||
_logger.severe('Error while processing SyncUserDeleteV1', error, stack);
|
||||
rethrow;
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> updateUsersV1(Iterable<SyncUserV1> data) async {
|
||||
try {
|
||||
await _db.batch((batch) {
|
||||
for (final user in data) {
|
||||
final companion = UserEntityCompanion(
|
||||
name: Value(user.name),
|
||||
email: Value(user.email),
|
||||
);
|
||||
|
||||
batch.insert(
|
||||
_db.userEntity,
|
||||
companion.copyWith(id: Value(user.id.toUuidByte())),
|
||||
onConflict: DoUpdate((_) => companion),
|
||||
);
|
||||
}
|
||||
});
|
||||
} catch (error, stack) {
|
||||
_logger.severe('Error while processing SyncUserV1', error, stack);
|
||||
rethrow;
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data) async {
|
||||
try {
|
||||
await _db.batch((batch) {
|
||||
for (final partner in data) {
|
||||
batch.delete(
|
||||
_db.partnerEntity,
|
||||
PartnerEntityCompanion(
|
||||
sharedById: Value(partner.sharedById.toUuidByte()),
|
||||
sharedWithId: Value(partner.sharedWithId.toUuidByte()),
|
||||
),
|
||||
);
|
||||
}
|
||||
});
|
||||
} catch (e, s) {
|
||||
_logger.severe('Error while processing SyncPartnerDeleteV1', e, s);
|
||||
rethrow;
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> updatePartnerV1(Iterable<SyncPartnerV1> data) async {
|
||||
try {
|
||||
await _db.batch((batch) {
|
||||
for (final partner in data) {
|
||||
final companion =
|
||||
PartnerEntityCompanion(inTimeline: Value(partner.inTimeline));
|
||||
|
||||
batch.insert(
|
||||
_db.partnerEntity,
|
||||
companion.copyWith(
|
||||
sharedById: Value(partner.sharedById.toUuidByte()),
|
||||
sharedWithId: Value(partner.sharedWithId.toUuidByte()),
|
||||
),
|
||||
onConflict: DoUpdate((_) => companion),
|
||||
);
|
||||
}
|
||||
});
|
||||
} catch (e, s) {
|
||||
_logger.severe('Error while processing SyncPartnerV1', e, s);
|
||||
rethrow;
|
||||
}
|
||||
}
|
||||
|
||||
// Assets
|
||||
@override
|
||||
Future<void> updateAssetsV1(Iterable<SyncAssetV1> data) async {
|
||||
debugPrint("updateAssetsV1 - ${data.length}");
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
|
||||
debugPrint("deleteAssetsV1 - ${data.length}");
|
||||
}
|
||||
|
||||
// Partner Assets
|
||||
@override
|
||||
Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data) async {
|
||||
debugPrint("updatePartnerAssetsV1 - ${data.length}");
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
|
||||
debugPrint("deletePartnerAssetsV1 - ${data.length}");
|
||||
}
|
||||
|
||||
// EXIF
|
||||
@override
|
||||
Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
|
||||
debugPrint("updateAssetsExifV1 - ${data.length}");
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
|
||||
debugPrint("updatePartnerAssetsExifV1 - ${data.length}");
|
||||
}
|
||||
}
|
@ -11,6 +11,7 @@ import 'package:flutter_displaymode/flutter_displaymode.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/constants/locales.dart';
|
||||
import 'package:immich_mobile/extensions/build_context_extensions.dart';
|
||||
import 'package:immich_mobile/generated/codegen_loader.g.dart';
|
||||
import 'package:immich_mobile/providers/app_life_cycle.provider.dart';
|
||||
import 'package:immich_mobile/providers/asset_viewer/share_intent_upload.provider.dart';
|
||||
import 'package:immich_mobile/providers/db.provider.dart';
|
||||
@ -31,13 +32,15 @@ import 'package:immich_mobile/utils/migration.dart';
|
||||
import 'package:intl/date_symbol_data_local.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:timezone/data/latest.dart';
|
||||
import 'package:immich_mobile/generated/codegen_loader.g.dart';
|
||||
import 'package:worker_manager/worker_manager.dart';
|
||||
|
||||
void main() async {
|
||||
ImmichWidgetsBinding();
|
||||
final db = await Bootstrap.initIsar();
|
||||
await Bootstrap.initDomain(db);
|
||||
await initApp();
|
||||
// Warm-up isolate pool for worker manager
|
||||
await workerManager.init(dynamicSpawning: true);
|
||||
await migrateDatabaseIfNeeded(db);
|
||||
HttpOverrides.global = HttpSSLCertOverride();
|
||||
|
||||
|
@ -63,9 +63,12 @@ class GalleryViewerPage extends HookConsumerWidget {
|
||||
final loadAsset = renderList.loadAsset;
|
||||
final isPlayingMotionVideo = ref.watch(isPlayingMotionVideoProvider);
|
||||
|
||||
// This key is to prevent the video player from being re-initialized during
|
||||
// hero animation or device rotation.
|
||||
final videoPlayerKey = useMemoized(() => GlobalKey());
|
||||
final videoPlayerKeys = useRef<Map<int, GlobalKey>>({});
|
||||
|
||||
GlobalKey getVideoPlayerKey(int id) {
|
||||
videoPlayerKeys.value.putIfAbsent(id, () => GlobalKey());
|
||||
return videoPlayerKeys.value[id]!;
|
||||
}
|
||||
|
||||
Future<void> precacheNextImage(int index) async {
|
||||
if (!context.mounted) {
|
||||
@ -243,7 +246,7 @@ class GalleryViewerPage extends HookConsumerWidget {
|
||||
width: context.width,
|
||||
height: context.height,
|
||||
child: NativeVideoViewerPage(
|
||||
key: videoPlayerKey,
|
||||
key: getVideoPlayerKey(asset.id),
|
||||
asset: asset,
|
||||
image: Image(
|
||||
key: ValueKey(asset),
|
||||
|
@ -1,6 +1,7 @@
|
||||
import 'package:auto_route/auto_route.dart';
|
||||
import 'package:easy_localization/easy_localization.dart';
|
||||
import 'package:flutter/material.dart';
|
||||
import 'package:geolocator/geolocator.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/domain/models/user.model.dart';
|
||||
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
|
||||
@ -12,6 +13,7 @@ import 'package:immich_mobile/providers/server_info.provider.dart';
|
||||
import 'package:immich_mobile/routing/router.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
import 'package:immich_mobile/utils/image_url_builder.dart';
|
||||
import 'package:immich_mobile/utils/map_utils.dart';
|
||||
import 'package:immich_mobile/widgets/album/album_thumbnail_card.dart';
|
||||
import 'package:immich_mobile/widgets/common/immich_app_bar.dart';
|
||||
import 'package:immich_mobile/widgets/common/user_avatar.dart';
|
||||
@ -297,32 +299,34 @@ class LocalAlbumsCollectionCard extends HookConsumerWidget {
|
||||
child: Column(
|
||||
crossAxisAlignment: CrossAxisAlignment.start,
|
||||
children: [
|
||||
Container(
|
||||
SizedBox(
|
||||
height: size,
|
||||
width: size,
|
||||
decoration: BoxDecoration(
|
||||
borderRadius: BorderRadius.circular(20),
|
||||
gradient: LinearGradient(
|
||||
colors: [
|
||||
context.colorScheme.primary.withAlpha(30),
|
||||
context.colorScheme.primary.withAlpha(25),
|
||||
],
|
||||
begin: Alignment.topCenter,
|
||||
end: Alignment.bottomCenter,
|
||||
child: DecoratedBox(
|
||||
decoration: BoxDecoration(
|
||||
borderRadius: const BorderRadius.all(Radius.circular(20)),
|
||||
gradient: LinearGradient(
|
||||
colors: [
|
||||
context.colorScheme.primary.withAlpha(30),
|
||||
context.colorScheme.primary.withAlpha(25),
|
||||
],
|
||||
begin: Alignment.topCenter,
|
||||
end: Alignment.bottomCenter,
|
||||
),
|
||||
),
|
||||
child: GridView.count(
|
||||
crossAxisCount: 2,
|
||||
padding: const EdgeInsets.all(12),
|
||||
crossAxisSpacing: 8,
|
||||
mainAxisSpacing: 8,
|
||||
physics: const NeverScrollableScrollPhysics(),
|
||||
children: albums.take(4).map((album) {
|
||||
return AlbumThumbnailCard(
|
||||
album: album,
|
||||
showTitle: false,
|
||||
);
|
||||
}).toList(),
|
||||
),
|
||||
),
|
||||
child: GridView.count(
|
||||
crossAxisCount: 2,
|
||||
padding: const EdgeInsets.all(12),
|
||||
crossAxisSpacing: 8,
|
||||
mainAxisSpacing: 8,
|
||||
physics: const NeverScrollableScrollPhysics(),
|
||||
children: albums.take(4).map((album) {
|
||||
return AlbumThumbnailCard(
|
||||
album: album,
|
||||
showTitle: false,
|
||||
);
|
||||
}).toList(),
|
||||
),
|
||||
),
|
||||
Padding(
|
||||
@ -353,43 +357,66 @@ class PlacesCollectionCard extends StatelessWidget {
|
||||
final widthFactor = isTablet ? 0.25 : 0.5;
|
||||
final size = context.width * widthFactor - 20.0;
|
||||
|
||||
return GestureDetector(
|
||||
onTap: () => context.pushRoute(const PlacesCollectionRoute()),
|
||||
child: Column(
|
||||
crossAxisAlignment: CrossAxisAlignment.start,
|
||||
children: [
|
||||
Container(
|
||||
height: size,
|
||||
width: size,
|
||||
decoration: BoxDecoration(
|
||||
borderRadius: BorderRadius.circular(20),
|
||||
color: context.colorScheme.secondaryContainer.withAlpha(100),
|
||||
),
|
||||
child: IgnorePointer(
|
||||
child: MapThumbnail(
|
||||
zoom: 8,
|
||||
centre: const LatLng(
|
||||
21.44950,
|
||||
-157.91959,
|
||||
),
|
||||
showAttribution: false,
|
||||
themeMode:
|
||||
context.isDarkTheme ? ThemeMode.dark : ThemeMode.light,
|
||||
),
|
||||
),
|
||||
),
|
||||
Padding(
|
||||
padding: const EdgeInsets.all(8.0),
|
||||
child: Text(
|
||||
'places'.tr(),
|
||||
style: context.textTheme.titleSmall?.copyWith(
|
||||
color: context.colorScheme.onSurface,
|
||||
fontWeight: FontWeight.w500,
|
||||
),
|
||||
),
|
||||
),
|
||||
],
|
||||
return FutureBuilder<(Position?, LocationPermission?)>(
|
||||
future: MapUtils.checkPermAndGetLocation(
|
||||
context: context,
|
||||
silent: true,
|
||||
),
|
||||
builder: (context, snapshot) {
|
||||
var position = snapshot.data?.$1;
|
||||
return GestureDetector(
|
||||
onTap: () => context.pushRoute(
|
||||
PlacesCollectionRoute(
|
||||
currentLocation: position != null
|
||||
? LatLng(position.latitude, position.longitude)
|
||||
: null,
|
||||
),
|
||||
),
|
||||
child: Column(
|
||||
crossAxisAlignment: CrossAxisAlignment.start,
|
||||
children: [
|
||||
SizedBox(
|
||||
height: size,
|
||||
width: size,
|
||||
child: DecoratedBox(
|
||||
decoration: BoxDecoration(
|
||||
borderRadius:
|
||||
const BorderRadius.all(Radius.circular(20)),
|
||||
color: context.colorScheme.secondaryContainer
|
||||
.withAlpha(100),
|
||||
),
|
||||
child: IgnorePointer(
|
||||
child: snapshot.connectionState ==
|
||||
ConnectionState.waiting
|
||||
? const Center(child: CircularProgressIndicator())
|
||||
: MapThumbnail(
|
||||
zoom: 8,
|
||||
centre: LatLng(
|
||||
position?.latitude ?? 21.44950,
|
||||
position?.longitude ?? -157.91959,
|
||||
),
|
||||
showAttribution: false,
|
||||
themeMode: context.isDarkTheme
|
||||
? ThemeMode.dark
|
||||
: ThemeMode.light,
|
||||
),
|
||||
),
|
||||
),
|
||||
),
|
||||
Padding(
|
||||
padding: const EdgeInsets.all(8.0),
|
||||
child: Text(
|
||||
'places'.tr(),
|
||||
style: context.textTheme.titleSmall?.copyWith(
|
||||
color: context.colorScheme.onSurface,
|
||||
fontWeight: FontWeight.w500,
|
||||
),
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
);
|
||||
},
|
||||
);
|
||||
},
|
||||
);
|
||||
|
@ -4,11 +4,11 @@ import 'package:flutter/material.dart';
|
||||
import 'package:flutter_hooks/flutter_hooks.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/extensions/build_context_extensions.dart';
|
||||
import 'package:immich_mobile/extensions/theme_extensions.dart';
|
||||
import 'package:immich_mobile/providers/search/people.provider.dart';
|
||||
import 'package:immich_mobile/routing/router.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
import 'package:immich_mobile/utils/image_url_builder.dart';
|
||||
import 'package:immich_mobile/widgets/common/search_field.dart';
|
||||
import 'package:immich_mobile/widgets/search/person_name_edit_form.dart';
|
||||
|
||||
@RoutePage()
|
||||
@ -42,47 +42,12 @@ class PeopleCollectionPage extends HookConsumerWidget {
|
||||
appBar: AppBar(
|
||||
automaticallyImplyLeading: search.value == null,
|
||||
title: search.value != null
|
||||
? TextField(
|
||||
? SearchField(
|
||||
focusNode: formFocus,
|
||||
onTapOutside: (_) => formFocus.unfocus(),
|
||||
onChanged: (value) => search.value = value,
|
||||
decoration: InputDecoration(
|
||||
contentPadding: const EdgeInsets.only(left: 24),
|
||||
filled: true,
|
||||
fillColor: context.primaryColor.withValues(alpha: 0.1),
|
||||
hintStyle: context.textTheme.bodyLarge?.copyWith(
|
||||
color: context.themeData.colorScheme.onSurfaceSecondary,
|
||||
),
|
||||
border: OutlineInputBorder(
|
||||
borderRadius: BorderRadius.circular(25),
|
||||
borderSide: BorderSide(
|
||||
color: context.colorScheme.surfaceContainerHighest,
|
||||
),
|
||||
),
|
||||
enabledBorder: OutlineInputBorder(
|
||||
borderRadius: BorderRadius.circular(25),
|
||||
borderSide: BorderSide(
|
||||
color: context.colorScheme.surfaceContainerHighest,
|
||||
),
|
||||
),
|
||||
disabledBorder: OutlineInputBorder(
|
||||
borderRadius: BorderRadius.circular(25),
|
||||
borderSide: BorderSide(
|
||||
color: context.colorScheme.surfaceContainerHighest,
|
||||
),
|
||||
),
|
||||
focusedBorder: OutlineInputBorder(
|
||||
borderRadius: BorderRadius.circular(25),
|
||||
borderSide: BorderSide(
|
||||
color: context.colorScheme.primary.withAlpha(150),
|
||||
),
|
||||
),
|
||||
prefixIcon: Icon(
|
||||
Icons.search_rounded,
|
||||
color: context.colorScheme.primary,
|
||||
),
|
||||
hintText: 'filter_people'.tr(),
|
||||
),
|
||||
filled: true,
|
||||
hintText: 'filter_people'.tr(),
|
||||
autofocus: true,
|
||||
)
|
||||
: Text('people'.tr()),
|
||||
|
@ -2,6 +2,7 @@ import 'package:auto_route/auto_route.dart';
|
||||
import 'package:cached_network_image/cached_network_image.dart';
|
||||
import 'package:easy_localization/easy_localization.dart';
|
||||
import 'package:flutter/material.dart';
|
||||
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/entities/asset.entity.dart';
|
||||
@ -12,43 +13,75 @@ import 'package:immich_mobile/pages/common/large_leading_tile.dart';
|
||||
import 'package:immich_mobile/providers/search/search_page_state.provider.dart';
|
||||
import 'package:immich_mobile/routing/router.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
import 'package:immich_mobile/widgets/common/search_field.dart';
|
||||
import 'package:immich_mobile/widgets/map/map_thumbnail.dart';
|
||||
import 'package:maplibre_gl/maplibre_gl.dart';
|
||||
|
||||
@RoutePage()
|
||||
class PlacesCollectionPage extends HookConsumerWidget {
|
||||
const PlacesCollectionPage({super.key});
|
||||
const PlacesCollectionPage({super.key, this.currentLocation});
|
||||
final LatLng? currentLocation;
|
||||
@override
|
||||
Widget build(BuildContext context, WidgetRef ref) {
|
||||
final places = ref.watch(getAllPlacesProvider);
|
||||
final formFocus = useFocusNode();
|
||||
final ValueNotifier<String?> search = useState(null);
|
||||
|
||||
return Scaffold(
|
||||
appBar: AppBar(
|
||||
title: Text('places'.tr()),
|
||||
automaticallyImplyLeading: search.value == null,
|
||||
title: search.value != null
|
||||
? SearchField(
|
||||
autofocus: true,
|
||||
filled: true,
|
||||
focusNode: formFocus,
|
||||
onChanged: (value) => search.value = value,
|
||||
onTapOutside: (_) => formFocus.unfocus(),
|
||||
hintText: 'filter_places'.tr(),
|
||||
)
|
||||
: Text('places'.tr()),
|
||||
actions: [
|
||||
IconButton(
|
||||
icon: Icon(search.value != null ? Icons.close : Icons.search),
|
||||
onPressed: () {
|
||||
search.value = search.value == null ? '' : null;
|
||||
},
|
||||
),
|
||||
],
|
||||
),
|
||||
body: ListView(
|
||||
shrinkWrap: true,
|
||||
children: [
|
||||
Padding(
|
||||
padding: const EdgeInsets.all(16.0),
|
||||
child: SizedBox(
|
||||
height: 200,
|
||||
width: context.width,
|
||||
child: MapThumbnail(
|
||||
onTap: (_, __) => context.pushRoute(const MapRoute()),
|
||||
zoom: 8,
|
||||
centre: const LatLng(
|
||||
21.44950,
|
||||
-157.91959,
|
||||
if (search.value == null)
|
||||
Padding(
|
||||
padding: const EdgeInsets.all(16.0),
|
||||
child: SizedBox(
|
||||
height: 200,
|
||||
width: context.width,
|
||||
child: MapThumbnail(
|
||||
onTap: (_, __) => context
|
||||
.pushRoute(MapRoute(initialLocation: currentLocation)),
|
||||
zoom: 8,
|
||||
centre: currentLocation ??
|
||||
const LatLng(
|
||||
21.44950,
|
||||
-157.91959,
|
||||
),
|
||||
showAttribution: false,
|
||||
themeMode:
|
||||
context.isDarkTheme ? ThemeMode.dark : ThemeMode.light,
|
||||
),
|
||||
showAttribution: false,
|
||||
themeMode:
|
||||
context.isDarkTheme ? ThemeMode.dark : ThemeMode.light,
|
||||
),
|
||||
),
|
||||
),
|
||||
places.when(
|
||||
data: (places) {
|
||||
if (search.value != null) {
|
||||
places = places.where((place) {
|
||||
return place.label
|
||||
.toLowerCase()
|
||||
.contains(search.value!.toLowerCase());
|
||||
}).toList();
|
||||
}
|
||||
return ListView.builder(
|
||||
shrinkWrap: true,
|
||||
physics: const NeverScrollableScrollPhysics(),
|
||||
|
@ -34,7 +34,8 @@ import 'package:maplibre_gl/maplibre_gl.dart';
|
||||
|
||||
@RoutePage()
|
||||
class MapPage extends HookConsumerWidget {
|
||||
const MapPage({super.key});
|
||||
const MapPage({super.key, this.initialLocation});
|
||||
final LatLng? initialLocation;
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context, WidgetRef ref) {
|
||||
@ -235,7 +236,8 @@ class MapPage extends HookConsumerWidget {
|
||||
}
|
||||
|
||||
void onZoomToLocation() async {
|
||||
final (location, error) = await MapUtils.checkPermAndGetLocation(context);
|
||||
final (location, error) =
|
||||
await MapUtils.checkPermAndGetLocation(context: context);
|
||||
if (error != null) {
|
||||
if (error == LocationPermission.unableToDetermine && context.mounted) {
|
||||
ImmichToast.show(
|
||||
@ -272,6 +274,7 @@ class MapPage extends HookConsumerWidget {
|
||||
body: Stack(
|
||||
children: [
|
||||
_MapWithMarker(
|
||||
initialLocation: initialLocation,
|
||||
style: style,
|
||||
selectedMarker: selectedMarker,
|
||||
onMapCreated: onMapCreated,
|
||||
@ -303,6 +306,7 @@ class MapPage extends HookConsumerWidget {
|
||||
body: Stack(
|
||||
children: [
|
||||
_MapWithMarker(
|
||||
initialLocation: initialLocation,
|
||||
style: style,
|
||||
selectedMarker: selectedMarker,
|
||||
onMapCreated: onMapCreated,
|
||||
@ -368,6 +372,7 @@ class _MapWithMarker extends StatelessWidget {
|
||||
final OnStyleLoadedCallback onStyleLoaded;
|
||||
final Function()? onMarkerTapped;
|
||||
final ValueNotifier<_AssetMarkerMeta?> selectedMarker;
|
||||
final LatLng? initialLocation;
|
||||
|
||||
const _MapWithMarker({
|
||||
required this.style,
|
||||
@ -377,6 +382,7 @@ class _MapWithMarker extends StatelessWidget {
|
||||
required this.onStyleLoaded,
|
||||
required this.selectedMarker,
|
||||
this.onMarkerTapped,
|
||||
this.initialLocation,
|
||||
});
|
||||
|
||||
@override
|
||||
@ -389,8 +395,10 @@ class _MapWithMarker extends StatelessWidget {
|
||||
children: [
|
||||
style.widgetWhen(
|
||||
onData: (style) => MapLibreMap(
|
||||
initialCameraPosition:
|
||||
const CameraPosition(target: LatLng(0, 0)),
|
||||
initialCameraPosition: CameraPosition(
|
||||
target: initialLocation ?? const LatLng(0, 0),
|
||||
zoom: initialLocation != null ? 12 : 0,
|
||||
),
|
||||
styleString: style,
|
||||
// This is needed to update the selectedMarker's position on map camera updates
|
||||
// The changes are notified through the mapController ValueListener which is added in [onMapCreated]
|
||||
|
@ -46,7 +46,7 @@ class MapLocationPickerPage extends HookConsumerWidget {
|
||||
|
||||
Future<void> getCurrentLocation() async {
|
||||
var (currentLocation, _) =
|
||||
await MapUtils.checkPermAndGetLocation(context);
|
||||
await MapUtils.checkPermAndGetLocation(context: context);
|
||||
|
||||
if (currentLocation == null) {
|
||||
return;
|
||||
|
8
mobile/lib/providers/background_sync.provider.dart
Normal file
@ -0,0 +1,8 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/utils/background_sync.dart';

final backgroundSyncProvider = Provider<BackgroundSyncManager>((ref) {
final manager = BackgroundSyncManager();
ref.onDispose(manager.cancel);
return manager;
});
@ -106,12 +106,11 @@ class ImmichLocalImageProvider extends ImageProvider<ImmichLocalImageProvider> {
|
||||
bool operator ==(Object other) {
|
||||
if (identical(this, other)) return true;
|
||||
if (other is ImmichLocalImageProvider) {
|
||||
return asset == other.asset;
|
||||
return asset.id == other.asset.id && asset.localId == other.asset.localId;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => asset.hashCode;
|
||||
int get hashCode => Object.hash(asset.id, asset.localId);
|
||||
}
|
||||
|
@ -82,11 +82,13 @@ class ImmichLocalThumbnailProvider
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) {
|
||||
if (other is! ImmichLocalThumbnailProvider) return false;
|
||||
if (identical(this, other)) return true;
|
||||
return asset == other.asset;
|
||||
if (other is ImmichLocalThumbnailProvider) {
|
||||
return asset.id == other.asset.id && asset.localId == other.asset.localId;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => asset.hashCode;
|
||||
int get hashCode => Object.hash(asset.id, asset.localId);
|
||||
}
|
||||
|
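Both image providers above switch from whole-Asset equality to comparing the (id, localId) pair, with hashCode derived from the same pair so Flutter's image-cache keys stay stable when unrelated Asset fields change. A small sketch of that ==/hashCode pairing, using a hypothetical key type rather than the app's Asset:

class FakeKey {
  const FakeKey(this.id, this.localId);

  final int id;
  final String? localId;

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is FakeKey && id == other.id && localId == other.localId;

  @override
  int get hashCode => Object.hash(id, localId);
}

void main() {
  const a = FakeKey(1, 'local-1');
  const b = FakeKey(1, 'local-1');
  print(a == b && a.hashCode == b.hashCode); // true: same cache slot
}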
12
mobile/lib/providers/infrastructure/cancel.provider.dart
Normal file
@ -0,0 +1,12 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';

/// Provider holding a boolean function that returns true when cancellation is requested.
/// A computation running in the isolate uses the function to implement cooperative cancellation.
final cancellationProvider = Provider<bool Function()>(
// This will be overridden in the isolate's container.
// Throwing ensures it's not used without an override.
(ref) => throw UnimplementedError(
"cancellationProvider must be overridden in the isolate's ProviderContainer and not to be used in the root isolate",
),
name: 'cancellationProvider',
);
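cancellationProvider is only meaningful once overridden inside the isolate's ProviderContainer, as the isolate helper below does. A test-style sketch of that override and the cooperative check; it declares a local copy of the provider so the snippet stays self-contained:

import 'package:hooks_riverpod/hooks_riverpod.dart';

final cancellationProvider = Provider<bool Function()>(
  (ref) => throw UnimplementedError('override me'),
);

Future<void> main() async {
  var cancelled = false;
  final container = ProviderContainer(
    overrides: [
      cancellationProvider.overrideWithValue(() => cancelled),
    ],
  );

  final isCancelled = container.read(cancellationProvider);
  for (var i = 0; i < 5; i++) {
    if (isCancelled()) break; // cooperative cancellation check
    if (i == 2) cancelled = true; // pretend a cancel arrives mid-work
  }
  container.dispose();
}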
@ -1,4 +1,7 @@
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
|
||||
import 'package:isar/isar.dart';
|
||||
import 'package:riverpod_annotation/riverpod_annotation.dart';
|
||||
|
||||
@ -6,3 +9,9 @@ part 'db.provider.g.dart';
|
||||
|
||||
@Riverpod(keepAlive: true)
|
||||
Isar isar(Ref ref) => throw UnimplementedError('isar');
|
||||
|
||||
final driftProvider = Provider<Drift>((ref) {
|
||||
final drift = Drift();
|
||||
ref.onDispose(() => unawaited(drift.close()));
|
||||
return drift;
|
||||
});
|
||||
|
@ -1,24 +1,23 @@
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/domain/services/sync_stream.service.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/sync_stream.repository.dart';
|
||||
import 'package:immich_mobile/providers/api.provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/cancel.provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
|
||||
|
||||
final syncStreamServiceProvider = Provider(
|
||||
(ref) {
|
||||
final instance = SyncStreamService(
|
||||
ref.watch(syncApiRepositoryProvider),
|
||||
);
|
||||
|
||||
ref.onDispose(() => unawaited(instance.dispose()));
|
||||
|
||||
return instance;
|
||||
},
|
||||
(ref) => SyncStreamService(
|
||||
syncApiRepository: ref.watch(syncApiRepositoryProvider),
|
||||
syncStreamRepository: ref.watch(syncStreamRepositoryProvider),
|
||||
cancelChecker: ref.watch(cancellationProvider),
|
||||
),
|
||||
);
|
||||
|
||||
final syncApiRepositoryProvider = Provider(
|
||||
(ref) => SyncApiRepository(
|
||||
ref.watch(apiServiceProvider),
|
||||
),
|
||||
(ref) => SyncApiRepository(ref.watch(apiServiceProvider)),
|
||||
);
|
||||
|
||||
final syncStreamRepositoryProvider = Provider(
|
||||
(ref) => DriftSyncStreamRepository(ref.watch(driftProvider)),
|
||||
);
|
||||
|
@ -1024,10 +1024,17 @@ class MapLocationPickerRouteArgs {
|
||||
|
||||
/// generated route for
|
||||
/// [MapPage]
|
||||
class MapRoute extends PageRouteInfo<void> {
|
||||
const MapRoute({List<PageRouteInfo>? children})
|
||||
: super(
|
||||
class MapRoute extends PageRouteInfo<MapRouteArgs> {
|
||||
MapRoute({
|
||||
Key? key,
|
||||
LatLng? initialLocation,
|
||||
List<PageRouteInfo>? children,
|
||||
}) : super(
|
||||
MapRoute.name,
|
||||
args: MapRouteArgs(
|
||||
key: key,
|
||||
initialLocation: initialLocation,
|
||||
),
|
||||
initialChildren: children,
|
||||
);
|
||||
|
||||
@ -1036,11 +1043,32 @@ class MapRoute extends PageRouteInfo<void> {
|
||||
static PageInfo page = PageInfo(
|
||||
name,
|
||||
builder: (data) {
|
||||
return const MapPage();
|
||||
final args =
|
||||
data.argsAs<MapRouteArgs>(orElse: () => const MapRouteArgs());
|
||||
return MapPage(
|
||||
key: args.key,
|
||||
initialLocation: args.initialLocation,
|
||||
);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
class MapRouteArgs {
|
||||
const MapRouteArgs({
|
||||
this.key,
|
||||
this.initialLocation,
|
||||
});
|
||||
|
||||
final Key? key;
|
||||
|
||||
final LatLng? initialLocation;
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return 'MapRouteArgs{key: $key, initialLocation: $initialLocation}';
|
||||
}
|
||||
}
|
||||
|
||||
/// generated route for
|
||||
/// [MemoryPage]
|
||||
class MemoryRoute extends PageRouteInfo<MemoryRouteArgs> {
|
||||
@ -1333,10 +1361,17 @@ class PhotosRoute extends PageRouteInfo<void> {
|
||||
|
||||
/// generated route for
|
||||
/// [PlacesCollectionPage]
|
||||
class PlacesCollectionRoute extends PageRouteInfo<void> {
|
||||
const PlacesCollectionRoute({List<PageRouteInfo>? children})
|
||||
: super(
|
||||
class PlacesCollectionRoute extends PageRouteInfo<PlacesCollectionRouteArgs> {
|
||||
PlacesCollectionRoute({
|
||||
Key? key,
|
||||
LatLng? currentLocation,
|
||||
List<PageRouteInfo>? children,
|
||||
}) : super(
|
||||
PlacesCollectionRoute.name,
|
||||
args: PlacesCollectionRouteArgs(
|
||||
key: key,
|
||||
currentLocation: currentLocation,
|
||||
),
|
||||
initialChildren: children,
|
||||
);
|
||||
|
||||
@ -1345,11 +1380,32 @@ class PlacesCollectionRoute extends PageRouteInfo<void> {
|
||||
static PageInfo page = PageInfo(
|
||||
name,
|
||||
builder: (data) {
|
||||
return const PlacesCollectionPage();
|
||||
final args = data.argsAs<PlacesCollectionRouteArgs>(
|
||||
orElse: () => const PlacesCollectionRouteArgs());
|
||||
return PlacesCollectionPage(
|
||||
key: args.key,
|
||||
currentLocation: args.currentLocation,
|
||||
);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
class PlacesCollectionRouteArgs {
|
||||
const PlacesCollectionRouteArgs({
|
||||
this.key,
|
||||
this.currentLocation,
|
||||
});
|
||||
|
||||
final Key? key;
|
||||
|
||||
final LatLng? currentLocation;
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return 'PlacesCollectionRouteArgs{key: $key, currentLocation: $currentLocation}';
|
||||
}
|
||||
}
|
||||
|
||||
/// generated route for
|
||||
/// [RecentlyAddedPage]
|
||||
class RecentlyAddedRoute extends PageRouteInfo<void> {
|
||||
|
@ -3,12 +3,14 @@ import 'dart:io';
|
||||
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/domain/utils/background_sync.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/interfaces/auth.interface.dart';
|
||||
import 'package:immich_mobile/interfaces/auth_api.interface.dart';
|
||||
import 'package:immich_mobile/models/auth/auxilary_endpoint.model.dart';
|
||||
import 'package:immich_mobile/models/auth/login_response.model.dart';
|
||||
import 'package:immich_mobile/providers/api.provider.dart';
|
||||
import 'package:immich_mobile/providers/background_sync.provider.dart';
|
||||
import 'package:immich_mobile/repositories/auth.repository.dart';
|
||||
import 'package:immich_mobile/repositories/auth_api.repository.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
@ -22,6 +24,7 @@ final authServiceProvider = Provider(
|
||||
ref.watch(authRepositoryProvider),
|
||||
ref.watch(apiServiceProvider),
|
||||
ref.watch(networkServiceProvider),
|
||||
ref.watch(backgroundSyncProvider),
|
||||
),
|
||||
);
|
||||
|
||||
@ -30,6 +33,7 @@ class AuthService {
|
||||
final IAuthRepository _authRepository;
|
||||
final ApiService _apiService;
|
||||
final NetworkService _networkService;
|
||||
final BackgroundSyncManager _backgroundSyncManager;
|
||||
|
||||
final _log = Logger("AuthService");
|
||||
|
||||
@ -38,6 +42,7 @@ class AuthService {
|
||||
this._authRepository,
|
||||
this._apiService,
|
||||
this._networkService,
|
||||
this._backgroundSyncManager,
|
||||
);
|
||||
|
||||
/// Validates the provided server URL by resolving and setting the endpoint.
|
||||
@ -115,8 +120,10 @@ class AuthService {
|
||||
/// - Asset ETag
|
||||
///
|
||||
/// All deletions are executed in parallel using [Future.wait].
|
||||
Future<void> clearLocalData() {
|
||||
return Future.wait([
|
||||
Future<void> clearLocalData() async {
|
||||
// Cancel any ongoing background sync operations before clearing data
|
||||
await _backgroundSyncManager.cancel();
|
||||
await Future.wait([
|
||||
_authRepository.clearLocalData(),
|
||||
Store.delete(StoreKey.currentUser),
|
||||
Store.delete(StoreKey.accessToken),
|
||||
|
@ -351,7 +351,6 @@ class BackupService {
|
||||
);
|
||||
|
||||
baseRequest.headers.addAll(ApiService.getRequestHeaders());
|
||||
baseRequest.headers["Transfer-Encoding"] = "chunked";
|
||||
baseRequest.fields['deviceAssetId'] = asset.localId!;
|
||||
baseRequest.fields['deviceId'] = deviceId;
|
||||
baseRequest.fields['fileCreatedAt'] =
|
||||
|
@ -48,11 +48,15 @@ abstract final class Bootstrap {
|
||||
);
|
||||
}
|
||||
|
||||
static Future<void> initDomain(Isar db) async {
|
||||
static Future<void> initDomain(
|
||||
Isar db, {
|
||||
bool shouldBufferLogs = true,
|
||||
}) async {
|
||||
await StoreService.init(storeRepository: IsarStoreRepository(db));
|
||||
await LogService.init(
|
||||
logRepository: IsarLogRepository(db),
|
||||
storeRepository: IsarStoreRepository(db),
|
||||
shouldBuffer: shouldBufferLogs,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
69
mobile/lib/utils/isolate.dart
Normal file
@ -0,0 +1,69 @@
import 'dart:async';
import 'dart:ui';

import 'package:flutter/services.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/cancel.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/utils/bootstrap.dart';
import 'package:logging/logging.dart';
import 'package:worker_manager/worker_manager.dart';

class InvalidIsolateUsageException implements Exception {
const InvalidIsolateUsageException();

@override
String toString() =>
"IsolateHelper should only be used from the root isolate";
}

// !! Should be used only from the root isolate
Cancelable<T?> runInIsolateGentle<T>({
required Future<T> Function(ProviderContainer ref) computation,
String? debugLabel,
}) {
final token = RootIsolateToken.instance;
if (token == null) {
throw const InvalidIsolateUsageException();
}

return workerManager.executeGentle((cancelledChecker) async {
BackgroundIsolateBinaryMessenger.ensureInitialized(token);
DartPluginRegistrant.ensureInitialized();

final db = await Bootstrap.initIsar();
await Bootstrap.initDomain(db, shouldBufferLogs: false);
final ref = ProviderContainer(
overrides: [
// TODO: Remove once isar is removed
dbProvider.overrideWithValue(db),
isarProvider.overrideWithValue(db),
cancellationProvider.overrideWithValue(cancelledChecker),
],
);

Logger log = Logger("IsolateLogger");

try {
return await computation(ref);
} on CanceledError {
log.warning(
"Computation cancelled ${debugLabel == null ? '' : ' for $debugLabel'}",
);
} catch (error, stack) {
log.severe(
"Error in runInIsolateGentle ${debugLabel == null ? '' : ' for $debugLabel'}",
error,
stack,
);
} finally {
// Wait for the logs to flush
await Future.delayed(const Duration(seconds: 2));
// Always close the new db connection on Isolate end
ref.read(driftProvider).close();
ref.read(isarProvider).close();
}
return null;
});
}
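An illustrative call site for runInIsolateGentle, mirroring what BackgroundSyncManager does above; it assumes the app's providers are wired as in this change and relies only on the Cancelable members (future, cancel) already used here:

import 'package:immich_mobile/providers/infrastructure/sync_stream.provider.dart';
import 'package:immich_mobile/utils/isolate.dart';

Future<void> runBackgroundSyncOnce() async {
  final task = runInIsolateGentle(
    debugLabel: 'manual-sync',
    // The computation receives the isolate's own ProviderContainer.
    computation: (ref) => ref.read(syncStreamServiceProvider).sync(),
  );

  // Awaiting the Cancelable's future completes when the isolate finishes
  // or is cancelled cooperatively via task.cancel().
  await task.future;
}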
@ -64,12 +64,13 @@ class MapUtils {
|
||||
'features': markers.map(_addFeature).toList(),
|
||||
};
|
||||
|
||||
static Future<(Position?, LocationPermission?)> checkPermAndGetLocation(
|
||||
BuildContext context,
|
||||
) async {
|
||||
static Future<(Position?, LocationPermission?)> checkPermAndGetLocation({
|
||||
required BuildContext context,
|
||||
bool silent = false,
|
||||
}) async {
|
||||
try {
|
||||
bool serviceEnabled = await Geolocator.isLocationServiceEnabled();
|
||||
if (!serviceEnabled) {
|
||||
if (!serviceEnabled && !silent) {
|
||||
showDialog(
|
||||
context: context,
|
||||
builder: (context) => _LocationServiceDisabledDialog(),
|
||||
@ -80,7 +81,7 @@ class MapUtils {
|
||||
LocationPermission permission = await Geolocator.checkPermission();
|
||||
bool shouldRequestPermission = false;
|
||||
|
||||
if (permission == LocationPermission.denied) {
|
||||
if (permission == LocationPermission.denied && !silent) {
|
||||
shouldRequestPermission = await showDialog(
|
||||
context: context,
|
||||
builder: (context) => _LocationPermissionDisabledDialog(),
|
||||
@ -94,15 +95,19 @@ class MapUtils {
|
||||
permission == LocationPermission.deniedForever) {
|
||||
// Open app settings only if you did not request for permission before
|
||||
if (permission == LocationPermission.deniedForever &&
|
||||
!shouldRequestPermission) {
|
||||
!shouldRequestPermission &&
|
||||
!silent) {
|
||||
await Geolocator.openAppSettings();
|
||||
}
|
||||
return (null, LocationPermission.deniedForever);
|
||||
}
|
||||
|
||||
Position currentUserLocation = await Geolocator.getCurrentPosition(
|
||||
desiredAccuracy: LocationAccuracy.medium,
|
||||
timeLimit: const Duration(seconds: 5),
|
||||
locationSettings: const LocationSettings(
|
||||
accuracy: LocationAccuracy.high,
|
||||
distanceFilter: 0,
|
||||
timeLimit: Duration(seconds: 5),
|
||||
),
|
||||
);
|
||||
return (currentUserLocation, null);
|
||||
} catch (error, stack) {
|
||||
|
@ -1,11 +1,13 @@
|
||||
import 'package:auto_route/auto_route.dart';
|
||||
import 'package:easy_localization/easy_localization.dart';
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:flutter/material.dart';
|
||||
import 'package:flutter_svg/svg.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/extensions/build_context_extensions.dart';
|
||||
import 'package:immich_mobile/models/backup/backup_state.model.dart';
|
||||
import 'package:immich_mobile/models/server_info/server_info.model.dart';
|
||||
import 'package:immich_mobile/providers/background_sync.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/backup.provider.dart';
|
||||
import 'package:immich_mobile/providers/server_info.provider.dart';
|
||||
import 'package:immich_mobile/providers/user.provider.dart';
|
||||
@ -178,6 +180,11 @@ class ImmichAppBar extends ConsumerWidget implements PreferredSizeWidget {
|
||||
child: action,
|
||||
),
|
||||
),
|
||||
if (kDebugMode)
|
||||
IconButton(
|
||||
onPressed: () => ref.read(backgroundSyncProvider).sync(),
|
||||
icon: const Icon(Icons.sync),
|
||||
),
|
||||
if (showUploadButton)
|
||||
Padding(
|
||||
padding: const EdgeInsets.only(right: 20),
|
||||
|
@ -12,14 +12,11 @@ class ImmichLogo extends StatelessWidget {
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
return Hero(
|
||||
tag: heroTag,
|
||||
child: Image(
|
||||
image: const AssetImage('assets/immich-logo.png'),
|
||||
width: size,
|
||||
filterQuality: FilterQuality.high,
|
||||
isAntiAlias: true,
|
||||
),
|
||||
return Image(
|
||||
image: const AssetImage('assets/immich-logo.png'),
|
||||
width: size,
|
||||
filterQuality: FilterQuality.high,
|
||||
isAntiAlias: true,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -46,12 +46,39 @@ class MapAssetGrid extends HookConsumerWidget {
|
||||
final gridScrollThrottler =
|
||||
useThrottler(interval: const Duration(milliseconds: 300));
|
||||
|
||||
// Add a cache for assets we've already loaded
|
||||
final assetCache = useRef<Map<String, Asset>>({});
|
||||
|
||||
void handleMapEvents(MapEvent event) async {
|
||||
if (event is MapAssetsInBoundsUpdated) {
|
||||
assetsInBounds.value = await ref
|
||||
.read(dbProvider)
|
||||
.assets
|
||||
.getAllByRemoteId(event.assetRemoteIds);
|
||||
final assetIds = event.assetRemoteIds;
|
||||
final missingIds = <String>[];
|
||||
final currentAssets = <Asset>[];
|
||||
|
||||
for (final id in assetIds) {
|
||||
final asset = assetCache.value[id];
|
||||
if (asset != null) {
|
||||
currentAssets.add(asset);
|
||||
} else {
|
||||
missingIds.add(id);
|
||||
}
|
||||
}
|
||||
|
||||
// Only fetch missing assets
|
||||
if (missingIds.isNotEmpty) {
|
||||
final newAssets =
|
||||
await ref.read(dbProvider).assets.getAllByRemoteId(missingIds);
|
||||
|
||||
// Add new assets to cache and current list
|
||||
for (final asset in newAssets) {
|
||||
if (asset.remoteId != null) {
|
||||
assetCache.value[asset.remoteId!] = asset;
|
||||
currentAssets.add(asset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assetsInBounds.value = currentAssets;
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -124,7 +151,7 @@ class MapAssetGrid extends HookConsumerWidget {
|
||||
alignment: Alignment.bottomCenter,
|
||||
child: FractionallySizedBox(
|
||||
// Place it just below the drag handle
|
||||
heightFactor: 0.80,
|
||||
heightFactor: 0.87,
|
||||
child: assetsInBounds.value.isNotEmpty
|
||||
? ref
|
||||
.watch(assetsTimelineProvider(assetsInBounds.value))
|
||||
@ -251,8 +278,18 @@ class _MapSheetDragRegion extends StatelessWidget {
|
||||
const SizedBox(height: 15),
|
||||
const CustomDraggingHandle(),
|
||||
const SizedBox(height: 15),
|
||||
Text(assetsInBoundsText, style: context.textTheme.bodyLarge),
|
||||
const Divider(height: 35),
|
||||
Center(
|
||||
child: Text(
|
||||
assetsInBoundsText,
|
||||
style: TextStyle(
|
||||
fontSize: 20,
|
||||
color: context.textTheme.displayLarge?.color
|
||||
?.withValues(alpha: 0.75),
|
||||
fontWeight: FontWeight.w500,
|
||||
),
|
||||
),
|
||||
),
|
||||
const SizedBox(height: 8),
|
||||
],
|
||||
),
|
||||
ValueListenableBuilder(
|
||||
@ -260,14 +297,14 @@ class _MapSheetDragRegion extends StatelessWidget {
|
||||
builder: (_, value, __) => Visibility(
|
||||
visible: value != null,
|
||||
child: Positioned(
|
||||
right: 15,
|
||||
top: 15,
|
||||
right: 18,
|
||||
top: 24,
|
||||
child: IconButton(
|
||||
icon: Icon(
|
||||
Icons.map_outlined,
|
||||
color: context.textTheme.displayLarge?.color,
|
||||
),
|
||||
iconSize: 20,
|
||||
iconSize: 24,
|
||||
tooltip: 'Zoom to bounds',
|
||||
onPressed: () => onZoomToAsset?.call(value!),
|
||||
),
|
||||
|
@ -20,7 +20,7 @@ class SearchMapThumbnail extends StatelessWidget {
|
||||
return ThumbnailWithInfoContainer(
|
||||
label: 'search_page_your_map'.tr(),
|
||||
onTap: () {
|
||||
context.pushRoute(const MapRoute());
|
||||
context.pushRoute(MapRoute());
|
||||
},
|
||||
child: IgnorePointer(
|
||||
child: MapThumbnail(
|
||||
|
4
mobile/openapi/README.md
generated
@ -145,8 +145,8 @@ Class | Method | HTTP request | Description
|
||||
*MemoriesApi* | [**removeMemoryAssets**](doc//MemoriesApi.md#removememoryassets) | **DELETE** /memories/{id}/assets |
|
||||
*MemoriesApi* | [**searchMemories**](doc//MemoriesApi.md#searchmemories) | **GET** /memories |
|
||||
*MemoriesApi* | [**updateMemory**](doc//MemoriesApi.md#updatememory) | **PUT** /memories/{id} |
|
||||
*NotificationsApi* | [**getNotificationTemplate**](doc//NotificationsApi.md#getnotificationtemplate) | **POST** /notifications/templates/{name} |
|
||||
*NotificationsApi* | [**sendTestEmail**](doc//NotificationsApi.md#sendtestemail) | **POST** /notifications/test-email |
|
||||
*NotificationsAdminApi* | [**getNotificationTemplateAdmin**](doc//NotificationsAdminApi.md#getnotificationtemplateadmin) | **POST** /notifications/admin/templates/{name} |
|
||||
*NotificationsAdminApi* | [**sendTestEmailAdmin**](doc//NotificationsAdminApi.md#sendtestemailadmin) | **POST** /notifications/admin/test-email |
|
||||
*OAuthApi* | [**finishOAuth**](doc//OAuthApi.md#finishoauth) | **POST** /oauth/callback |
|
||||
*OAuthApi* | [**linkOAuthAccount**](doc//OAuthApi.md#linkoauthaccount) | **POST** /oauth/link |
|
||||
*OAuthApi* | [**redirectOAuthToMobile**](doc//OAuthApi.md#redirectoauthtomobile) | **GET** /oauth/mobile-redirect |
|
||||
|
2
mobile/openapi/lib/api.dart
generated
2
mobile/openapi/lib/api.dart
generated
@ -44,7 +44,7 @@ part 'api/jobs_api.dart';
|
||||
part 'api/libraries_api.dart';
|
||||
part 'api/map_api.dart';
|
||||
part 'api/memories_api.dart';
|
||||
part 'api/notifications_api.dart';
|
||||
part 'api/notifications_admin_api.dart';
|
||||
part 'api/o_auth_api.dart';
|
||||
part 'api/partners_api.dart';
|
||||
part 'api/people_api.dart';
|
||||
|
@ -11,20 +11,20 @@
|
||||
part of openapi.api;
|
||||
|
||||
|
||||
class NotificationsApi {
|
||||
NotificationsApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
|
||||
class NotificationsAdminApi {
|
||||
NotificationsAdminApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
|
||||
|
||||
final ApiClient apiClient;
|
||||
|
||||
/// Performs an HTTP 'POST /notifications/templates/{name}' operation and returns the [Response].
|
||||
/// Performs an HTTP 'POST /notifications/admin/templates/{name}' operation and returns the [Response].
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [String] name (required):
|
||||
///
|
||||
/// * [TemplateDto] templateDto (required):
|
||||
Future<Response> getNotificationTemplateWithHttpInfo(String name, TemplateDto templateDto,) async {
|
||||
Future<Response> getNotificationTemplateAdminWithHttpInfo(String name, TemplateDto templateDto,) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/notifications/templates/{name}'
|
||||
final apiPath = r'/notifications/admin/templates/{name}'
|
||||
.replaceAll('{name}', name);
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
@ -53,8 +53,8 @@ class NotificationsApi {
|
||||
/// * [String] name (required):
|
||||
///
|
||||
/// * [TemplateDto] templateDto (required):
|
||||
Future<TemplateResponseDto?> getNotificationTemplate(String name, TemplateDto templateDto,) async {
|
||||
final response = await getNotificationTemplateWithHttpInfo(name, templateDto,);
|
||||
Future<TemplateResponseDto?> getNotificationTemplateAdmin(String name, TemplateDto templateDto,) async {
|
||||
final response = await getNotificationTemplateAdminWithHttpInfo(name, templateDto,);
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
@ -68,13 +68,13 @@ class NotificationsApi {
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Performs an HTTP 'POST /notifications/test-email' operation and returns the [Response].
|
||||
/// Performs an HTTP 'POST /notifications/admin/test-email' operation and returns the [Response].
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [SystemConfigSmtpDto] systemConfigSmtpDto (required):
|
||||
Future<Response> sendTestEmailWithHttpInfo(SystemConfigSmtpDto systemConfigSmtpDto,) async {
|
||||
Future<Response> sendTestEmailAdminWithHttpInfo(SystemConfigSmtpDto systemConfigSmtpDto,) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final apiPath = r'/notifications/test-email';
|
||||
final apiPath = r'/notifications/admin/test-email';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody = systemConfigSmtpDto;
|
||||
@ -100,8 +100,8 @@ class NotificationsApi {
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [SystemConfigSmtpDto] systemConfigSmtpDto (required):
|
||||
Future<TestEmailResponseDto?> sendTestEmail(SystemConfigSmtpDto systemConfigSmtpDto,) async {
|
||||
final response = await sendTestEmailWithHttpInfo(systemConfigSmtpDto,);
|
||||
Future<TestEmailResponseDto?> sendTestEmailAdmin(SystemConfigSmtpDto systemConfigSmtpDto,) async {
|
||||
final response = await sendTestEmailAdminWithHttpInfo(systemConfigSmtpDto,);
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
@ -696,18 +696,18 @@ packages:
dependency: "direct main"
description:
name: geolocator
sha256: "6cb9fb6e5928b58b9a84bdf85012d757fd07aab8215c5205337021c4999bad27"
sha256: e7ebfa04ce451daf39b5499108c973189a71a919aa53c1204effda1c5b93b822
url: "https://pub.dev"
source: hosted
version: "11.1.0"
version: "14.0.0"
geolocator_android:
dependency: transitive
description:
name: geolocator_android
sha256: "7aefc530db47d90d0580b552df3242440a10fe60814496a979aa67aa98b1fd47"
sha256: "114072db5d1dce0ec0b36af2697f55c133bc89a2c8dd513e137c0afe59696ed4"
url: "https://pub.dev"
source: hosted
version: "4.6.1"
version: "5.0.1+1"
geolocator_apple:
dependency: transitive
description:
@ -728,10 +728,10 @@ packages:
dependency: transitive
description:
name: geolocator_web
sha256: "49d8f846ebeb5e2b6641fe477a7e97e5dd73f03cbfef3fd5c42177b7300fb0ed"
sha256: b1ae9bdfd90f861fde8fd4f209c37b953d65e92823cb73c7dee1fa021b06f172
url: "https://pub.dev"
source: hosted
version: "3.0.0"
version: "4.1.3"
geolocator_windows:
dependency: transitive
description:
@ -1806,7 +1806,7 @@ packages:
source: hosted
version: "3.1.4"
uuid:
dependency: transitive
dependency: "direct main"
description:
name: uuid
sha256: a5be9ef6618a7ac1e964353ef476418026db906c4facdedaa299b7a2e71690ff
@ -1933,6 +1933,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "0.0.3"
worker_manager:
dependency: "direct main"
description:
name: worker_manager
sha256: "086ed63e9b36266e851404ca90fd44e37c0f4c9bbf819e5f8d7c87f9741c0591"
url: "https://pub.dev"
source: hosted
version: "7.2.3"
xdg_directories:
dependency: transitive
description:

@ -35,7 +35,7 @@ dependencies:
flutter_udid: ^3.0.0
flutter_web_auth_2: ^5.0.0-alpha.0
fluttertoast: ^8.2.12
geolocator: ^11.0.0
geolocator: ^14.0.0
hooks_riverpod: ^2.6.1
http: ^1.3.0
image_picker: ^1.1.2
@ -60,7 +60,9 @@ dependencies:
thumbhash: 0.1.0+1
timezone: ^0.9.4
url_launcher: ^6.3.1
uuid: ^4.5.1
wakelock_plus: ^1.2.10
worker_manager: ^7.2.3

native_video_player:
git:
@ -2,3 +2,5 @@ import 'package:mocktail/mocktail.dart';
import 'package:openapi/api.dart';

class MockAssetsApi extends Mock implements AssetsApi {}

class MockSyncApi extends Mock implements SyncApi {}

@ -1,7 +1,10 @@
import 'package:immich_mobile/domain/services/store.service.dart';
import 'package:immich_mobile/domain/services/user.service.dart';
import 'package:immich_mobile/domain/utils/background_sync.dart';
import 'package:mocktail/mocktail.dart';

class MockStoreService extends Mock implements StoreService {}

class MockUserService extends Mock implements UserService {}

class MockBackgroundSyncManager extends Mock implements BackgroundSyncManager {}
222
mobile/test/domain/services/sync_stream_service_test.dart
Normal file
222
mobile/test/domain/services/sync_stream_service_test.dart
Normal file
@ -0,0 +1,222 @@
|
||||
// ignore_for_file: avoid-declaring-call-method, avoid-unnecessary-futures
|
||||
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:flutter_test/flutter_test.dart';
|
||||
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
|
||||
import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
|
||||
import 'package:immich_mobile/domain/models/sync_event.model.dart';
|
||||
import 'package:immich_mobile/domain/services/sync_stream.service.dart';
|
||||
import 'package:mocktail/mocktail.dart';
|
||||
|
||||
import '../../fixtures/sync_stream.stub.dart';
|
||||
import '../../infrastructure/repository.mock.dart';
|
||||
|
||||
class _AbortCallbackWrapper {
|
||||
const _AbortCallbackWrapper();
|
||||
|
||||
bool call() => false;
|
||||
}
|
||||
|
||||
class _MockAbortCallbackWrapper extends Mock implements _AbortCallbackWrapper {}
|
||||
|
||||
class _CancellationWrapper {
|
||||
const _CancellationWrapper();
|
||||
|
||||
bool call() => false;
|
||||
}
|
||||
|
||||
class _MockCancellationWrapper extends Mock implements _CancellationWrapper {}
|
||||
|
||||
void main() {
|
||||
late SyncStreamService sut;
|
||||
late ISyncStreamRepository mockSyncStreamRepo;
|
||||
late ISyncApiRepository mockSyncApiRepo;
|
||||
late Function(List<SyncEvent>, Function()) handleEventsCallback;
|
||||
late _MockAbortCallbackWrapper mockAbortCallbackWrapper;
|
||||
|
||||
successHandler(Invocation _) async => true;
|
||||
|
||||
setUp(() {
|
||||
mockSyncStreamRepo = MockSyncStreamRepository();
|
||||
mockSyncApiRepo = MockSyncApiRepository();
|
||||
mockAbortCallbackWrapper = _MockAbortCallbackWrapper();
|
||||
|
||||
when(() => mockAbortCallbackWrapper()).thenReturn(false);
|
||||
|
||||
when(() => mockSyncApiRepo.streamChanges(any()))
|
||||
.thenAnswer((invocation) async {
|
||||
// ignore: avoid-unsafe-collection-methods
|
||||
handleEventsCallback = invocation.positionalArguments.first;
|
||||
});
|
||||
|
||||
when(() => mockSyncApiRepo.ack(any())).thenAnswer((_) async => {});
|
||||
|
||||
when(() => mockSyncStreamRepo.updateUsersV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.updatePartnerV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.deletePartnerV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.updateAssetsV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.deleteAssetsV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.updateAssetsExifV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.updatePartnerAssetsV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.deletePartnerAssetsV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
when(() => mockSyncStreamRepo.updatePartnerAssetsExifV1(any()))
|
||||
.thenAnswer(successHandler);
|
||||
|
||||
sut = SyncStreamService(
|
||||
syncApiRepository: mockSyncApiRepo,
|
||||
syncStreamRepository: mockSyncStreamRepo,
|
||||
);
|
||||
});
|
||||
|
||||
Future<void> simulateEvents(List<SyncEvent> events) async {
|
||||
await sut.sync();
|
||||
await handleEventsCallback(events, mockAbortCallbackWrapper.call);
|
||||
}
|
||||
|
||||
group("SyncStreamService - _handleEvents", () {
|
||||
test(
|
||||
"processes events and acks successfully when handlers succeed",
|
||||
() async {
|
||||
final events = [
|
||||
SyncStreamStub.userDeleteV1,
|
||||
SyncStreamStub.userV1Admin,
|
||||
SyncStreamStub.userV1User,
|
||||
SyncStreamStub.partnerDeleteV1,
|
||||
SyncStreamStub.partnerV1,
|
||||
];
|
||||
|
||||
await simulateEvents(events);
|
||||
|
||||
verifyInOrder([
|
||||
() => mockSyncStreamRepo.deleteUsersV1(any()),
|
||||
() => mockSyncApiRepo.ack(["2"]),
|
||||
() => mockSyncStreamRepo.updateUsersV1(any()),
|
||||
() => mockSyncApiRepo.ack(["5"]),
|
||||
() => mockSyncStreamRepo.deletePartnerV1(any()),
|
||||
() => mockSyncApiRepo.ack(["4"]),
|
||||
() => mockSyncStreamRepo.updatePartnerV1(any()),
|
||||
() => mockSyncApiRepo.ack(["3"]),
|
||||
]);
|
||||
verifyNever(() => mockAbortCallbackWrapper());
|
||||
},
|
||||
);
|
||||
|
||||
test("processes final batch correctly", () async {
|
||||
final events = [
|
||||
SyncStreamStub.userDeleteV1,
|
||||
SyncStreamStub.userV1Admin,
|
||||
];
|
||||
|
||||
await simulateEvents(events);
|
||||
|
||||
verifyInOrder([
|
||||
() => mockSyncStreamRepo.deleteUsersV1(any()),
|
||||
() => mockSyncApiRepo.ack(["2"]),
|
||||
() => mockSyncStreamRepo.updateUsersV1(any()),
|
||||
() => mockSyncApiRepo.ack(["1"]),
|
||||
]);
|
||||
verifyNever(() => mockAbortCallbackWrapper());
|
||||
});
|
||||
|
||||
test("does not process or ack when event list is empty", () async {
|
||||
await simulateEvents([]);
|
||||
|
||||
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
|
||||
verifyNever(() => mockSyncStreamRepo.deleteUsersV1(any()));
|
||||
verifyNever(() => mockSyncStreamRepo.updatePartnerV1(any()));
|
||||
verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
|
||||
verifyNever(() => mockAbortCallbackWrapper());
|
||||
verifyNever(() => mockSyncApiRepo.ack(any()));
|
||||
});
|
||||
|
||||
test("aborts and stops processing if cancelled during iteration", () async {
|
||||
final cancellationChecker = _MockCancellationWrapper();
|
||||
when(() => cancellationChecker()).thenReturn(false);
|
||||
|
||||
sut = SyncStreamService(
|
||||
syncApiRepository: mockSyncApiRepo,
|
||||
syncStreamRepository: mockSyncStreamRepo,
|
||||
cancelChecker: cancellationChecker.call,
|
||||
);
|
||||
await sut.sync();
|
||||
|
||||
final events = [
|
||||
SyncStreamStub.userDeleteV1,
|
||||
SyncStreamStub.userV1Admin,
|
||||
SyncStreamStub.partnerDeleteV1,
|
||||
];
|
||||
|
||||
when(() => mockSyncStreamRepo.deleteUsersV1(any())).thenAnswer((_) async {
|
||||
when(() => cancellationChecker()).thenReturn(true);
|
||||
});
|
||||
|
||||
await handleEventsCallback(events, mockAbortCallbackWrapper.call);
|
||||
|
||||
verify(() => mockSyncStreamRepo.deleteUsersV1(any())).called(1);
|
||||
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
|
||||
verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
|
||||
|
||||
verify(() => mockAbortCallbackWrapper()).called(1);
|
||||
|
||||
verify(() => mockSyncApiRepo.ack(["2"])).called(1);
|
||||
});
|
||||
|
||||
test(
|
||||
"aborts and stops processing if cancelled before processing batch",
|
||||
() async {
|
||||
final cancellationChecker = _MockCancellationWrapper();
|
||||
when(() => cancellationChecker()).thenReturn(false);
|
||||
|
||||
final processingCompleter = Completer<void>();
|
||||
bool handler1Started = false;
|
||||
when(() => mockSyncStreamRepo.deleteUsersV1(any()))
|
||||
.thenAnswer((_) async {
|
||||
handler1Started = true;
|
||||
return processingCompleter.future;
|
||||
});
|
||||
|
||||
sut = SyncStreamService(
|
||||
syncApiRepository: mockSyncApiRepo,
|
||||
syncStreamRepository: mockSyncStreamRepo,
|
||||
cancelChecker: cancellationChecker.call,
|
||||
);
|
||||
|
||||
await sut.sync();
|
||||
|
||||
final events = [
|
||||
SyncStreamStub.userDeleteV1,
|
||||
SyncStreamStub.userV1Admin,
|
||||
SyncStreamStub.partnerDeleteV1,
|
||||
];
|
||||
|
||||
final processingFuture =
|
||||
handleEventsCallback(events, mockAbortCallbackWrapper.call);
|
||||
await pumpEventQueue();
|
||||
|
||||
expect(handler1Started, isTrue);
|
||||
|
||||
// Signal cancellation while handler 1 is waiting
|
||||
when(() => cancellationChecker()).thenReturn(true);
|
||||
await pumpEventQueue();
|
||||
|
||||
processingCompleter.complete();
|
||||
await processingFuture;
|
||||
|
||||
verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
|
||||
|
||||
verify(() => mockSyncApiRepo.ack(["2"])).called(1);
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
45
mobile/test/fixtures/sync_stream.stub.dart
vendored
Normal file
@ -0,0 +1,45 @@
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:openapi/api.dart';

abstract final class SyncStreamStub {
static final userV1Admin = SyncEvent(
type: SyncEntityType.userV1,
data: SyncUserV1(
deletedAt: DateTime(2020),
email: "admin@admin",
id: "1",
name: "Admin",
),
ack: "1",
);
static final userV1User = SyncEvent(
type: SyncEntityType.userV1,
data: SyncUserV1(
deletedAt: DateTime(2021),
email: "user@user",
id: "5",
name: "User",
),
ack: "5",
);
static final userDeleteV1 = SyncEvent(
type: SyncEntityType.userDeleteV1,
data: SyncUserDeleteV1(userId: "2"),
ack: "2",
);

static final partnerV1 = SyncEvent(
type: SyncEntityType.partnerV1,
data: SyncPartnerV1(
inTimeline: true,
sharedById: "1",
sharedWithId: "2",
),
ack: "3",
);
static final partnerDeleteV1 = SyncEvent(
type: SyncEntityType.partnerDeleteV1,
data: SyncPartnerDeleteV1(sharedById: "3", sharedWithId: "4"),
ack: "4",
);
}
@ -0,0 +1,299 @@
|
||||
import 'dart:async';
|
||||
import 'dart:convert';
|
||||
|
||||
import 'package:flutter_test/flutter_test.dart';
|
||||
import 'package:http/http.dart' as http;
|
||||
import 'package:immich_mobile/domain/models/sync_event.model.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
|
||||
import 'package:mocktail/mocktail.dart';
|
||||
import 'package:openapi/api.dart';
|
||||
|
||||
import '../../api.mocks.dart';
|
||||
import '../../service.mocks.dart';
|
||||
|
||||
class MockHttpClient extends Mock implements http.Client {}
|
||||
|
||||
class MockApiClient extends Mock implements ApiClient {}
|
||||
|
||||
class MockStreamedResponse extends Mock implements http.StreamedResponse {}
|
||||
|
||||
class FakeBaseRequest extends Fake implements http.BaseRequest {}
|
||||
|
||||
String _createJsonLine(String type, Map<String, dynamic> data, String ack) {
|
||||
return '${jsonEncode({'type': type, 'data': data, 'ack': ack})}\n';
|
||||
}
|
||||
|
||||
void main() {
|
||||
late SyncApiRepository sut;
|
||||
late MockApiService mockApiService;
|
||||
late MockApiClient mockApiClient;
|
||||
late MockSyncApi mockSyncApi;
|
||||
late MockHttpClient mockHttpClient;
|
||||
late MockStreamedResponse mockStreamedResponse;
|
||||
late StreamController<List<int>> responseStreamController;
|
||||
late int testBatchSize = 3;
|
||||
|
||||
setUp(() {
|
||||
mockApiService = MockApiService();
|
||||
mockApiClient = MockApiClient();
|
||||
mockSyncApi = MockSyncApi();
|
||||
mockHttpClient = MockHttpClient();
|
||||
mockStreamedResponse = MockStreamedResponse();
|
||||
responseStreamController =
|
||||
StreamController<List<int>>.broadcast(sync: true);
|
||||
|
||||
registerFallbackValue(FakeBaseRequest());
|
||||
|
||||
when(() => mockApiService.apiClient).thenReturn(mockApiClient);
|
||||
when(() => mockApiService.syncApi).thenReturn(mockSyncApi);
|
||||
when(() => mockApiClient.basePath).thenReturn('http://demo.immich.app/api');
|
||||
when(() => mockApiService.applyToParams(any(), any()))
|
||||
.thenAnswer((_) async => {});
|
||||
|
||||
// Mock HTTP client behavior
|
||||
when(() => mockHttpClient.send(any()))
|
||||
.thenAnswer((_) async => mockStreamedResponse);
|
||||
when(() => mockStreamedResponse.statusCode).thenReturn(200);
|
||||
when(() => mockStreamedResponse.stream)
|
||||
.thenAnswer((_) => http.ByteStream(responseStreamController.stream));
|
||||
when(() => mockHttpClient.close()).thenAnswer((_) => {});
|
||||
|
||||
sut = SyncApiRepository(mockApiService);
|
||||
});
|
||||
|
||||
tearDown(() async {
|
||||
if (!responseStreamController.isClosed) {
|
||||
await responseStreamController.close();
|
||||
}
|
||||
});
|
||||
|
||||
Future<void> streamChanges(
|
||||
Function(List<SyncEvent>, Function() abort) onDataCallback,
|
||||
) {
|
||||
return sut.streamChanges(
|
||||
onDataCallback,
|
||||
batchSize: testBatchSize,
|
||||
httpClient: mockHttpClient,
|
||||
);
|
||||
}
|
||||
|
||||
test('streamChanges stops processing stream when abort is called', () async {
|
||||
int onDataCallCount = 0;
|
||||
bool abortWasCalledInCallback = false;
|
||||
List<SyncEvent> receivedEventsBatch1 = [];
|
||||
|
||||
onDataCallback(List<SyncEvent> events, Function() abort) {
|
||||
onDataCallCount++;
|
||||
if (onDataCallCount == 1) {
|
||||
receivedEventsBatch1 = events;
|
||||
abort();
|
||||
abortWasCalledInCallback = true;
|
||||
} else {
|
||||
fail("onData called more than once after abort was invoked");
|
||||
}
|
||||
}
|
||||
|
||||
final streamChangesFuture = streamChanges(onDataCallback);
|
||||
|
||||
await pumpEventQueue();
|
||||
|
||||
for (int i = 0; i < testBatchSize; i++) {
|
||||
responseStreamController.add(
|
||||
utf8.encode(
|
||||
_createJsonLine(
|
||||
SyncEntityType.userDeleteV1.toString(),
|
||||
SyncUserDeleteV1(userId: "user$i").toJson(),
|
||||
'ack$i',
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
for (int i = testBatchSize; i < testBatchSize * 2; i++) {
|
||||
responseStreamController.add(
|
||||
utf8.encode(
|
||||
_createJsonLine(
|
||||
SyncEntityType.userDeleteV1.toString(),
|
||||
SyncUserDeleteV1(userId: "user$i").toJson(),
|
||||
'ack$i',
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
await responseStreamController.close();
|
||||
await expectLater(streamChangesFuture, completes);
|
||||
|
||||
expect(onDataCallCount, 1);
|
||||
expect(abortWasCalledInCallback, isTrue);
|
||||
expect(receivedEventsBatch1.length, testBatchSize);
|
||||
verify(() => mockHttpClient.close()).called(1);
|
||||
});
|
||||
|
||||
test(
|
||||
'streamChanges does not process remaining lines in finally block if aborted',
|
||||
() async {
|
||||
int onDataCallCount = 0;
|
||||
bool abortWasCalledInCallback = false;
|
||||
|
||||
onDataCallback(List<SyncEvent> events, Function() abort) {
|
||||
onDataCallCount++;
|
||||
if (onDataCallCount == 1) {
|
||||
abort();
|
||||
abortWasCalledInCallback = true;
|
||||
} else {
|
||||
fail("onData called more than once after abort was invoked");
|
||||
}
|
||||
}
|
||||
|
||||
final streamChangesFuture = streamChanges(onDataCallback);
|
||||
|
||||
await pumpEventQueue();
|
||||
|
||||
for (int i = 0; i < testBatchSize; i++) {
|
||||
responseStreamController.add(
|
||||
utf8.encode(
|
||||
_createJsonLine(
|
||||
SyncEntityType.userDeleteV1.toString(),
|
||||
SyncUserDeleteV1(userId: "user$i").toJson(),
|
||||
'ack$i',
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// emit a single event to skip batching and trigger finally
|
||||
responseStreamController.add(
|
||||
utf8.encode(
|
||||
_createJsonLine(
|
||||
SyncEntityType.userDeleteV1.toString(),
|
||||
SyncUserDeleteV1(userId: "user100").toJson(),
|
||||
'ack100',
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
await responseStreamController.close();
|
||||
await expectLater(streamChangesFuture, completes);
|
||||
|
||||
expect(onDataCallCount, 1);
|
||||
expect(abortWasCalledInCallback, isTrue);
|
||||
verify(() => mockHttpClient.close()).called(1);
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
'streamChanges processes remaining lines in finally block if not aborted',
|
||||
() async {
|
||||
int onDataCallCount = 0;
|
||||
List<SyncEvent> receivedEventsBatch1 = [];
|
||||
List<SyncEvent> receivedEventsBatch2 = [];
|
||||
|
||||
onDataCallback(List<SyncEvent> events, Function() _) {
|
||||
onDataCallCount++;
|
||||
if (onDataCallCount == 1) {
|
||||
receivedEventsBatch1 = events;
|
||||
} else if (onDataCallCount == 2) {
|
||||
receivedEventsBatch2 = events;
|
||||
} else {
|
||||
fail("onData called more than expected");
|
||||
}
|
||||
}
|
||||
|
||||
final streamChangesFuture = streamChanges(onDataCallback);
|
||||
|
||||
await pumpEventQueue();
|
||||
|
||||
// Batch 1
|
||||
for (int i = 0; i < testBatchSize; i++) {
|
||||
responseStreamController.add(
|
||||
utf8.encode(
|
||||
_createJsonLine(
|
||||
SyncEntityType.userDeleteV1.toString(),
|
||||
SyncUserDeleteV1(userId: "user$i").toJson(),
|
||||
'ack$i',
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// Partial Batch 2
|
||||
responseStreamController.add(
|
||||
utf8.encode(
|
||||
_createJsonLine(
|
||||
SyncEntityType.userDeleteV1.toString(),
|
||||
SyncUserDeleteV1(userId: "user100").toJson(),
|
||||
'ack100',
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
await responseStreamController.close();
|
||||
await expectLater(streamChangesFuture, completes);
|
||||
|
||||
expect(onDataCallCount, 2);
|
||||
expect(receivedEventsBatch1.length, testBatchSize);
|
||||
expect(receivedEventsBatch2.length, 1);
|
||||
verify(() => mockHttpClient.close()).called(1);
|
||||
},
|
||||
);
|
||||
|
||||
test('streamChanges handles stream error gracefully', () async {
|
||||
final streamError = Exception("Network Error");
|
||||
int onDataCallCount = 0;
|
||||
|
||||
onDataCallback(List<SyncEvent> events, Function() _) {
|
||||
onDataCallCount++;
|
||||
}
|
||||
|
||||
final streamChangesFuture = streamChanges(onDataCallback);
|
||||
|
||||
await pumpEventQueue();
|
||||
|
||||
responseStreamController.add(
|
||||
utf8.encode(
|
||||
_createJsonLine(
|
||||
SyncEntityType.userDeleteV1.toString(),
|
||||
SyncUserDeleteV1(userId: "user1").toJson(),
|
||||
'ack1',
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
responseStreamController.addError(streamError);
|
||||
await expectLater(streamChangesFuture, throwsA(streamError));
|
||||
|
||||
expect(onDataCallCount, 0);
|
||||
verify(() => mockHttpClient.close()).called(1);
|
||||
});
|
||||
|
||||
test('streamChanges throws ApiException on non-200 status code', () async {
|
||||
when(() => mockStreamedResponse.statusCode).thenReturn(401);
|
||||
final errorBodyController = StreamController<List<int>>(sync: true);
|
||||
when(() => mockStreamedResponse.stream)
|
||||
.thenAnswer((_) => http.ByteStream(errorBodyController.stream));
|
||||
|
||||
int onDataCallCount = 0;
|
||||
|
||||
onDataCallback(List<SyncEvent> events, Function() _) {
|
||||
onDataCallCount++;
|
||||
}
|
||||
|
||||
final future = streamChanges(onDataCallback);
|
||||
|
||||
errorBodyController.add(utf8.encode('{"error":"Unauthorized"}'));
|
||||
await errorBodyController.close();
|
||||
|
||||
await expectLater(
|
||||
future,
|
||||
throwsA(
|
||||
isA<ApiException>()
|
||||
.having((e) => e.code, 'code', 401)
|
||||
.having((e) => e.message, 'message', contains('Unauthorized')),
|
||||
),
|
||||
);
|
||||
|
||||
expect(onDataCallCount, 0);
|
||||
verify(() => mockHttpClient.close()).called(1);
|
||||
});
|
||||
}
|
@ -1,6 +1,8 @@
import 'package:immich_mobile/domain/interfaces/device_asset.interface.dart';
import 'package:immich_mobile/domain/interfaces/log.interface.dart';
import 'package:immich_mobile/domain/interfaces/store.interface.dart';
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
import 'package:immich_mobile/domain/interfaces/user.interface.dart';
import 'package:immich_mobile/domain/interfaces/user_api.interface.dart';
import 'package:mocktail/mocktail.dart';
@ -14,5 +16,9 @@ class MockUserRepository extends Mock implements IUserRepository {}
class MockDeviceAssetRepository extends Mock
implements IDeviceAssetRepository {}

class MockSyncStreamRepository extends Mock implements ISyncStreamRepository {}

// API Repos
class MockUserApiRepository extends Mock implements IUserApiRepository {}

class MockSyncApiRepository extends Mock implements ISyncApiRepository {}

@ -29,4 +29,3 @@ class MockSearchApi extends Mock implements SearchApi {}
class MockAppSettingService extends Mock implements AppSettingsService {}

class MockBackgroundService extends Mock implements BackgroundService {}
@ -8,6 +8,7 @@ import 'package:isar/isar.dart';
import 'package:mocktail/mocktail.dart';
import 'package:openapi/api.dart';

import '../domain/service.mock.dart';
import '../repository.mocks.dart';
import '../service.mocks.dart';
import '../test_utils.dart';
@ -18,6 +19,7 @@ void main() {
late MockAuthRepository authRepository;
late MockApiService apiService;
late MockNetworkService networkService;
late MockBackgroundSyncManager backgroundSyncManager;
late Isar db;

setUp(() async {
@ -25,12 +27,14 @@ void main() {
authRepository = MockAuthRepository();
apiService = MockApiService();
networkService = MockNetworkService();
backgroundSyncManager = MockBackgroundSyncManager();

sut = AuthService(
authApiRepository,
authRepository,
apiService,
networkService,
backgroundSyncManager,
);

registerFallbackValue(Uri());
@ -116,24 +120,28 @@ void main() {
group('logout', () {
test('Should logout user', () async {
when(() => authApiRepository.logout()).thenAnswer((_) async => {});
when(() => backgroundSyncManager.cancel()).thenAnswer((_) async => {});
when(() => authRepository.clearLocalData())
.thenAnswer((_) => Future.value(null));

await sut.logout();

verify(() => authApiRepository.logout()).called(1);
verify(() => backgroundSyncManager.cancel()).called(1);
verify(() => authRepository.clearLocalData()).called(1);
});

test('Should clear local data even on server error', () async {
when(() => authApiRepository.logout())
.thenThrow(Exception('Server error'));
when(() => backgroundSyncManager.cancel()).thenAnswer((_) async => {});
when(() => authRepository.clearLocalData())
.thenAnswer((_) => Future.value(null));

await sut.logout();

verify(() => authApiRepository.logout()).called(1);
verify(() => backgroundSyncManager.cancel()).called(1);
verify(() => authRepository.clearLocalData()).called(1);
});
});
@ -3485,9 +3485,9 @@
]
}
},
"/notifications/templates/{name}": {
"/notifications/admin/templates/{name}": {
"post": {
"operationId": "getNotificationTemplate",
"operationId": "getNotificationTemplateAdmin",
"parameters": [
{
"name": "name",
@ -3532,13 +3532,13 @@
}
],
"tags": [
"Notifications"
"Notifications (Admin)"
]
}
},
"/notifications/test-email": {
"/notifications/admin/test-email": {
"post": {
"operationId": "sendTestEmail",
"operationId": "sendTestEmailAdmin",
"parameters": [],
"requestBody": {
"content": {
@ -3574,7 +3574,7 @@
}
],
"tags": [
"Notifications"
"Notifications (Admin)"
]
}
},
8
open-api/typescript-sdk/package-lock.json
generated
@ -12,7 +12,7 @@
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.14.0",
"@types/node": "^22.14.1",
"typescript": "^5.3.3"
}
},
@ -23,9 +23,9 @@
"license": "MIT"
},
"node_modules/@types/node": {
"version": "22.14.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.14.0.tgz",
"integrity": "sha512-Kmpl+z84ILoG+3T/zQFyAJsU6EPTmOCj8/2+83fSN6djd6I4o7uOuGIH6vq3PrjY5BGitSbFuMN18j3iknubbA==",
"version": "22.14.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.14.1.tgz",
"integrity": "sha512-u0HuPQwe/dHrItgHHpmw3N2fYCR6x4ivMNbPHRkBVP4CvN+kiRrKHWk3i8tXiO/joPwXLMYvF9TTF0eqgHIuOw==",
"dev": true,
"license": "MIT",
"dependencies": {

@ -19,7 +19,7 @@
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.14.0",
"@types/node": "^22.14.1",
"typescript": "^5.3.3"
},
"repository": {
@ -2318,26 +2318,26 @@ export function addMemoryAssets({ id, bulkIdsDto }: {
body: bulkIdsDto
})));
}
export function getNotificationTemplate({ name, templateDto }: {
export function getNotificationTemplateAdmin({ name, templateDto }: {
name: string;
templateDto: TemplateDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TemplateResponseDto;
}>(`/notifications/templates/${encodeURIComponent(name)}`, oazapfts.json({
}>(`/notifications/admin/templates/${encodeURIComponent(name)}`, oazapfts.json({
...opts,
method: "POST",
body: templateDto
})));
}
export function sendTestEmail({ systemConfigSmtpDto }: {
export function sendTestEmailAdmin({ systemConfigSmtpDto }: {
systemConfigSmtpDto: SystemConfigSmtpDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TestEmailResponseDto;
}>("/notifications/test-email", oazapfts.json({
}>("/notifications/admin/test-email", oazapfts.json({
...opts,
method: "POST",
body: systemConfigSmtpDto
2
server/package-lock.json
generated
@ -90,7 +90,7 @@
"@types/lodash": "^4.14.197",
"@types/mock-fs": "^4.13.1",
"@types/multer": "^1.4.7",
"@types/node": "^22.14.0",
"@types/node": "^22.14.1",
"@types/nodemailer": "^6.4.14",
"@types/picomatch": "^3.0.0",
"@types/pngjs": "^6.0.5",

@ -26,9 +26,8 @@
"migrations:generate": "node ./dist/bin/migrations.js generate",
"migrations:create": "node ./dist/bin/migrations.js create",
"migrations:run": "node ./dist/bin/migrations.js run",
"typeorm:migrations:revert": "typeorm migration:revert -d ./dist/bin/database.js",
"typeorm:schema:drop": "typeorm query -d ./dist/bin/database.js 'DROP schema public cascade; CREATE schema public;'",
"typeorm:schema:reset": "npm run typeorm:schema:drop && npm run migrations:run",
"schema:drop": "node ./dist/bin/migrations.js query 'DROP schema public cascade; CREATE schema public;'",
"schema:reset": "npm run schema:drop && npm run migrations:run",
"kysely:codegen": "npx kysely-codegen --include-pattern=\"(public|vectors).*\" --dialect postgres --url postgres://postgres:postgres@localhost/immich --log-level debug --out-file=./src/db.d.ts",
"sync:open-api": "node ./dist/bin/sync-open-api.js",
"sync:sql": "node ./dist/bin/sync-sql.js",
@ -116,7 +115,7 @@
"@types/lodash": "^4.14.197",
"@types/mock-fs": "^4.13.1",
"@types/multer": "^1.4.7",
"@types/node": "^22.14.0",
"@types/node": "^22.14.1",
"@types/nodemailer": "^6.4.14",
"@types/picomatch": "^3.0.0",
"@types/pngjs": "^6.0.5",

@ -1,11 +0,0 @@
import { ConfigRepository } from 'src/repositories/config.repository';
import { DataSource } from 'typeorm';

const { database } = new ConfigRepository().getEnv();

/**
* @deprecated - DO NOT USE THIS
*
* this export is ONLY to be used for TypeORM commands in package.json#scripts
*/
export const dataSource = new DataSource({ ...database.config.typeorm, host: 'localhost' });
@ -1,8 +1,8 @@
#!/usr/bin/env node
process.env.DB_URL = process.env.DB_URL || 'postgres://postgres:postgres@localhost:5432/immich';

import { Kysely } from 'kysely';
import { writeFileSync } from 'node:fs';
import { Kysely, sql } from 'kysely';
import { mkdirSync, writeFileSync } from 'node:fs';
import { basename, dirname, extname, join } from 'node:path';
import postgres from 'postgres';
import { ConfigRepository } from 'src/repositories/config.repository';
@ -23,8 +23,13 @@ const main = async () => {
}

case 'run': {
const only = process.argv[3] as 'kysely' | 'typeorm' | undefined;
await run(only);
await runMigrations();
return;
}

case 'query': {
const query = process.argv[3];
await runQuery(query);
return;
}

@ -48,14 +53,25 @@ const main = async () => {
}
};

const run = async (only?: 'kysely' | 'typeorm') => {
const getDatabaseClient = () => {
const configRepository = new ConfigRepository();
const { database } = configRepository.getEnv();
const logger = new LoggingRepository(undefined, configRepository);
const db = new Kysely<any>(getKyselyConfig(database.config.kysely));
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
return new Kysely<any>(getKyselyConfig(database.config.kysely));
};

await databaseRepository.runMigrations({ only });
const runQuery = async (query: string) => {
const db = getDatabaseClient();
await sql.raw(query).execute(db);
await db.destroy();
};

const runMigrations = async () => {
const configRepository = new ConfigRepository();
const logger = new LoggingRepository(undefined, configRepository);
const db = getDatabaseClient();
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
await databaseRepository.runMigrations();
await db.destroy();
};

const debug = async () => {
@ -81,7 +97,8 @@ const create = (path: string, up: string[], down: string[]) => {
const filename = `${timestamp}-${name}.ts`;
const folder = dirname(path);
const fullPath = join(folder, filename);
writeFileSync(fullPath, asMigration('typeorm', { name, timestamp, up, down }));
mkdirSync(folder, { recursive: true });
writeFileSync(fullPath, asMigration('kysely', { name, timestamp, up, down }));
console.log(`Wrote ${fullPath}`);
};
@ -13,7 +13,7 @@ import { JobController } from 'src/controllers/job.controller';
import { LibraryController } from 'src/controllers/library.controller';
import { MapController } from 'src/controllers/map.controller';
import { MemoryController } from 'src/controllers/memory.controller';
import { NotificationController } from 'src/controllers/notification.controller';
import { NotificationAdminController } from 'src/controllers/notification-admin.controller';
import { OAuthController } from 'src/controllers/oauth.controller';
import { PartnerController } from 'src/controllers/partner.controller';
import { PersonController } from 'src/controllers/person.controller';
@ -47,7 +47,7 @@ export const controllers = [
LibraryController,
MapController,
MemoryController,
NotificationController,
NotificationAdminController,
OAuthController,
PartnerController,
PersonController,

@ -4,25 +4,25 @@ import { AuthDto } from 'src/dtos/auth.dto';
import { TemplateDto, TemplateResponseDto, TestEmailResponseDto } from 'src/dtos/notification.dto';
import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { EmailTemplate } from 'src/repositories/notification.repository';
import { EmailTemplate } from 'src/repositories/email.repository';
import { NotificationService } from 'src/services/notification.service';

@ApiTags('Notifications')
@Controller('notifications')
export class NotificationController {
@ApiTags('Notifications (Admin)')
@Controller('notifications/admin')
export class NotificationAdminController {
constructor(private service: NotificationService) {}

@Post('test-email')
@HttpCode(HttpStatus.OK)
@Authenticated({ admin: true })
sendTestEmail(@Auth() auth: AuthDto, @Body() dto: SystemConfigSmtpDto): Promise<TestEmailResponseDto> {
sendTestEmailAdmin(@Auth() auth: AuthDto, @Body() dto: SystemConfigSmtpDto): Promise<TestEmailResponseDto> {
return this.service.sendTestEmail(auth.user.id, dto);
}

@Post('templates/:name')
@HttpCode(HttpStatus.OK)
@Authenticated({ admin: true })
getNotificationTemplate(
getNotificationTemplateAdmin(
@Auth() auth: AuthDto,
@Param('name') name: EmailTemplate,
@Body() dto: TemplateDto,
@ -1,13 +1,13 @@
|
||||
import { Selectable } from 'kysely';
|
||||
import { AssetJobStatus as DatabaseAssetJobStatus, Exif as DatabaseExif } from 'src/db';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import { Albums, Exif as DatabaseExif } from 'src/db';
|
||||
import { MapAsset } from 'src/dtos/asset-response.dto';
|
||||
import {
|
||||
AlbumUserRole,
|
||||
AssetFileType,
|
||||
AssetStatus,
|
||||
AssetType,
|
||||
MemoryType,
|
||||
Permission,
|
||||
SharedLinkType,
|
||||
SourceType,
|
||||
UserStatus,
|
||||
} from 'src/enum';
|
||||
@ -44,7 +44,7 @@ export type Library = {
|
||||
exclusionPatterns: string[];
|
||||
deletedAt: Date | null;
|
||||
refreshedAt: Date | null;
|
||||
assets?: Asset[];
|
||||
assets?: MapAsset[];
|
||||
};
|
||||
|
||||
export type AuthApiKey = {
|
||||
@ -96,7 +96,26 @@ export type Memory = {
|
||||
data: OnThisDayData;
|
||||
ownerId: string;
|
||||
isSaved: boolean;
|
||||
assets: Asset[];
|
||||
assets: MapAsset[];
|
||||
};
|
||||
|
||||
export type Asset = {
|
||||
id: string;
|
||||
checksum: Buffer<ArrayBufferLike>;
|
||||
deviceAssetId: string;
|
||||
deviceId: string;
|
||||
fileCreatedAt: Date;
|
||||
fileModifiedAt: Date;
|
||||
isExternal: boolean;
|
||||
isVisible: boolean;
|
||||
libraryId: string | null;
|
||||
livePhotoVideoId: string | null;
|
||||
localDateTime: Date;
|
||||
originalFileName: string;
|
||||
originalPath: string;
|
||||
ownerId: string;
|
||||
sidecarPath: string | null;
|
||||
type: AssetType;
|
||||
};
|
||||
|
||||
export type User = {
|
||||
@ -128,39 +147,6 @@ export type StorageAsset = {
|
||||
encodedVideoPath: string | null;
|
||||
};
|
||||
|
||||
export type Asset = {
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
deletedAt: Date | null;
|
||||
id: string;
|
||||
updateId: string;
|
||||
status: AssetStatus;
|
||||
checksum: Buffer<ArrayBufferLike>;
|
||||
deviceAssetId: string;
|
||||
deviceId: string;
|
||||
duplicateId: string | null;
|
||||
duration: string | null;
|
||||
encodedVideoPath: string | null;
|
||||
fileCreatedAt: Date | null;
|
||||
fileModifiedAt: Date | null;
|
||||
isArchived: boolean;
|
||||
isExternal: boolean;
|
||||
isFavorite: boolean;
|
||||
isOffline: boolean;
|
||||
isVisible: boolean;
|
||||
libraryId: string | null;
|
||||
livePhotoVideoId: string | null;
|
||||
localDateTime: Date | null;
|
||||
originalFileName: string;
|
||||
originalPath: string;
|
||||
ownerId: string;
|
||||
sidecarPath: string | null;
|
||||
stack?: Stack | null;
|
||||
stackId: string | null;
|
||||
thumbhash: Buffer<ArrayBufferLike> | null;
|
||||
type: AssetType;
|
||||
};
|
||||
|
||||
export type SidecarWriteAsset = {
|
||||
id: string;
|
||||
sidecarPath: string | null;
|
||||
@ -173,7 +159,7 @@ export type Stack = {
|
||||
primaryAssetId: string;
|
||||
owner?: User;
|
||||
ownerId: string;
|
||||
assets: AssetEntity[];
|
||||
assets: MapAsset[];
|
||||
assetCount?: number;
|
||||
};
|
||||
|
||||
@ -187,6 +173,28 @@ export type AuthSharedLink = {
|
||||
password: string | null;
|
||||
};
|
||||
|
||||
export type SharedLink = {
|
||||
id: string;
|
||||
album?: Album | null;
|
||||
albumId: string | null;
|
||||
allowDownload: boolean;
|
||||
allowUpload: boolean;
|
||||
assets: MapAsset[];
|
||||
createdAt: Date;
|
||||
description: string | null;
|
||||
expiresAt: Date | null;
|
||||
key: Buffer;
|
||||
password: string | null;
|
||||
showExif: boolean;
|
||||
type: SharedLinkType;
|
||||
userId: string;
|
||||
};
|
||||
|
||||
export type Album = Selectable<Albums> & {
|
||||
owner: User;
|
||||
assets: MapAsset[];
|
||||
};
|
||||
|
||||
export type AuthSession = {
|
||||
id: string;
|
||||
};
|
||||
@ -256,10 +264,6 @@ export type AssetFace = {
|
||||
person?: Person | null;
|
||||
};
|
||||
|
||||
export type AssetJobStatus = Selectable<DatabaseAssetJobStatus> & {
|
||||
asset: AssetEntity;
|
||||
};
|
||||
|
||||
const userColumns = ['id', 'name', 'email', 'profileImagePath', 'profileChangedAt'] as const;
|
||||
|
||||
export const columns = {
|
||||
|
6
server/src/db.d.ts
vendored
@ -143,8 +143,8 @@ export interface Assets {
duplicateId: string | null;
duration: string | null;
encodedVideoPath: Generated<string | null>;
fileCreatedAt: Timestamp | null;
fileModifiedAt: Timestamp | null;
fileCreatedAt: Timestamp;
fileModifiedAt: Timestamp;
id: Generated<string>;
isArchived: Generated<boolean>;
isExternal: Generated<boolean>;
@ -153,7 +153,7 @@ export interface Assets {
isVisible: Generated<boolean>;
libraryId: string | null;
livePhotoVideoId: string | null;
localDateTime: Timestamp | null;
localDateTime: Timestamp;
originalFileName: string;
originalPath: string;
ownerId: string;

@ -11,7 +11,8 @@ import { setUnion } from 'src/utils/set';
const GeneratedUuidV7Column = (options: Omit<ColumnOptions, 'type' | 'default' | 'nullable'> = {}) =>
Column({ ...options, type: 'uuid', nullable: false, default: () => `${immich_uuid_v7.name}()` });

export const UpdateIdColumn = () => GeneratedUuidV7Column();
export const UpdateIdColumn = (options: Omit<ColumnOptions, 'type' | 'default' | 'nullable'> = {}) =>
GeneratedUuidV7Column(options);

export const PrimaryGeneratedUuidV7Column = () => GeneratedUuidV7Column({ primary: true });
@ -2,10 +2,10 @@ import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { ArrayNotEmpty, IsArray, IsEnum, IsString, ValidateNested } from 'class-validator';
import _ from 'lodash';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AlbumUser, AuthSharedLink, User } from 'src/database';
import { AssetResponseDto, MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
import { AlbumEntity } from 'src/entities/album.entity';
import { AlbumUserRole, AssetOrder } from 'src/enum';
import { Optional, ValidateBoolean, ValidateUUID } from 'src/validation';

@ -142,7 +142,23 @@ export class AlbumResponseDto {
order?: AssetOrder;
}

export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDto): AlbumResponseDto => {
export type MapAlbumDto = {
albumUsers?: AlbumUser[];
assets?: MapAsset[];
sharedLinks?: AuthSharedLink[];
albumName: string;
description: string;
albumThumbnailAssetId: string | null;
createdAt: Date;
updatedAt: Date;
id: string;
ownerId: string;
owner: User;
isActivityEnabled: boolean;
order: AssetOrder;
};

export const mapAlbum = (entity: MapAlbumDto, withAssets: boolean, auth?: AuthDto): AlbumResponseDto => {
const albumUsers: AlbumUserResponseDto[] = [];

if (entity.albumUsers) {
@ -159,7 +175,7 @@ export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDt

const assets = entity.assets || [];

const hasSharedLink = entity.sharedLinks?.length > 0;
const hasSharedLink = !!entity.sharedLinks && entity.sharedLinks.length > 0;
const hasSharedUser = albumUsers.length > 0;

let startDate = assets.at(0)?.localDateTime;
@ -190,5 +206,5 @@ export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDt
};
};

export const mapAlbumWithAssets = (entity: AlbumEntity) => mapAlbum(entity, true);
export const mapAlbumWithoutAssets = (entity: AlbumEntity) => mapAlbum(entity, false);
export const mapAlbumWithAssets = (entity: MapAlbumDto) => mapAlbum(entity, true);
export const mapAlbumWithoutAssets = (entity: MapAlbumDto) => mapAlbum(entity, false);
@ -1,5 +1,6 @@
|
||||
import { ApiProperty } from '@nestjs/swagger';
|
||||
import { AssetFace } from 'src/database';
|
||||
import { Selectable } from 'kysely';
|
||||
import { AssetFace, AssetFile, Exif, Stack, Tag, User } from 'src/database';
|
||||
import { PropertyLifecycle } from 'src/decorators';
|
||||
import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { ExifResponseDto, mapExif } from 'src/dtos/exif.dto';
|
||||
@ -11,8 +12,7 @@ import {
|
||||
} from 'src/dtos/person.dto';
|
||||
import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
|
||||
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import { AssetType } from 'src/enum';
|
||||
import { AssetStatus, AssetType } from 'src/enum';
|
||||
import { mimeTypes } from 'src/utils/mime-types';
|
||||
|
||||
export class SanitizedAssetResponseDto {
|
||||
@ -56,6 +56,44 @@ export class AssetResponseDto extends SanitizedAssetResponseDto {
|
||||
resized?: boolean;
|
||||
}
|
||||
|
||||
export type MapAsset = {
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
deletedAt: Date | null;
|
||||
id: string;
|
||||
updateId: string;
|
||||
status: AssetStatus;
|
||||
checksum: Buffer<ArrayBufferLike>;
|
||||
deviceAssetId: string;
|
||||
deviceId: string;
|
||||
duplicateId: string | null;
|
||||
duration: string | null;
|
||||
encodedVideoPath: string | null;
|
||||
exifInfo?: Selectable<Exif> | null;
|
||||
faces?: AssetFace[];
|
||||
fileCreatedAt: Date;
|
||||
fileModifiedAt: Date;
|
||||
files?: AssetFile[];
|
||||
isArchived: boolean;
|
||||
isExternal: boolean;
|
||||
isFavorite: boolean;
|
||||
isOffline: boolean;
|
||||
isVisible: boolean;
|
||||
libraryId: string | null;
|
||||
livePhotoVideoId: string | null;
|
||||
localDateTime: Date;
|
||||
originalFileName: string;
|
||||
originalPath: string;
|
||||
owner?: User | null;
|
||||
ownerId: string;
|
||||
sidecarPath: string | null;
|
||||
stack?: Stack | null;
|
||||
stackId: string | null;
|
||||
tags?: Tag[];
|
||||
thumbhash: Buffer<ArrayBufferLike> | null;
|
||||
type: AssetType;
|
||||
};
|
||||
|
||||
export class AssetStackResponseDto {
|
||||
id!: string;
|
||||
|
||||
@ -72,7 +110,7 @@ export type AssetMapOptions = {
|
||||
};
|
||||
|
||||
// TODO: this is inefficient
|
||||
const peopleWithFaces = (faces: AssetFace[]): PersonWithFacesResponseDto[] => {
|
||||
const peopleWithFaces = (faces?: AssetFace[]): PersonWithFacesResponseDto[] => {
|
||||
const result: PersonWithFacesResponseDto[] = [];
|
||||
if (faces) {
|
||||
for (const face of faces) {
|
||||
@ -90,7 +128,7 @@ const peopleWithFaces = (faces: AssetFace[]): PersonWithFacesResponseDto[] => {
|
||||
return result;
|
||||
};
|
||||
|
||||
const mapStack = (entity: AssetEntity) => {
|
||||
const mapStack = (entity: { stack?: Stack | null }) => {
|
||||
if (!entity.stack) {
|
||||
return null;
|
||||
}
|
||||
@ -111,7 +149,7 @@ export const hexOrBufferToBase64 = (encoded: string | Buffer) => {
|
||||
return encoded.toString('base64');
|
||||
};
|
||||
|
||||
export function mapAsset(entity: AssetEntity, options: AssetMapOptions = {}): AssetResponseDto {
|
||||
export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
|
||||
const { stripMetadata = false, withStack = false } = options;
|
||||
|
||||
if (stripMetadata) {
|
||||
|
@ -4,7 +4,6 @@ import { IsEnum, IsInt, IsObject, IsPositive, ValidateNested } from 'class-valid
|
||||
import { Memory } from 'src/database';
|
||||
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
|
||||
import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import { MemoryType } from 'src/enum';
|
||||
import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';
|
||||
|
||||
@ -103,6 +102,6 @@ export const mapMemory = (entity: Memory, auth: AuthDto): MemoryResponseDto => {
|
||||
type: entity.type as MemoryType,
|
||||
data: entity.data as unknown as MemoryData,
|
||||
isSaved: entity.isSaved,
|
||||
assets: ('assets' in entity ? entity.assets : []).map((asset) => mapAsset(asset as AssetEntity, { auth })),
|
||||
assets: ('assets' in entity ? entity.assets : []).map((asset) => mapAsset(asset, { auth })),
|
||||
};
|
||||
};
|
||||
|
@ -1,9 +1,9 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsEnum, IsString } from 'class-validator';
import _ from 'lodash';
import { SharedLink } from 'src/database';
import { AlbumResponseDto, mapAlbumWithoutAssets } from 'src/dtos/album.dto';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { SharedLinkType } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';

@ -102,7 +102,7 @@ export class SharedLinkResponseDto {
showMetadata!: boolean;
}

export function mapSharedLink(sharedLink: SharedLinkEntity): SharedLinkResponseDto {
export function mapSharedLink(sharedLink: SharedLink): SharedLinkResponseDto {
const linkAssets = sharedLink.assets || [];

return {
@ -122,7 +122,7 @@ export function mapSharedLink(sharedLink: SharedLinkEntity): SharedLinkResponseD
};
}

export function mapSharedLinkWithoutMetadata(sharedLink: SharedLinkEntity): SharedLinkResponseDto {
export function mapSharedLinkWithoutMetadata(sharedLink: SharedLink): SharedLinkResponseDto {
const linkAssets = sharedLink.assets || [];
const albumAssets = (sharedLink?.album?.assets || []).map((asset) => asset);

@ -137,7 +137,7 @@ export function mapSharedLinkWithoutMetadata(sharedLink: SharedLinkEntity): Shar
type: sharedLink.type,
createdAt: sharedLink.createdAt,
expiresAt: sharedLink.expiresAt,
assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })) as AssetResponseDto[],
assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })),
album: sharedLink.album ? mapAlbumWithoutAssets(sharedLink.album) : undefined,
allowUpload: sharedLink.allowUpload,
allowDownload: sharedLink.allowDownload,

@ -2,7 +2,7 @@ import { Img, Link, Section, Text } from '@react-email/components';
import * as React from 'react';
import { ImmichButton } from 'src/emails/components/button.component';
import ImmichLayout from 'src/emails/components/immich.layout';
import { AlbumInviteEmailProps } from 'src/repositories/notification.repository';
import { AlbumInviteEmailProps } from 'src/repositories/email.repository';
import { replaceTemplateTags } from 'src/utils/replace-template-tags';

export const AlbumInviteEmail = ({

@ -2,7 +2,7 @@ import { Img, Link, Section, Text } from '@react-email/components';
import * as React from 'react';
import { ImmichButton } from 'src/emails/components/button.component';
import ImmichLayout from 'src/emails/components/immich.layout';
import { AlbumUpdateEmailProps } from 'src/repositories/notification.repository';
import { AlbumUpdateEmailProps } from 'src/repositories/email.repository';
import { replaceTemplateTags } from 'src/utils/replace-template-tags';

export const AlbumUpdateEmail = ({
Some files were not shown because too many files have changed in this diff.