From fe7189430848171dd3f65c5e1e35551388f5b130 Mon Sep 17 00:00:00 2001
From: Alex
Date: Tue, 20 May 2025 08:35:22 -0500
Subject: [PATCH 01/35] feat: locked view mobile (#18316)

* feat: locked/private view
* feat: locked/private view
* feat: mobile lock/private view
* feat: mobile lock/private view
* merge main
* pr feedback
* pr feedback
* bottom sheet sizing
* always lock when navigating away
---
 i18n/en.json | 13 +-
 .../android/app/src/main/AndroidManifest.xml | 1 +
 .../app/alextran/immich/MainActivity.kt | 18 +-
 .../app/src/main/res/values/styles.xml | 29 +-
 mobile/ios/Podfile.lock | 13 +
 mobile/ios/Runner/Info.plist | 326 +++++++++---------
 mobile/lib/constants/constants.dart | 3 +
 mobile/lib/constants/enums.dart | 2 +
 mobile/lib/entities/asset.entity.dart | 27 +-
 mobile/lib/entities/asset.entity.g.dart | 121 ++++++-
 .../lib/interfaces/asset_api.interface.dart | 6 +
 mobile/lib/interfaces/auth_api.interface.dart | 5 +
 .../lib/interfaces/biometric.interface.dart | 6 +
 .../interfaces/secure_storage.interface.dart | 5 +
 mobile/lib/interfaces/timeline.interface.dart | 5 +
 mobile/lib/main.dart | 4 +-
 .../models/auth/biometric_status.model.dart | 38 ++
 mobile/lib/pages/library/library.page.dart | 13 +
 .../lib/pages/library/locked/locked.page.dart | 95 +++++
 .../pages/library/locked/pin_auth.page.dart | 127 +++++++
 mobile/lib/providers/asset.provider.dart | 8 +
 mobile/lib/providers/auth.provider.dart | 12 +
 mobile/lib/providers/local_auth.provider.dart | 97 ++++++
 mobile/lib/providers/routes.provider.dart | 3 +
 .../providers/secure_storage.provider.dart | 10 +
 mobile/lib/providers/timeline.provider.dart | 5 +
 .../repositories/asset_api.repository.dart | 24 ++
 .../lib/repositories/auth_api.repository.dart | 22 ++
 .../repositories/biometric.repository.dart | 35 ++
 .../secure_storage.repository.dart | 27 ++
 .../lib/repositories/timeline.repository.dart | 27 +-
 .../lib/routing/app_navigation_observer.dart | 52 +++
 mobile/lib/routing/locked_guard.dart | 89 +++++
 mobile/lib/routing/router.dart | 34 +-
 mobile/lib/routing/router.gr.dart | 66 ++++
 .../lib/routing/tab_navigation_observer.dart | 35 --
 mobile/lib/services/asset.service.dart | 23 ++
 mobile/lib/services/auth.service.dart | 12 +
 mobile/lib/services/local_auth.service.dart | 26 ++
 mobile/lib/services/memory.service.dart | 7 +-
 .../lib/services/secure_storage.service.dart | 29 ++
 mobile/lib/services/timeline.service.dart | 9 +
 mobile/lib/theme/theme_data.dart | 16 +-
 mobile/lib/utils/migration.dart | 2 +-
 mobile/lib/utils/openapi_patching.dart | 5 +
 mobile/lib/utils/selection_handlers.dart | 27 ++
 .../asset_grid/control_bottom_app_bar.dart | 95 +++--
 .../widgets/asset_grid/multiselect_grid.dart | 29 ++
 .../asset_viewer/bottom_gallery_bar.dart | 8 +-
 .../asset_viewer/top_control_app_bar.dart | 15 +-
 mobile/lib/widgets/common/drag_sheet.dart | 8 +-
 mobile/lib/widgets/common/immich_toast.dart | 15 +-
 mobile/lib/widgets/forms/pin_input.dart | 124 +++++++
 .../widgets/forms/pin_registration_form.dart | 128 +++++++
 .../widgets/forms/pin_verification_form.dart | 94 +++++
 mobile/pubspec.lock | 104 ++++++
 mobile/pubspec.yaml | 3 +
 57 files changed, 1893 insertions(+), 289 deletions(-)
 create mode 100644 mobile/lib/interfaces/biometric.interface.dart
 create mode 100644 mobile/lib/interfaces/secure_storage.interface.dart
 create mode 100644 mobile/lib/models/auth/biometric_status.model.dart
 create mode 100644 mobile/lib/pages/library/locked/locked.page.dart
 create mode 100644 mobile/lib/pages/library/locked/pin_auth.page.dart
 create mode 100644 mobile/lib/providers/local_auth.provider.dart
 create mode 100644 mobile/lib/providers/routes.provider.dart
 create mode 100644 mobile/lib/providers/secure_storage.provider.dart
 create mode 100644 mobile/lib/repositories/biometric.repository.dart
 create mode 100644 mobile/lib/repositories/secure_storage.repository.dart
 create mode 100644 mobile/lib/routing/app_navigation_observer.dart
 create mode 100644 mobile/lib/routing/locked_guard.dart
 delete mode 100644 mobile/lib/routing/tab_navigation_observer.dart
 create mode 100644 mobile/lib/services/local_auth.service.dart
 create mode 100644 mobile/lib/services/secure_storage.service.dart
 create mode 100644 mobile/lib/widgets/forms/pin_input.dart
 create mode 100644 mobile/lib/widgets/forms/pin_registration_form.dart
 create mode 100644 mobile/lib/widgets/forms/pin_verification_form.dart

diff --git a/i18n/en.json b/i18n/en.json
index 66b6e3afe0..fb7743f8e4 100644
--- a/i18n/en.json
+++ b/i18n/en.json
@@ -563,6 +563,10 @@
   "backup_options_page_title": "Backup options",
   "backup_setting_subtitle": "Manage background and foreground upload settings",
   "backward": "Backward",
+  "biometric_auth_enabled": "Biometric authentication enabled",
+  "biometric_locked_out": "You are locked out of biometric authentication",
+  "biometric_no_options": "No biometric options available",
+  "biometric_not_available": "Biometric authentication is not available on this device",
   "birthdate_saved": "Date of birth saved successfully",
   "birthdate_set_description": "Date of birth is used to calculate the age of this person at the time of a photo.",
   "blurred_background": "Blurred background",
@@ -822,6 +826,7 @@
   "empty_trash": "Empty trash",
   "empty_trash_confirmation": "Are you sure you want to empty the trash? This will remove all the assets in trash permanently from Immich.\nYou cannot undo this action!",
   "enable": "Enable",
+  "enable_biometric_auth_description": "Enter your PIN code to enable biometric authentication",
   "enabled": "Enabled",
   "end_date": "End date",
   "enqueued": "Enqueued",
@@ -995,6 +1000,7 @@
   "external_network_sheet_info": "When not on the preferred Wi-Fi network, the app will connect to the server through the first of the below URLs it can reach, starting from top to bottom",
   "face_unassigned": "Unassigned",
   "failed": "Failed",
+  "failed_to_authenticate": "Failed to authenticate",
   "failed_to_load_assets": "Failed to load assets",
   "failed_to_load_folder": "Failed to load folder",
   "favorite": "Favorite",
@@ -1060,6 +1066,8 @@
   "home_page_favorite_err_local": "Can not favorite local assets yet, skipping",
   "home_page_favorite_err_partner": "Can not favorite partner assets yet, skipping",
   "home_page_first_time_notice": "If this is your first time using the app, please make sure to choose a backup album so that the timeline can populate photos and videos in it",
+  "home_page_locked_error_local": "Can not move local assets to locked folder, skipping",
+  "home_page_locked_error_partner": "Can not move partner assets to locked folder, skipping",
   "home_page_share_err_local": "Can not share local assets via link, skipping",
   "home_page_upload_err_limit": "Can only upload a maximum of 30 assets at a time, skipping",
   "host": "Host",
@@ -1227,8 +1235,6 @@
   "memories_setting_description": "Manage what you see in your memories",
   "memories_start_over": "Start Over",
   "memories_swipe_to_close": "Swipe up to close",
-  "memories_year_ago": "A year ago",
-  "memories_years_ago": "{years, plural, other {# years}} ago",
   "memory": "Memory",
   "memory_lane_title": "Memory Lane {title}",
"menu": "Menu", @@ -1400,6 +1406,7 @@ "play_memories": "Play memories", "play_motion_photo": "Play Motion Photo", "play_or_pause_video": "Play or pause video", + "please_auth_to_access": "Please authenticate to access", "port": "Port", "preferences_settings_subtitle": "Manage the app's preferences", "preferences_settings_title": "Preferences", @@ -1661,6 +1668,7 @@ "share_add_photos": "Add photos", "share_assets_selected": "{count} selected", "share_dialog_preparing": "Preparing...", + "share_link": "Share Link", "shared": "Shared", "shared_album_activities_input_disable": "Comment is disabled", "shared_album_activity_remove_content": "Do you want to delete this activity?", @@ -1884,6 +1892,7 @@ "uploading": "Uploading", "url": "URL", "usage": "Usage", + "use_biometric": "Use biometric", "use_current_connection": "use current connection", "use_custom_date_range": "Use custom date range instead", "user": "User", diff --git a/mobile/android/app/src/main/AndroidManifest.xml b/mobile/android/app/src/main/AndroidManifest.xml index eb81dc267b..2179c9eb3c 100644 --- a/mobile/android/app/src/main/AndroidManifest.xml +++ b/mobile/android/app/src/main/AndroidManifest.xml @@ -18,6 +18,7 @@ + diff --git a/mobile/android/app/src/main/kotlin/app/alextran/immich/MainActivity.kt b/mobile/android/app/src/main/kotlin/app/alextran/immich/MainActivity.kt index 752ded59ce..c1e5152d28 100644 --- a/mobile/android/app/src/main/kotlin/app/alextran/immich/MainActivity.kt +++ b/mobile/android/app/src/main/kotlin/app/alextran/immich/MainActivity.kt @@ -1,14 +1,14 @@ package app.alextran.immich -import io.flutter.embedding.android.FlutterActivity -import io.flutter.embedding.engine.FlutterEngine import androidx.annotation.NonNull +import io.flutter.embedding.android.FlutterFragmentActivity +import io.flutter.embedding.engine.FlutterEngine -class MainActivity : FlutterActivity() { - override fun configureFlutterEngine(@NonNull flutterEngine: FlutterEngine) { - super.configureFlutterEngine(flutterEngine) - flutterEngine.plugins.add(BackgroundServicePlugin()) - flutterEngine.plugins.add(HttpSSLOptionsPlugin()) - // No need to set up method channel here as it's now handled in the plugin - } +class MainActivity : FlutterFragmentActivity() { + override fun configureFlutterEngine(@NonNull flutterEngine: FlutterEngine) { + super.configureFlutterEngine(flutterEngine) + flutterEngine.plugins.add(BackgroundServicePlugin()) + flutterEngine.plugins.add(HttpSSLOptionsPlugin()) + // No need to set up method channel here as it's now handled in the plugin + } } diff --git a/mobile/android/app/src/main/res/values/styles.xml b/mobile/android/app/src/main/res/values/styles.xml index 0fdc703671..0a4dd28549 100644 --- a/mobile/android/app/src/main/res/values/styles.xml +++ b/mobile/android/app/src/main/res/values/styles.xml @@ -1,22 +1,23 @@ - - - - - + + \ No newline at end of file diff --git a/mobile/ios/Podfile.lock b/mobile/ios/Podfile.lock index 9740d6aa52..537cdba8d8 100644 --- a/mobile/ios/Podfile.lock +++ b/mobile/ios/Podfile.lock @@ -44,6 +44,8 @@ PODS: - Flutter - flutter_native_splash (2.4.3): - Flutter + - flutter_secure_storage (6.0.0): + - Flutter - flutter_udid (0.0.1): - Flutter - SAMKeychain @@ -59,6 +61,9 @@ PODS: - Flutter - isar_flutter_libs (1.0.0): - Flutter + - local_auth_darwin (0.0.1): + - Flutter + - FlutterMacOS - MapLibre (6.5.0) - maplibre_gl (0.0.1): - Flutter @@ -130,6 +135,7 @@ DEPENDENCIES: - Flutter (from `Flutter`) - flutter_local_notifications (from `.symlinks/plugins/flutter_local_notifications/ios`) - 
flutter_native_splash (from `.symlinks/plugins/flutter_native_splash/ios`) + - flutter_secure_storage (from `.symlinks/plugins/flutter_secure_storage/ios`) - flutter_udid (from `.symlinks/plugins/flutter_udid/ios`) - flutter_web_auth_2 (from `.symlinks/plugins/flutter_web_auth_2/ios`) - fluttertoast (from `.symlinks/plugins/fluttertoast/ios`) @@ -137,6 +143,7 @@ DEPENDENCIES: - image_picker_ios (from `.symlinks/plugins/image_picker_ios/ios`) - integration_test (from `.symlinks/plugins/integration_test/ios`) - isar_flutter_libs (from `.symlinks/plugins/isar_flutter_libs/ios`) + - local_auth_darwin (from `.symlinks/plugins/local_auth_darwin/darwin`) - maplibre_gl (from `.symlinks/plugins/maplibre_gl/ios`) - native_video_player (from `.symlinks/plugins/native_video_player/ios`) - network_info_plus (from `.symlinks/plugins/network_info_plus/ios`) @@ -178,6 +185,8 @@ EXTERNAL SOURCES: :path: ".symlinks/plugins/flutter_local_notifications/ios" flutter_native_splash: :path: ".symlinks/plugins/flutter_native_splash/ios" + flutter_secure_storage: + :path: ".symlinks/plugins/flutter_secure_storage/ios" flutter_udid: :path: ".symlinks/plugins/flutter_udid/ios" flutter_web_auth_2: @@ -192,6 +201,8 @@ EXTERNAL SOURCES: :path: ".symlinks/plugins/integration_test/ios" isar_flutter_libs: :path: ".symlinks/plugins/isar_flutter_libs/ios" + local_auth_darwin: + :path: ".symlinks/plugins/local_auth_darwin/darwin" maplibre_gl: :path: ".symlinks/plugins/maplibre_gl/ios" native_video_player: @@ -233,6 +244,7 @@ SPEC CHECKSUMS: Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7 flutter_local_notifications: ad39620c743ea4c15127860f4b5641649a988100 flutter_native_splash: c32d145d68aeda5502d5f543ee38c192065986cf + flutter_secure_storage: 1ed9476fba7e7a782b22888f956cce43e2c62f13 flutter_udid: f7c3884e6ec2951efe4f9de082257fc77c4d15e9 flutter_web_auth_2: 5c8d9dcd7848b5a9efb086d24e7a9adcae979c80 fluttertoast: 2c67e14dce98bbdb200df9e1acf610d7a6264ea1 @@ -240,6 +252,7 @@ SPEC CHECKSUMS: image_picker_ios: 7fe1ff8e34c1790d6fff70a32484959f563a928a integration_test: 4a889634ef21a45d28d50d622cf412dc6d9f586e isar_flutter_libs: bc909e72c3d756c2759f14c8776c13b5b0556e26 + local_auth_darwin: 553ce4f9b16d3fdfeafce9cf042e7c9f77c1c391 MapLibre: 0ebfa9329d313cec8bf0a5ba5a336a1dc903785e maplibre_gl: eab61cca6e1cfa9187249bacd3f08b51e8cd8ae9 native_video_player: b65c58951ede2f93d103a25366bdebca95081265 diff --git a/mobile/ios/Runner/Info.plist b/mobile/ios/Runner/Info.plist index 38394f0f1b..e0c719fd0f 100644 --- a/mobile/ios/Runner/Info.plist +++ b/mobile/ios/Runner/Info.plist @@ -1,165 +1,167 @@ - - AppGroupId - $(CUSTOM_GROUP_ID) - BGTaskSchedulerPermittedIdentifiers - - app.alextran.immich.backgroundFetch - app.alextran.immich.backgroundProcessing - - CADisableMinimumFrameDurationOnPhone - - CFBundleDevelopmentRegion - $(DEVELOPMENT_LANGUAGE) - CFBundleDisplayName - ${PRODUCT_NAME} - CFBundleDocumentTypes - - - CFBundleTypeName - ShareHandler - LSHandlerRank - Alternate - LSItemContentTypes - - public.file-url - public.image - public.text - public.movie - public.url - public.data - - - - CFBundleExecutable - $(EXECUTABLE_NAME) - CFBundleIdentifier - $(PRODUCT_BUNDLE_IDENTIFIER) - CFBundleInfoDictionaryVersion - 6.0 - CFBundleLocalizations - - en - ar - ca - cs - da - de - es - fi - fr - he - hi - hu - it - ja - ko - lv - mn - nb - nl - pl - pt - ro - ru - sk - sl - sr - sv - th - uk - vi - zh - - CFBundleName - immich_mobile - CFBundlePackageType - APPL - CFBundleShortVersionString - 1.132.3 - CFBundleSignature - ???? 
- CFBundleURLTypes - - - CFBundleTypeRole - Editor - CFBundleURLSchemes - - ShareMedia-$(PRODUCT_BUNDLE_IDENTIFIER) - - - - CFBundleVersion - 205 - FLTEnableImpeller - - ITSAppUsesNonExemptEncryption - - LSApplicationQueriesSchemes - - https - - LSRequiresIPhoneOS - - LSSupportsOpeningDocumentsInPlace - No - MGLMapboxMetricsEnabledSettingShownInApp - - NSAppTransportSecurity - - NSAllowsArbitraryLoads - - - NSCameraUsageDescription - We need to access the camera to let you take beautiful video using this app - NSLocationAlwaysAndWhenInUseUsageDescription - We require this permission to access the local WiFi name for background upload mechanism - NSLocationUsageDescription - We require this permission to access the local WiFi name - NSLocationWhenInUseUsageDescription - We require this permission to access the local WiFi name - NSMicrophoneUsageDescription - We need to access the microphone to let you take beautiful video using this app - NSPhotoLibraryAddUsageDescription - We need to manage backup your photos album - NSPhotoLibraryUsageDescription - We need to manage backup your photos album - NSUserActivityTypes - - INSendMessageIntent - - UIApplicationSupportsIndirectInputEvents - - UIBackgroundModes - - fetch - processing - - UILaunchStoryboardName - LaunchScreen - UIMainStoryboardFile - Main - UIStatusBarHidden - - UISupportedInterfaceOrientations - - UIInterfaceOrientationPortrait - UIInterfaceOrientationLandscapeLeft - UIInterfaceOrientationLandscapeRight - - UISupportedInterfaceOrientations~ipad - - UIInterfaceOrientationPortrait - UIInterfaceOrientationPortraitUpsideDown - UIInterfaceOrientationLandscapeLeft - UIInterfaceOrientationLandscapeRight - - UIViewControllerBasedStatusBarAppearance - - io.flutter.embedded_views_preview - - - + + AppGroupId + $(CUSTOM_GROUP_ID) + BGTaskSchedulerPermittedIdentifiers + + app.alextran.immich.backgroundFetch + app.alextran.immich.backgroundProcessing + + CADisableMinimumFrameDurationOnPhone + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleDisplayName + ${PRODUCT_NAME} + CFBundleDocumentTypes + + + CFBundleTypeName + ShareHandler + LSHandlerRank + Alternate + LSItemContentTypes + + public.file-url + public.image + public.text + public.movie + public.url + public.data + + + + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleLocalizations + + en + ar + ca + cs + da + de + es + fi + fr + he + hi + hu + it + ja + ko + lv + mn + nb + nl + pl + pt + ro + ru + sk + sl + sr + sv + th + uk + vi + zh + + CFBundleName + immich_mobile + CFBundlePackageType + APPL + CFBundleShortVersionString + 1.132.3 + CFBundleSignature + ???? 
+ CFBundleURLTypes + + + CFBundleTypeRole + Editor + CFBundleURLSchemes + + ShareMedia-$(PRODUCT_BUNDLE_IDENTIFIER) + + + + CFBundleVersion + 205 + FLTEnableImpeller + + ITSAppUsesNonExemptEncryption + + LSApplicationQueriesSchemes + + https + + LSRequiresIPhoneOS + + LSSupportsOpeningDocumentsInPlace + No + MGLMapboxMetricsEnabledSettingShownInApp + + NSAppTransportSecurity + + NSAllowsArbitraryLoads + + + NSCameraUsageDescription + We need to access the camera to let you take beautiful video using this app + NSLocationAlwaysAndWhenInUseUsageDescription + We require this permission to access the local WiFi name for background upload mechanism + NSLocationUsageDescription + We require this permission to access the local WiFi name + NSLocationWhenInUseUsageDescription + We require this permission to access the local WiFi name + NSMicrophoneUsageDescription + We need to access the microphone to let you take beautiful video using this app + NSPhotoLibraryAddUsageDescription + We need to manage backup your photos album + NSPhotoLibraryUsageDescription + We need to manage backup your photos album + NSUserActivityTypes + + INSendMessageIntent + + UIApplicationSupportsIndirectInputEvents + + UIBackgroundModes + + fetch + processing + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UIStatusBarHidden + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UIViewControllerBasedStatusBarAppearance + + io.flutter.embedded_views_preview + + NSFaceIDUsageDescription + We need to use FaceID to allow access to your locked folder + + \ No newline at end of file diff --git a/mobile/lib/constants/constants.dart b/mobile/lib/constants/constants.dart index a91e0a715d..33683afd92 100644 --- a/mobile/lib/constants/constants.dart +++ b/mobile/lib/constants/constants.dart @@ -11,3 +11,6 @@ const int kSyncEventBatchSize = 5000; // Hash batch limits const int kBatchHashFileLimit = 128; const int kBatchHashSizeLimit = 1024 * 1024 * 1024; // 1GB + +// Secure storage keys +const String kSecuredPinCode = "secured_pin_code"; diff --git a/mobile/lib/constants/enums.dart b/mobile/lib/constants/enums.dart index 3a3bf9959a..a691263a1e 100644 --- a/mobile/lib/constants/enums.dart +++ b/mobile/lib/constants/enums.dart @@ -8,3 +8,5 @@ enum TextSearchType { filename, description, } + +enum AssetVisibilityEnum { timeline, hidden, archive, locked } diff --git a/mobile/lib/entities/asset.entity.dart b/mobile/lib/entities/asset.entity.dart index 084cd1ee5d..9119d96a63 100644 --- a/mobile/lib/entities/asset.entity.dart +++ b/mobile/lib/entities/asset.entity.dart @@ -1,6 +1,7 @@ import 'dart:convert'; import 'dart:io'; +import 'package:immich_mobile/constants/enums.dart'; import 'package:immich_mobile/domain/models/exif.model.dart'; import 'package:immich_mobile/extensions/string_extensions.dart'; import 'package:immich_mobile/infrastructure/entities/exif.entity.dart' @@ -45,7 +46,8 @@ class Asset { : remote.stack?.primaryAssetId, stackCount = remote.stack?.assetCount ?? 
0, stackId = remote.stack?.id, - thumbhash = remote.thumbhash; + thumbhash = remote.thumbhash, + visibility = getVisibility(remote.visibility); Asset({ this.id = Isar.autoIncrement, @@ -71,6 +73,7 @@ class Asset { this.stackCount = 0, this.isOffline = false, this.thumbhash, + this.visibility = AssetVisibilityEnum.timeline, }); @ignore @@ -173,6 +176,9 @@ class Asset { int stackCount; + @Enumerated(EnumType.ordinal) + AssetVisibilityEnum visibility; + /// Returns null if the asset has no sync access to the exif info @ignore double? get aspectRatio { @@ -349,7 +355,8 @@ class Asset { a.thumbhash != thumbhash || stackId != a.stackId || stackCount != a.stackCount || - stackPrimaryAssetId == null && a.stackPrimaryAssetId != null; + stackPrimaryAssetId == null && a.stackPrimaryAssetId != null || + visibility != a.visibility; } /// Returns a new [Asset] with values from this and merged & updated with [a] @@ -452,6 +459,7 @@ class Asset { String? stackPrimaryAssetId, int? stackCount, String? thumbhash, + AssetVisibilityEnum? visibility, }) => Asset( id: id ?? this.id, @@ -477,6 +485,7 @@ class Asset { stackPrimaryAssetId: stackPrimaryAssetId ?? this.stackPrimaryAssetId, stackCount: stackCount ?? this.stackCount, thumbhash: thumbhash ?? this.thumbhash, + visibility: visibility ?? this.visibility, ); Future put(Isar db) async { @@ -541,8 +550,22 @@ class Asset { "isArchived": $isArchived, "isTrashed": $isTrashed, "isOffline": $isOffline, + "visibility": "$visibility", }"""; } + + static getVisibility(AssetResponseDtoVisibilityEnum visibility) { + switch (visibility) { + case AssetResponseDtoVisibilityEnum.timeline: + return AssetVisibilityEnum.timeline; + case AssetResponseDtoVisibilityEnum.archive: + return AssetVisibilityEnum.archive; + case AssetResponseDtoVisibilityEnum.hidden: + return AssetVisibilityEnum.hidden; + case AssetResponseDtoVisibilityEnum.locked: + return AssetVisibilityEnum.locked; + } + } } enum AssetType { diff --git a/mobile/lib/entities/asset.entity.g.dart b/mobile/lib/entities/asset.entity.g.dart index 07eee4825e..b558690813 100644 --- a/mobile/lib/entities/asset.entity.g.dart +++ b/mobile/lib/entities/asset.entity.g.dart @@ -118,8 +118,14 @@ const AssetSchema = CollectionSchema( name: r'updatedAt', type: IsarType.dateTime, ), - r'width': PropertySchema( + r'visibility': PropertySchema( id: 20, + name: r'visibility', + type: IsarType.byte, + enumMap: _AssetvisibilityEnumValueMap, + ), + r'width': PropertySchema( + id: 21, name: r'width', type: IsarType.int, ) @@ -256,7 +262,8 @@ void _assetSerialize( writer.writeString(offsets[17], object.thumbhash); writer.writeByte(offsets[18], object.type.index); writer.writeDateTime(offsets[19], object.updatedAt); - writer.writeInt(offsets[20], object.width); + writer.writeByte(offsets[20], object.visibility.index); + writer.writeInt(offsets[21], object.width); } Asset _assetDeserialize( @@ -288,7 +295,10 @@ Asset _assetDeserialize( type: _AssettypeValueEnumMap[reader.readByteOrNull(offsets[18])] ?? AssetType.other, updatedAt: reader.readDateTime(offsets[19]), - width: reader.readIntOrNull(offsets[20]), + visibility: + _AssetvisibilityValueEnumMap[reader.readByteOrNull(offsets[20])] ?? + AssetVisibilityEnum.timeline, + width: reader.readIntOrNull(offsets[21]), ); return object; } @@ -342,6 +352,9 @@ P _assetDeserializeProp

( case 19: return (reader.readDateTime(offset)) as P; case 20: + return (_AssetvisibilityValueEnumMap[reader.readByteOrNull(offset)] ?? + AssetVisibilityEnum.timeline) as P; + case 21: return (reader.readIntOrNull(offset)) as P; default: throw IsarError('Unknown property with id $propertyId'); @@ -360,6 +373,18 @@ const _AssettypeValueEnumMap = { 2: AssetType.video, 3: AssetType.audio, }; +const _AssetvisibilityEnumValueMap = { + 'timeline': 0, + 'hidden': 1, + 'archive': 2, + 'locked': 3, +}; +const _AssetvisibilityValueEnumMap = { + 0: AssetVisibilityEnum.timeline, + 1: AssetVisibilityEnum.hidden, + 2: AssetVisibilityEnum.archive, + 3: AssetVisibilityEnum.locked, +}; Id _assetGetId(Asset object) { return object.id; @@ -2477,6 +2502,59 @@ extension AssetQueryFilter on QueryBuilder { }); } + QueryBuilder visibilityEqualTo( + AssetVisibilityEnum value) { + return QueryBuilder.apply(this, (query) { + return query.addFilterCondition(FilterCondition.equalTo( + property: r'visibility', + value: value, + )); + }); + } + + QueryBuilder visibilityGreaterThan( + AssetVisibilityEnum value, { + bool include = false, + }) { + return QueryBuilder.apply(this, (query) { + return query.addFilterCondition(FilterCondition.greaterThan( + include: include, + property: r'visibility', + value: value, + )); + }); + } + + QueryBuilder visibilityLessThan( + AssetVisibilityEnum value, { + bool include = false, + }) { + return QueryBuilder.apply(this, (query) { + return query.addFilterCondition(FilterCondition.lessThan( + include: include, + property: r'visibility', + value: value, + )); + }); + } + + QueryBuilder visibilityBetween( + AssetVisibilityEnum lower, + AssetVisibilityEnum upper, { + bool includeLower = true, + bool includeUpper = true, + }) { + return QueryBuilder.apply(this, (query) { + return query.addFilterCondition(FilterCondition.between( + property: r'visibility', + lower: lower, + includeLower: includeLower, + upper: upper, + includeUpper: includeUpper, + )); + }); + } + QueryBuilder widthIsNull() { return QueryBuilder.apply(this, (query) { return query.addFilterCondition(const FilterCondition.isNull( @@ -2791,6 +2869,18 @@ extension AssetQuerySortBy on QueryBuilder { }); } + QueryBuilder sortByVisibility() { + return QueryBuilder.apply(this, (query) { + return query.addSortBy(r'visibility', Sort.asc); + }); + } + + QueryBuilder sortByVisibilityDesc() { + return QueryBuilder.apply(this, (query) { + return query.addSortBy(r'visibility', Sort.desc); + }); + } + QueryBuilder sortByWidth() { return QueryBuilder.apply(this, (query) { return query.addSortBy(r'width', Sort.asc); @@ -3057,6 +3147,18 @@ extension AssetQuerySortThenBy on QueryBuilder { }); } + QueryBuilder thenByVisibility() { + return QueryBuilder.apply(this, (query) { + return query.addSortBy(r'visibility', Sort.asc); + }); + } + + QueryBuilder thenByVisibilityDesc() { + return QueryBuilder.apply(this, (query) { + return query.addSortBy(r'visibility', Sort.desc); + }); + } + QueryBuilder thenByWidth() { return QueryBuilder.apply(this, (query) { return query.addSortBy(r'width', Sort.asc); @@ -3201,6 +3303,12 @@ extension AssetQueryWhereDistinct on QueryBuilder { }); } + QueryBuilder distinctByVisibility() { + return QueryBuilder.apply(this, (query) { + return query.addDistinctBy(r'visibility'); + }); + } + QueryBuilder distinctByWidth() { return QueryBuilder.apply(this, (query) { return query.addDistinctBy(r'width'); @@ -3335,6 +3443,13 @@ extension AssetQueryProperty on QueryBuilder { }); } + QueryBuilder + visibilityProperty() { + return 
QueryBuilder.apply(this, (query) { + return query.addPropertyName(r'visibility'); + }); + } + QueryBuilder widthProperty() { return QueryBuilder.apply(this, (query) { return query.addPropertyName(r'width'); diff --git a/mobile/lib/interfaces/asset_api.interface.dart b/mobile/lib/interfaces/asset_api.interface.dart index fe3320c9bb..a17e607d83 100644 --- a/mobile/lib/interfaces/asset_api.interface.dart +++ b/mobile/lib/interfaces/asset_api.interface.dart @@ -1,3 +1,4 @@ +import 'package:immich_mobile/constants/enums.dart'; import 'package:immich_mobile/entities/asset.entity.dart'; abstract interface class IAssetApiRepository { @@ -15,4 +16,9 @@ abstract interface class IAssetApiRepository { // Future delete(String id); Future> search({List personIds = const []}); + + Future updateVisibility( + List list, + AssetVisibilityEnum visibility, + ); } diff --git a/mobile/lib/interfaces/auth_api.interface.dart b/mobile/lib/interfaces/auth_api.interface.dart index 0a4b235ff3..bb9a8b5a2c 100644 --- a/mobile/lib/interfaces/auth_api.interface.dart +++ b/mobile/lib/interfaces/auth_api.interface.dart @@ -6,4 +6,9 @@ abstract interface class IAuthApiRepository { Future logout(); Future changePassword(String newPassword); + + Future unlockPinCode(String pinCode); + Future lockPinCode(); + + Future setupPinCode(String pinCode); } diff --git a/mobile/lib/interfaces/biometric.interface.dart b/mobile/lib/interfaces/biometric.interface.dart new file mode 100644 index 0000000000..e410c8e26e --- /dev/null +++ b/mobile/lib/interfaces/biometric.interface.dart @@ -0,0 +1,6 @@ +import 'package:immich_mobile/models/auth/biometric_status.model.dart'; + +abstract interface class IBiometricRepository { + Future getStatus(); + Future authenticate(String? message); +} diff --git a/mobile/lib/interfaces/secure_storage.interface.dart b/mobile/lib/interfaces/secure_storage.interface.dart new file mode 100644 index 0000000000..81230e0abd --- /dev/null +++ b/mobile/lib/interfaces/secure_storage.interface.dart @@ -0,0 +1,5 @@ +abstract interface class ISecureStorageRepository { + Future read(String key); + Future write(String key, String value); + Future delete(String key); +} diff --git a/mobile/lib/interfaces/timeline.interface.dart b/mobile/lib/interfaces/timeline.interface.dart index bc486a785f..3a4cce3cb6 100644 --- a/mobile/lib/interfaces/timeline.interface.dart +++ b/mobile/lib/interfaces/timeline.interface.dart @@ -31,4 +31,9 @@ abstract class ITimelineRepository { ); Stream watchAssetSelectionTimeline(String userId); + + Stream watchLockedTimeline( + String userId, + GroupAssetsBy groupAssetsBy, + ); } diff --git a/mobile/lib/main.dart b/mobile/lib/main.dart index c39d5e3a66..3c7c1fbe4d 100644 --- a/mobile/lib/main.dart +++ b/mobile/lib/main.dart @@ -19,7 +19,7 @@ import 'package:immich_mobile/providers/infrastructure/db.provider.dart'; import 'package:immich_mobile/providers/locale_provider.dart'; import 'package:immich_mobile/providers/theme.provider.dart'; import 'package:immich_mobile/routing/router.dart'; -import 'package:immich_mobile/routing/tab_navigation_observer.dart'; +import 'package:immich_mobile/routing/app_navigation_observer.dart'; import 'package:immich_mobile/services/background.service.dart'; import 'package:immich_mobile/services/local_notification.service.dart'; import 'package:immich_mobile/theme/dynamic_theme.dart'; @@ -219,7 +219,7 @@ class ImmichAppState extends ConsumerState ), routeInformationParser: router.defaultRouteParser(), routerDelegate: router.delegate( - navigatorObservers: () => 
[TabNavigationObserver(ref: ref)], + navigatorObservers: () => [AppNavigationObserver(ref: ref)], ), ), ), diff --git a/mobile/lib/models/auth/biometric_status.model.dart b/mobile/lib/models/auth/biometric_status.model.dart new file mode 100644 index 0000000000..3057f06e9c --- /dev/null +++ b/mobile/lib/models/auth/biometric_status.model.dart @@ -0,0 +1,38 @@ +import 'package:collection/collection.dart'; +import 'package:local_auth/local_auth.dart'; + +class BiometricStatus { + final List availableBiometrics; + final bool canAuthenticate; + + const BiometricStatus({ + required this.availableBiometrics, + required this.canAuthenticate, + }); + + @override + String toString() => + 'BiometricStatus(availableBiometrics: $availableBiometrics, canAuthenticate: $canAuthenticate)'; + + BiometricStatus copyWith({ + List? availableBiometrics, + bool? canAuthenticate, + }) { + return BiometricStatus( + availableBiometrics: availableBiometrics ?? this.availableBiometrics, + canAuthenticate: canAuthenticate ?? this.canAuthenticate, + ); + } + + @override + bool operator ==(covariant BiometricStatus other) { + if (identical(this, other)) return true; + final listEquals = const DeepCollectionEquality().equals; + + return listEquals(other.availableBiometrics, availableBiometrics) && + other.canAuthenticate == canAuthenticate; + } + + @override + int get hashCode => availableBiometrics.hashCode ^ canAuthenticate.hashCode; +} diff --git a/mobile/lib/pages/library/library.page.dart b/mobile/lib/pages/library/library.page.dart index 1dc336d204..50126ed1a8 100644 --- a/mobile/lib/pages/library/library.page.dart +++ b/mobile/lib/pages/library/library.page.dart @@ -140,6 +140,19 @@ class QuickAccessButtons extends ConsumerWidget { ), onTap: () => context.pushRoute(FolderRoute()), ), + ListTile( + leading: const Icon( + Icons.lock_outline_rounded, + size: 26, + ), + title: Text( + 'locked_folder'.tr(), + style: context.textTheme.titleSmall?.copyWith( + fontWeight: FontWeight.w500, + ), + ), + onTap: () => context.pushRoute(const LockedRoute()), + ), ListTile( leading: const Icon( Icons.group_outlined, diff --git a/mobile/lib/pages/library/locked/locked.page.dart b/mobile/lib/pages/library/locked/locked.page.dart new file mode 100644 index 0000000000..eef12a7107 --- /dev/null +++ b/mobile/lib/pages/library/locked/locked.page.dart @@ -0,0 +1,95 @@ +import 'package:auto_route/auto_route.dart'; +import 'package:easy_localization/easy_localization.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_hooks/flutter_hooks.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/extensions/build_context_extensions.dart'; +import 'package:immich_mobile/providers/auth.provider.dart'; +import 'package:immich_mobile/providers/multiselect.provider.dart'; +import 'package:immich_mobile/providers/timeline.provider.dart'; +import 'package:immich_mobile/widgets/asset_grid/multiselect_grid.dart'; + +@RoutePage() +class LockedPage extends HookConsumerWidget { + const LockedPage({super.key}); + + @override + Widget build(BuildContext context, WidgetRef ref) { + final appLifeCycle = useAppLifecycleState(); + final showOverlay = useState(false); + final authProviderNotifier = ref.read(authProvider.notifier); + // lock the page when it is destroyed + useEffect( + () { + return () { + authProviderNotifier.lockPinCode(); + }; + }, + [], + ); + + useEffect( + () { + if (context.mounted) { + if (appLifeCycle == AppLifecycleState.resumed) { + showOverlay.value = false; + } else { + 
showOverlay.value = true; + } + } + + return null; + }, + [appLifeCycle], + ); + + return Scaffold( + appBar: ref.watch(multiselectProvider) ? null : const LockPageAppBar(), + body: showOverlay.value + ? const SizedBox() + : MultiselectGrid( + renderListProvider: lockedTimelineProvider, + topWidget: Padding( + padding: const EdgeInsets.all(16.0), + child: Center( + child: Text( + 'no_locked_photos_message'.tr(), + style: context.textTheme.labelLarge, + ), + ), + ), + editEnabled: false, + favoriteEnabled: false, + unfavorite: false, + archiveEnabled: false, + stackEnabled: false, + unarchive: false, + ), + ); + } +} + +class LockPageAppBar extends ConsumerWidget implements PreferredSizeWidget { + const LockPageAppBar({super.key}); + + @override + Widget build(BuildContext context, WidgetRef ref) { + return AppBar( + leading: IconButton( + onPressed: () { + ref.read(authProvider.notifier).lockPinCode(); + context.maybePop(); + }, + icon: const Icon(Icons.arrow_back_ios_rounded), + ), + centerTitle: true, + automaticallyImplyLeading: false, + title: const Text( + 'locked_folder', + ).tr(), + ); + } + + @override + Size get preferredSize => const Size.fromHeight(kToolbarHeight); +} diff --git a/mobile/lib/pages/library/locked/pin_auth.page.dart b/mobile/lib/pages/library/locked/pin_auth.page.dart new file mode 100644 index 0000000000..cca0e3b7ac --- /dev/null +++ b/mobile/lib/pages/library/locked/pin_auth.page.dart @@ -0,0 +1,127 @@ +import 'package:auto_route/auto_route.dart'; +import 'package:easy_localization/easy_localization.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_hooks/flutter_hooks.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/extensions/build_context_extensions.dart'; +import 'package:immich_mobile/providers/local_auth.provider.dart'; +import 'package:immich_mobile/routing/router.dart'; +import 'package:immich_mobile/widgets/forms/pin_registration_form.dart'; +import 'package:immich_mobile/widgets/forms/pin_verification_form.dart'; + +@RoutePage() +class PinAuthPage extends HookConsumerWidget { + final bool createPinCode; + + const PinAuthPage({super.key, this.createPinCode = false}); + + @override + Widget build(BuildContext context, WidgetRef ref) { + final localAuthState = ref.watch(localAuthProvider); + final showPinRegistrationForm = useState(createPinCode); + + Future registerBiometric(String pinCode) async { + final isRegistered = + await ref.read(localAuthProvider.notifier).registerBiometric( + context, + pinCode, + ); + + if (isRegistered) { + context.showSnackBar( + SnackBar( + content: Text( + 'biometric_auth_enabled'.tr(), + style: context.textTheme.labelLarge, + ), + duration: const Duration(seconds: 3), + backgroundColor: context.colorScheme.primaryContainer, + ), + ); + + context.replaceRoute(const LockedRoute()); + } + } + + enableBiometricAuth() { + showDialog( + context: context, + builder: (buildContext) { + return SimpleDialog( + children: [ + Container( + padding: const EdgeInsets.all(16), + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + PinVerificationForm( + description: 'enable_biometric_auth_description'.tr(), + onSuccess: (pinCode) { + Navigator.pop(buildContext); + registerBiometric(pinCode); + }, + autoFocus: true, + icon: Icons.fingerprint_rounded, + successIcon: Icons.fingerprint_rounded, + ), + ], + ), + ), + ], + ); + }, + ); + } + + return Scaffold( + appBar: AppBar( + title: Text('locked_folder'.tr()), + ), + body: ListView( + shrinkWrap: true, + children: 
[ + Padding( + padding: const EdgeInsets.only(top: 36.0), + child: showPinRegistrationForm.value + ? Center( + child: PinRegistrationForm( + onDone: () => showPinRegistrationForm.value = false, + ), + ) + : Column( + children: [ + Center( + child: PinVerificationForm( + autoFocus: true, + onSuccess: (_) => + context.replaceRoute(const LockedRoute()), + ), + ), + const SizedBox(height: 24), + if (localAuthState.canAuthenticate) ...[ + Padding( + padding: const EdgeInsets.only(right: 16.0), + child: TextButton.icon( + icon: const Icon( + Icons.fingerprint, + size: 28, + ), + onPressed: enableBiometricAuth, + label: Text( + 'use_biometric'.tr(), + style: context.textTheme.labelLarge?.copyWith( + color: context.primaryColor, + fontSize: 18, + ), + ), + ), + ), + ], + ], + ), + ), + ], + ), + ); + } +} diff --git a/mobile/lib/providers/asset.provider.dart b/mobile/lib/providers/asset.provider.dart index a35ab10bf3..5b77da90f3 100644 --- a/mobile/lib/providers/asset.provider.dart +++ b/mobile/lib/providers/asset.provider.dart @@ -1,5 +1,6 @@ import 'package:flutter/material.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/constants/enums.dart'; import 'package:immich_mobile/domain/models/store.model.dart'; import 'package:immich_mobile/domain/services/user.service.dart'; import 'package:immich_mobile/entities/asset.entity.dart'; @@ -170,6 +171,13 @@ class AssetNotifier extends StateNotifier { status ??= !assets.every((a) => a.isArchived); return _assetService.changeArchiveStatus(assets, status); } + + Future setLockedView( + List selection, + AssetVisibilityEnum visibility, + ) { + return _assetService.setVisibility(selection, visibility); + } } final assetDetailProvider = diff --git a/mobile/lib/providers/auth.provider.dart b/mobile/lib/providers/auth.provider.dart index 297b3a99fe..8c783395cd 100644 --- a/mobile/lib/providers/auth.provider.dart +++ b/mobile/lib/providers/auth.provider.dart @@ -188,4 +188,16 @@ class AuthNotifier extends StateNotifier { Future setOpenApiServiceEndpoint() { return _authService.setOpenApiServiceEndpoint(); } + + Future unlockPinCode(String pinCode) { + return _authService.unlockPinCode(pinCode); + } + + Future lockPinCode() { + return _authService.lockPinCode(); + } + + Future setupPinCode(String pinCode) { + return _authService.setupPinCode(pinCode); + } } diff --git a/mobile/lib/providers/local_auth.provider.dart b/mobile/lib/providers/local_auth.provider.dart new file mode 100644 index 0000000000..6f7ca5eb71 --- /dev/null +++ b/mobile/lib/providers/local_auth.provider.dart @@ -0,0 +1,97 @@ +import 'package:easy_localization/easy_localization.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/constants/constants.dart'; +import 'package:immich_mobile/extensions/build_context_extensions.dart'; +import 'package:immich_mobile/models/auth/biometric_status.model.dart'; +import 'package:immich_mobile/services/local_auth.service.dart'; +import 'package:immich_mobile/services/secure_storage.service.dart'; +import 'package:logging/logging.dart'; + +final localAuthProvider = + StateNotifierProvider((ref) { + return LocalAuthNotifier( + ref.watch(localAuthServiceProvider), + ref.watch(secureStorageServiceProvider), + ); +}); + +class LocalAuthNotifier extends StateNotifier { + final LocalAuthService _localAuthService; + final SecureStorageService _secureStorageService; + + final _log = 
Logger("LocalAuthNotifier"); + + LocalAuthNotifier(this._localAuthService, this._secureStorageService) + : super( + const BiometricStatus( + availableBiometrics: [], + canAuthenticate: false, + ), + ) { + _localAuthService.getStatus().then((value) { + state = state.copyWith( + canAuthenticate: value.canAuthenticate, + availableBiometrics: value.availableBiometrics, + ); + }); + } + + Future registerBiometric(BuildContext context, String pinCode) async { + final isAuthenticated = + await authenticate(context, 'Authenticate to enable biometrics'); + + if (!isAuthenticated) { + return false; + } + + await _secureStorageService.write(kSecuredPinCode, pinCode); + + return true; + } + + Future authenticate(BuildContext context, String? message) async { + String errorMessage = ""; + + try { + return await _localAuthService.authenticate(message); + } on PlatformException catch (error) { + switch (error.code) { + case "NotEnrolled": + _log.warning("User is not enrolled in biometrics"); + errorMessage = "biometric_no_options".tr(); + break; + case "NotAvailable": + _log.warning("Biometric authentication is not available"); + errorMessage = "biometric_not_available".tr(); + break; + case "LockedOut": + _log.warning("User is locked out of biometric authentication"); + errorMessage = "biometric_locked_out".tr(); + break; + default: + _log.warning("Failed to authenticate with unknown reason"); + errorMessage = 'failed_to_authenticate'.tr(); + } + } catch (error) { + _log.warning("Error during authentication: $error"); + errorMessage = 'failed_to_authenticate'.tr(); + } finally { + if (errorMessage.isNotEmpty) { + context.showSnackBar( + SnackBar( + content: Text( + errorMessage, + style: context.textTheme.labelLarge, + ), + duration: const Duration(seconds: 3), + backgroundColor: context.colorScheme.errorContainer, + ), + ); + } + } + + return false; + } +} diff --git a/mobile/lib/providers/routes.provider.dart b/mobile/lib/providers/routes.provider.dart new file mode 100644 index 0000000000..a5b903e312 --- /dev/null +++ b/mobile/lib/providers/routes.provider.dart @@ -0,0 +1,3 @@ +import 'package:hooks_riverpod/hooks_riverpod.dart'; + +final inLockedViewProvider = StateProvider((ref) => false); diff --git a/mobile/lib/providers/secure_storage.provider.dart b/mobile/lib/providers/secure_storage.provider.dart new file mode 100644 index 0000000000..0194e527e9 --- /dev/null +++ b/mobile/lib/providers/secure_storage.provider.dart @@ -0,0 +1,10 @@ +import 'package:hooks_riverpod/hooks_riverpod.dart'; + +final secureStorageProvider = + StateNotifierProvider((ref) { + return SecureStorageProvider(); +}); + +class SecureStorageProvider extends StateNotifier { + SecureStorageProvider() : super(null); +} diff --git a/mobile/lib/providers/timeline.provider.dart b/mobile/lib/providers/timeline.provider.dart index f857d8aa6c..b2c763cdfa 100644 --- a/mobile/lib/providers/timeline.provider.dart +++ b/mobile/lib/providers/timeline.provider.dart @@ -73,3 +73,8 @@ final assetsTimelineProvider = null, ); }); + +final lockedTimelineProvider = StreamProvider((ref) { + final timelineService = ref.watch(timelineServiceProvider); + return timelineService.watchLockedTimelineProvider(); +}); diff --git a/mobile/lib/repositories/asset_api.repository.dart b/mobile/lib/repositories/asset_api.repository.dart index f4fcd8a6dd..45442c2d61 100644 --- a/mobile/lib/repositories/asset_api.repository.dart +++ b/mobile/lib/repositories/asset_api.repository.dart @@ -1,4 +1,5 @@ import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 
'package:immich_mobile/constants/enums.dart'; import 'package:immich_mobile/entities/asset.entity.dart'; import 'package:immich_mobile/interfaces/asset_api.interface.dart'; import 'package:immich_mobile/providers/api.provider.dart'; @@ -48,4 +49,27 @@ class AssetApiRepository extends ApiRepository implements IAssetApiRepository { } return result; } + + @override + Future updateVisibility( + List ids, + AssetVisibilityEnum visibility, + ) async { + return _api.updateAssets( + AssetBulkUpdateDto(ids: ids, visibility: _mapVisibility(visibility)), + ); + } + + _mapVisibility(AssetVisibilityEnum visibility) { + switch (visibility) { + case AssetVisibilityEnum.timeline: + return AssetVisibility.timeline; + case AssetVisibilityEnum.hidden: + return AssetVisibility.hidden; + case AssetVisibilityEnum.locked: + return AssetVisibility.locked; + case AssetVisibilityEnum.archive: + return AssetVisibility.archive; + } + } } diff --git a/mobile/lib/repositories/auth_api.repository.dart b/mobile/lib/repositories/auth_api.repository.dart index f3a1d52de3..4015ffd7bc 100644 --- a/mobile/lib/repositories/auth_api.repository.dart +++ b/mobile/lib/repositories/auth_api.repository.dart @@ -55,4 +55,26 @@ class AuthApiRepository extends ApiRepository implements IAuthApiRepository { userId: dto.userId, ); } + + @override + Future unlockPinCode(String pinCode) async { + try { + await _apiService.authenticationApi + .unlockAuthSession(SessionUnlockDto(pinCode: pinCode)); + return true; + } catch (_) { + return false; + } + } + + @override + Future setupPinCode(String pinCode) { + return _apiService.authenticationApi + .setupPinCode(PinCodeSetupDto(pinCode: pinCode)); + } + + @override + Future lockPinCode() { + return _apiService.authenticationApi.lockAuthSession(); + } } diff --git a/mobile/lib/repositories/biometric.repository.dart b/mobile/lib/repositories/biometric.repository.dart new file mode 100644 index 0000000000..588fa44797 --- /dev/null +++ b/mobile/lib/repositories/biometric.repository.dart @@ -0,0 +1,35 @@ +import 'package:easy_localization/easy_localization.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/interfaces/biometric.interface.dart'; +import 'package:immich_mobile/models/auth/biometric_status.model.dart'; +import 'package:local_auth/local_auth.dart'; + +final biometricRepositoryProvider = + Provider((ref) => BiometricRepository(LocalAuthentication())); + +class BiometricRepository implements IBiometricRepository { + final LocalAuthentication _localAuth; + + BiometricRepository(this._localAuth); + + @override + Future getStatus() async { + final bool canAuthenticateWithBiometrics = + await _localAuth.canCheckBiometrics; + final bool canAuthenticate = + canAuthenticateWithBiometrics || await _localAuth.isDeviceSupported(); + final availableBiometric = await _localAuth.getAvailableBiometrics(); + + return BiometricStatus( + canAuthenticate: canAuthenticate, + availableBiometrics: availableBiometric, + ); + } + + @override + Future authenticate(String? message) async { + return _localAuth.authenticate( + localizedReason: message ?? 
'please_auth_to_access'.tr(), + ); + } +} diff --git a/mobile/lib/repositories/secure_storage.repository.dart b/mobile/lib/repositories/secure_storage.repository.dart new file mode 100644 index 0000000000..fc641bcc91 --- /dev/null +++ b/mobile/lib/repositories/secure_storage.repository.dart @@ -0,0 +1,27 @@ +import 'package:flutter_secure_storage/flutter_secure_storage.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/interfaces/secure_storage.interface.dart'; + +final secureStorageRepositoryProvider = + Provider((ref) => SecureStorageRepository(const FlutterSecureStorage())); + +class SecureStorageRepository implements ISecureStorageRepository { + final FlutterSecureStorage _secureStorage; + + SecureStorageRepository(this._secureStorage); + + @override + Future read(String key) { + return _secureStorage.read(key: key); + } + + @override + Future write(String key, String value) { + return _secureStorage.write(key: key, value: value); + } + + @override + Future delete(String key) { + return _secureStorage.delete(key: key); + } +} diff --git a/mobile/lib/repositories/timeline.repository.dart b/mobile/lib/repositories/timeline.repository.dart index 319ce3e5b4..f48b749767 100644 --- a/mobile/lib/repositories/timeline.repository.dart +++ b/mobile/lib/repositories/timeline.repository.dart @@ -45,8 +45,8 @@ class TimelineRepository extends DatabaseRepository .where() .ownerIdEqualToAnyChecksum(fastHash(userId)) .filter() - .isArchivedEqualTo(true) .isTrashedEqualTo(false) + .visibilityEqualTo(AssetVisibilityEnum.archive) .sortByFileCreatedAtDesc(); return _watchRenderList(query, GroupAssetsBy.none); @@ -59,6 +59,8 @@ class TimelineRepository extends DatabaseRepository .ownerIdEqualToAnyChecksum(fastHash(userId)) .filter() .isFavoriteEqualTo(true) + .not() + .visibilityEqualTo(AssetVisibilityEnum.locked) .isTrashedEqualTo(false) .sortByFileCreatedAtDesc(); @@ -94,8 +96,8 @@ class TimelineRepository extends DatabaseRepository Stream watchAllVideosTimeline() { final query = db.assets .filter() - .isArchivedEqualTo(false) .isTrashedEqualTo(false) + .visibilityEqualTo(AssetVisibilityEnum.timeline) .typeEqualTo(AssetType.video) .sortByFileCreatedAtDesc(); @@ -111,9 +113,9 @@ class TimelineRepository extends DatabaseRepository .where() .ownerIdEqualToAnyChecksum(fastHash(userId)) .filter() - .isArchivedEqualTo(false) .isTrashedEqualTo(false) .stackPrimaryAssetIdIsNull() + .visibilityEqualTo(AssetVisibilityEnum.timeline) .sortByFileCreatedAtDesc(); return _watchRenderList(query, groupAssetByOption); @@ -129,8 +131,8 @@ class TimelineRepository extends DatabaseRepository .where() .anyOf(isarUserIds, (qb, id) => qb.ownerIdEqualToAnyChecksum(id)) .filter() - .isArchivedEqualTo(false) .isTrashedEqualTo(false) + .visibilityEqualTo(AssetVisibilityEnum.timeline) .stackPrimaryAssetIdIsNull() .sortByFileCreatedAtDesc(); return _watchRenderList(query, groupAssetByOption); @@ -151,6 +153,7 @@ class TimelineRepository extends DatabaseRepository .remoteIdIsNotNull() .filter() .ownerIdEqualTo(fastHash(userId)) + .visibilityEqualTo(AssetVisibilityEnum.timeline) .isTrashedEqualTo(false) .stackPrimaryAssetIdIsNull() .sortByFileCreatedAtDesc(); @@ -158,6 +161,22 @@ class TimelineRepository extends DatabaseRepository return _watchRenderList(query, GroupAssetsBy.none); } + @override + Stream watchLockedTimeline( + String userId, + GroupAssetsBy getGroupByOption, + ) { + final query = db.assets + .where() + .ownerIdEqualToAnyChecksum(fastHash(userId)) + .filter() + 
.visibilityEqualTo(AssetVisibilityEnum.locked) + .isTrashedEqualTo(false) + .sortByFileCreatedAtDesc(); + + return _watchRenderList(query, getGroupByOption); + } + Stream _watchRenderList( QueryBuilder query, GroupAssetsBy groupAssetsBy, diff --git a/mobile/lib/routing/app_navigation_observer.dart b/mobile/lib/routing/app_navigation_observer.dart new file mode 100644 index 0000000000..44662c0b8b --- /dev/null +++ b/mobile/lib/routing/app_navigation_observer.dart @@ -0,0 +1,52 @@ +import 'package:auto_route/auto_route.dart'; +import 'package:flutter/material.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/providers/routes.provider.dart'; +import 'package:immich_mobile/routing/router.dart'; + +class AppNavigationObserver extends AutoRouterObserver { + /// Riverpod Instance + final WidgetRef ref; + + AppNavigationObserver({ + required this.ref, + }); + + @override + Future didChangeTabRoute( + TabPageRoute route, + TabPageRoute previousRoute, + ) async { + Future( + () => ref.read(inLockedViewProvider.notifier).state = false, + ); + } + + @override + void didPush(Route route, Route? previousRoute) { + _handleLockedViewState(route, previousRoute); + } + + _handleLockedViewState(Route route, Route? previousRoute) { + final isInLockedView = ref.read(inLockedViewProvider); + final isFromLockedViewToDetailView = + route.settings.name == GalleryViewerRoute.name && + previousRoute?.settings.name == LockedRoute.name; + + final isFromDetailViewToInfoPanelView = route.settings.name == null && + previousRoute?.settings.name == GalleryViewerRoute.name && + isInLockedView; + + if (route.settings.name == LockedRoute.name || + isFromLockedViewToDetailView || + isFromDetailViewToInfoPanelView) { + Future( + () => ref.read(inLockedViewProvider.notifier).state = true, + ); + } else { + Future( + () => ref.read(inLockedViewProvider.notifier).state = false, + ); + } + } +} diff --git a/mobile/lib/routing/locked_guard.dart b/mobile/lib/routing/locked_guard.dart new file mode 100644 index 0000000000..d731c7942c --- /dev/null +++ b/mobile/lib/routing/locked_guard.dart @@ -0,0 +1,89 @@ +import 'package:auto_route/auto_route.dart'; +import 'package:flutter/services.dart'; +import 'package:immich_mobile/constants/constants.dart'; +import 'package:immich_mobile/routing/router.dart'; + +import 'package:immich_mobile/services/api.service.dart'; +import 'package:immich_mobile/services/local_auth.service.dart'; +import 'package:immich_mobile/services/secure_storage.service.dart'; +import 'package:local_auth/error_codes.dart' as auth_error; +import 'package:logging/logging.dart'; +// ignore: import_rule_openapi +import 'package:openapi/api.dart'; + +class LockedGuard extends AutoRouteGuard { + final ApiService _apiService; + final SecureStorageService _secureStorageService; + final LocalAuthService _localAuth; + final _log = Logger("AuthGuard"); + + LockedGuard( + this._apiService, + this._secureStorageService, + this._localAuth, + ); + + @override + void onNavigation(NavigationResolver resolver, StackRouter router) async { + final authStatus = await _apiService.authenticationApi.getAuthStatus(); + + if (authStatus == null) { + resolver.next(false); + return; + } + + /// Check if a pincode has been created but this user. 
Show the form to create if not exist + if (!authStatus.pinCode) { + router.push(PinAuthRoute(createPinCode: true)); + } + + if (authStatus.isElevated) { + resolver.next(true); + return; + } + + /// Check if the user has the pincode saved in secure storage, meaning + /// the user has enabled the biometric authentication + final securePinCode = await _secureStorageService.read(kSecuredPinCode); + if (securePinCode == null) { + router.push(PinAuthRoute()); + return; + } + + try { + final bool isAuth = await _localAuth.authenticate(); + + if (!isAuth) { + resolver.next(false); + return; + } + + await _apiService.authenticationApi.unlockAuthSession( + SessionUnlockDto(pinCode: securePinCode), + ); + + resolver.next(true); + } on PlatformException catch (error) { + switch (error.code) { + case auth_error.notAvailable: + _log.severe("notAvailable: $error"); + break; + case auth_error.notEnrolled: + _log.severe("not enrolled"); + break; + default: + _log.severe("error"); + break; + } + + resolver.next(false); + } on ApiException { + // PIN code has changed, need to re-enter to access + await _secureStorageService.delete(kSecuredPinCode); + router.push(PinAuthRoute()); + } catch (error) { + _log.severe("Failed to access locked page", error); + resolver.next(false); + } + } +} diff --git a/mobile/lib/routing/router.dart b/mobile/lib/routing/router.dart index fcfe7e59bd..317ce7cc54 100644 --- a/mobile/lib/routing/router.dart +++ b/mobile/lib/routing/router.dart @@ -39,6 +39,8 @@ import 'package:immich_mobile/pages/library/favorite.page.dart'; import 'package:immich_mobile/pages/library/folder/folder.page.dart'; import 'package:immich_mobile/pages/library/library.page.dart'; import 'package:immich_mobile/pages/library/local_albums.page.dart'; +import 'package:immich_mobile/pages/library/locked/locked.page.dart'; +import 'package:immich_mobile/pages/library/locked/pin_auth.page.dart'; import 'package:immich_mobile/pages/library/partner/partner.page.dart'; import 'package:immich_mobile/pages/library/partner/partner_detail.page.dart'; import 'package:immich_mobile/pages/library/people/people_collection.page.dart'; @@ -67,24 +69,41 @@ import 'package:immich_mobile/routing/auth_guard.dart'; import 'package:immich_mobile/routing/backup_permission_guard.dart'; import 'package:immich_mobile/routing/custom_transition_builders.dart'; import 'package:immich_mobile/routing/duplicate_guard.dart'; +import 'package:immich_mobile/routing/locked_guard.dart'; import 'package:immich_mobile/services/api.service.dart'; +import 'package:immich_mobile/services/local_auth.service.dart'; +import 'package:immich_mobile/services/secure_storage.service.dart'; import 'package:immich_mobile/widgets/asset_grid/asset_grid_data_structure.dart'; import 'package:maplibre_gl/maplibre_gl.dart'; part 'router.gr.dart'; +final appRouterProvider = Provider( + (ref) => AppRouter( + ref.watch(apiServiceProvider), + ref.watch(galleryPermissionNotifier.notifier), + ref.watch(secureStorageServiceProvider), + ref.watch(localAuthServiceProvider), + ), +); + @AutoRouterConfig(replaceInRouteName: 'Page,Route') class AppRouter extends RootStackRouter { late final AuthGuard _authGuard; late final DuplicateGuard _duplicateGuard; late final BackupPermissionGuard _backupPermissionGuard; + late final LockedGuard _lockedGuard; AppRouter( ApiService apiService, GalleryPermissionNotifier galleryPermissionNotifier, + SecureStorageService secureStorageService, + LocalAuthService localAuthService, ) { _authGuard = AuthGuard(apiService); _duplicateGuard = 
DuplicateGuard(); + _lockedGuard = + LockedGuard(apiService, secureStorageService, localAuthService); _backupPermissionGuard = BackupPermissionGuard(galleryPermissionNotifier); } @@ -289,12 +308,13 @@ class AppRouter extends RootStackRouter { page: ShareIntentRoute.page, guards: [_authGuard, _duplicateGuard], ), + AutoRoute( + page: LockedRoute.page, + guards: [_authGuard, _lockedGuard, _duplicateGuard], + ), + AutoRoute( + page: PinAuthRoute.page, + guards: [_authGuard, _duplicateGuard], + ), ]; } - -final appRouterProvider = Provider( - (ref) => AppRouter( - ref.watch(apiServiceProvider), - ref.watch(galleryPermissionNotifier.notifier), - ), -); diff --git a/mobile/lib/routing/router.gr.dart b/mobile/lib/routing/router.gr.dart index 01ab3fa13c..da488779e6 100644 --- a/mobile/lib/routing/router.gr.dart +++ b/mobile/lib/routing/router.gr.dart @@ -956,6 +956,25 @@ class LocalAlbumsRoute extends PageRouteInfo { ); } +/// generated route for +/// [LockedPage] +class LockedRoute extends PageRouteInfo { + const LockedRoute({List? children}) + : super( + LockedRoute.name, + initialChildren: children, + ); + + static const String name = 'LockedRoute'; + + static PageInfo page = PageInfo( + name, + builder: (data) { + return const LockedPage(); + }, + ); +} + /// generated route for /// [LoginPage] class LoginRoute extends PageRouteInfo { @@ -1359,6 +1378,53 @@ class PhotosRoute extends PageRouteInfo { ); } +/// generated route for +/// [PinAuthPage] +class PinAuthRoute extends PageRouteInfo { + PinAuthRoute({ + Key? key, + bool createPinCode = false, + List? children, + }) : super( + PinAuthRoute.name, + args: PinAuthRouteArgs( + key: key, + createPinCode: createPinCode, + ), + initialChildren: children, + ); + + static const String name = 'PinAuthRoute'; + + static PageInfo page = PageInfo( + name, + builder: (data) { + final args = + data.argsAs(orElse: () => const PinAuthRouteArgs()); + return PinAuthPage( + key: args.key, + createPinCode: args.createPinCode, + ); + }, + ); +} + +class PinAuthRouteArgs { + const PinAuthRouteArgs({ + this.key, + this.createPinCode = false, + }); + + final Key? 
key; + + final bool createPinCode; + + @override + String toString() { + return 'PinAuthRouteArgs{key: $key, createPinCode: $createPinCode}'; + } +} + /// generated route for /// [PlacesCollectionPage] class PlacesCollectionRoute extends PageRouteInfo { diff --git a/mobile/lib/routing/tab_navigation_observer.dart b/mobile/lib/routing/tab_navigation_observer.dart deleted file mode 100644 index d95820885e..0000000000 --- a/mobile/lib/routing/tab_navigation_observer.dart +++ /dev/null @@ -1,35 +0,0 @@ -import 'package:auto_route/auto_route.dart'; -import 'package:flutter/foundation.dart'; -import 'package:hooks_riverpod/hooks_riverpod.dart'; -import 'package:immich_mobile/providers/asset.provider.dart'; -import 'package:immich_mobile/providers/infrastructure/user.provider.dart'; -import 'package:immich_mobile/providers/memory.provider.dart'; -import 'package:immich_mobile/providers/server_info.provider.dart'; - -class TabNavigationObserver extends AutoRouterObserver { - /// Riverpod Instance - final WidgetRef ref; - - TabNavigationObserver({ - required this.ref, - }); - - @override - Future didChangeTabRoute( - TabPageRoute route, - TabPageRoute previousRoute, - ) async { - if (route.name == 'HomeRoute') { - ref.invalidate(memoryFutureProvider); - Future(() => ref.read(assetProvider.notifier).getAllAsset()); - - // Update user info - try { - ref.read(userServiceProvider).refreshMyUser(); - ref.read(serverInfoProvider.notifier).getServerVersion(); - } catch (e) { - debugPrint("Error refreshing user info $e"); - } - } - } -} diff --git a/mobile/lib/services/asset.service.dart b/mobile/lib/services/asset.service.dart index 8a24e72fbe..a52d6e6368 100644 --- a/mobile/lib/services/asset.service.dart +++ b/mobile/lib/services/asset.service.dart @@ -3,6 +3,7 @@ import 'dart:async'; import 'package:collection/collection.dart'; import 'package:flutter/material.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/constants/enums.dart'; import 'package:immich_mobile/domain/interfaces/exif.interface.dart'; import 'package:immich_mobile/domain/interfaces/user.interface.dart'; import 'package:immich_mobile/domain/models/user.model.dart'; @@ -239,6 +240,9 @@ class AssetService { for (var element in assets) { element.isArchived = isArchived; + element.visibility = isArchived + ? 
AssetVisibilityEnum.archive + : AssetVisibilityEnum.timeline; } await _syncService.upsertAssetsWithExif(assets); @@ -458,6 +462,7 @@ class AssetService { bool shouldDeletePermanently = false, }) async { final candidates = assets.where((a) => a.isRemote); + if (candidates.isEmpty) { return; } @@ -475,6 +480,7 @@ class AssetService { .where((asset) => asset.storage == AssetState.merged) .map((asset) { asset.remoteId = null; + asset.visibility = AssetVisibilityEnum.timeline; return asset; }) : assets.where((asset) => asset.isRemote).map((asset) { @@ -529,4 +535,21 @@ class AssetService { final me = _userService.getMyUser(); return _assetRepository.getMotionAssets(me.id); } + + Future setVisibility( + List assets, + AssetVisibilityEnum visibility, + ) async { + await _assetApiRepository.updateVisibility( + assets.map((asset) => asset.remoteId!).toList(), + visibility, + ); + + final updatedAssets = assets.map((asset) { + asset.visibility = visibility; + return asset; + }).toList(); + + await _assetRepository.updateAll(updatedAssets); + } } diff --git a/mobile/lib/services/auth.service.dart b/mobile/lib/services/auth.service.dart index ec053c078b..41709b714c 100644 --- a/mobile/lib/services/auth.service.dart +++ b/mobile/lib/services/auth.service.dart @@ -201,4 +201,16 @@ class AuthService { return null; } + + Future unlockPinCode(String pinCode) { + return _authApiRepository.unlockPinCode(pinCode); + } + + Future lockPinCode() { + return _authApiRepository.lockPinCode(); + } + + Future setupPinCode(String pinCode) { + return _authApiRepository.setupPinCode(pinCode); + } } diff --git a/mobile/lib/services/local_auth.service.dart b/mobile/lib/services/local_auth.service.dart new file mode 100644 index 0000000000..f797e9065a --- /dev/null +++ b/mobile/lib/services/local_auth.service.dart @@ -0,0 +1,26 @@ +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/interfaces/biometric.interface.dart'; +import 'package:immich_mobile/models/auth/biometric_status.model.dart'; +import 'package:immich_mobile/repositories/biometric.repository.dart'; + +final localAuthServiceProvider = Provider( + (ref) => LocalAuthService( + ref.watch(biometricRepositoryProvider), + ), +); + +class LocalAuthService { + // final _log = Logger("LocalAuthService"); + + final IBiometricRepository _biometricRepository; + + LocalAuthService(this._biometricRepository); + + Future getStatus() { + return _biometricRepository.getStatus(); + } + + Future authenticate([String? 
message]) async { + return _biometricRepository.authenticate(message); + } +} diff --git a/mobile/lib/services/memory.service.dart b/mobile/lib/services/memory.service.dart index efd38f1140..d6c44278c7 100644 --- a/mobile/lib/services/memory.service.dart +++ b/mobile/lib/services/memory.service.dart @@ -1,10 +1,10 @@ -import 'package:easy_localization/easy_localization.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart'; import 'package:immich_mobile/interfaces/asset.interface.dart'; import 'package:immich_mobile/models/memories/memory.model.dart'; import 'package:immich_mobile/providers/api.provider.dart'; import 'package:immich_mobile/repositories/asset.repository.dart'; import 'package:immich_mobile/services/api.service.dart'; +import 'package:immich_mobile/utils/translation.dart'; import 'package:logging/logging.dart'; final memoryServiceProvider = StateProvider((ref) { @@ -40,10 +40,7 @@ class MemoryService { .getAllByRemoteId(memory.assets.map((e) => e.id)); final yearsAgo = now.year - memory.data.year; if (dbAssets.isNotEmpty) { - final String title = yearsAgo <= 1 - ? 'memories_year_ago'.tr() - : 'memories_years_ago' - .tr(namedArgs: {'years': yearsAgo.toString()}); + final String title = t('years_ago', {'years': yearsAgo.toString()}); memories.add( Memory( title: title, diff --git a/mobile/lib/services/secure_storage.service.dart b/mobile/lib/services/secure_storage.service.dart new file mode 100644 index 0000000000..77803f29c3 --- /dev/null +++ b/mobile/lib/services/secure_storage.service.dart @@ -0,0 +1,29 @@ +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/interfaces/secure_storage.interface.dart'; +import 'package:immich_mobile/repositories/secure_storage.repository.dart'; + +final secureStorageServiceProvider = Provider( + (ref) => SecureStorageService( + ref.watch(secureStorageRepositoryProvider), + ), +); + +class SecureStorageService { + // final _log = Logger("LocalAuthService"); + + final ISecureStorageRepository _secureStorageRepository; + + SecureStorageService(this._secureStorageRepository); + + Future write(String key, String value) async { + await _secureStorageRepository.write(key, value); + } + + Future delete(String key) async { + await _secureStorageRepository.delete(key); + } + + Future read(String key) async { + return _secureStorageRepository.read(key); + } +} diff --git a/mobile/lib/services/timeline.service.dart b/mobile/lib/services/timeline.service.dart index 4e91d27a7c..7ecad43ca7 100644 --- a/mobile/lib/services/timeline.service.dart +++ b/mobile/lib/services/timeline.service.dart @@ -105,4 +105,13 @@ class TimelineService { return GroupAssetsBy .values[_appSettingsService.getSetting(AppSettingsEnum.groupAssetsBy)]; } + + Stream watchLockedTimelineProvider() async* { + final user = _userService.getMyUser(); + + yield* _timelineRepository.watchLockedTimeline( + user.id, + _getGroupByOption(), + ); + } } diff --git a/mobile/lib/theme/theme_data.dart b/mobile/lib/theme/theme_data.dart index 2a593ffb38..a351b09093 100644 --- a/mobile/lib/theme/theme_data.dart +++ b/mobile/lib/theme/theme_data.dart @@ -42,7 +42,7 @@ ThemeData getThemeData({ titleTextStyle: TextStyle( color: colorScheme.primary, fontFamily: _getFontFamilyFromLocale(locale), - fontWeight: FontWeight.bold, + fontWeight: FontWeight.w600, fontSize: 18, ), backgroundColor: @@ -54,28 +54,28 @@ ThemeData getThemeData({ ), textTheme: const TextTheme( displayLarge: TextStyle( - fontSize: 26, - fontWeight: FontWeight.bold, + fontSize: 18, + fontWeight: 
FontWeight.w600, ), displayMedium: TextStyle( fontSize: 14, - fontWeight: FontWeight.bold, + fontWeight: FontWeight.w600, ), displaySmall: TextStyle( fontSize: 12, - fontWeight: FontWeight.bold, + fontWeight: FontWeight.w600, ), titleSmall: TextStyle( fontSize: 16.0, - fontWeight: FontWeight.bold, + fontWeight: FontWeight.w600, ), titleMedium: TextStyle( fontSize: 18.0, - fontWeight: FontWeight.bold, + fontWeight: FontWeight.w600, ), titleLarge: TextStyle( fontSize: 26.0, - fontWeight: FontWeight.bold, + fontWeight: FontWeight.w600, ), ), elevatedButtonTheme: ElevatedButtonThemeData( diff --git a/mobile/lib/utils/migration.dart b/mobile/lib/utils/migration.dart index 6a09f79ce2..4519c6d803 100644 --- a/mobile/lib/utils/migration.dart +++ b/mobile/lib/utils/migration.dart @@ -20,7 +20,7 @@ import 'package:isar/isar.dart'; // ignore: import_rule_photo_manager import 'package:photo_manager/photo_manager.dart'; -const int targetVersion = 10; +const int targetVersion = 11; Future migrateDatabaseIfNeeded(Isar db) async { final int version = Store.get(StoreKey.version, targetVersion); diff --git a/mobile/lib/utils/openapi_patching.dart b/mobile/lib/utils/openapi_patching.dart index d054749b1e..1ffe05c781 100644 --- a/mobile/lib/utils/openapi_patching.dart +++ b/mobile/lib/utils/openapi_patching.dart @@ -32,6 +32,11 @@ dynamic upgradeDto(dynamic value, String targetType) { addDefault(value, 'visibility', AssetVisibility.timeline); } break; + case 'AssetResponseDto': + if (value is Map) { + addDefault(value, 'visibility', 'timeline'); + } + break; case 'UserAdminResponseDto': if (value is Map) { addDefault(value, 'profileChangedAt', DateTime.now().toIso8601String()); diff --git a/mobile/lib/utils/selection_handlers.dart b/mobile/lib/utils/selection_handlers.dart index c63d819153..1ae583bedd 100644 --- a/mobile/lib/utils/selection_handlers.dart +++ b/mobile/lib/utils/selection_handlers.dart @@ -2,6 +2,7 @@ import 'package:easy_localization/easy_localization.dart'; import 'package:flutter/material.dart'; import 'package:fluttertoast/fluttertoast.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/constants/enums.dart'; import 'package:immich_mobile/entities/asset.entity.dart'; import 'package:immich_mobile/extensions/asset_extensions.dart'; import 'package:immich_mobile/extensions/build_context_extensions.dart'; @@ -157,3 +158,29 @@ Future handleEditLocation( ref.read(assetServiceProvider).changeLocation(selection.toList(), location); } + +Future handleSetAssetsVisibility( + WidgetRef ref, + BuildContext context, + AssetVisibilityEnum visibility, + List selection, { + ToastGravity toastGravity = ToastGravity.BOTTOM, +}) async { + if (selection.isNotEmpty) { + await ref + .watch(assetProvider.notifier) + .setLockedView(selection, visibility); + + final assetOrAssets = selection.length > 1 ? 'assets' : 'asset'; + final toastMessage = visibility == AssetVisibilityEnum.locked + ? 
'Added ${selection.length} $assetOrAssets to locked folder' + : 'Removed ${selection.length} $assetOrAssets from locked folder'; + if (context.mounted) { + ImmichToast.show( + context: context, + msg: toastMessage, + gravity: ToastGravity.BOTTOM, + ); + } + } +} diff --git a/mobile/lib/widgets/asset_grid/control_bottom_app_bar.dart b/mobile/lib/widgets/asset_grid/control_bottom_app_bar.dart index 7a049fa7fd..892e7e5b8a 100644 --- a/mobile/lib/widgets/asset_grid/control_bottom_app_bar.dart +++ b/mobile/lib/widgets/asset_grid/control_bottom_app_bar.dart @@ -6,6 +6,7 @@ import 'package:flutter_hooks/flutter_hooks.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart'; import 'package:immich_mobile/extensions/build_context_extensions.dart'; import 'package:immich_mobile/providers/album/album.provider.dart'; +import 'package:immich_mobile/providers/routes.provider.dart'; import 'package:immich_mobile/widgets/album/add_to_album_sliverlist.dart'; import 'package:immich_mobile/models/asset_selection_state.dart'; import 'package:immich_mobile/widgets/asset_grid/delete_dialog.dart'; @@ -37,6 +38,7 @@ class ControlBottomAppBar extends HookConsumerWidget { final void Function()? onEditTime; final void Function()? onEditLocation; final void Function()? onRemoveFromAlbum; + final void Function()? onToggleLocked; final bool enabled; final bool unfavorite; @@ -58,6 +60,7 @@ class ControlBottomAppBar extends HookConsumerWidget { this.onEditTime, this.onEditLocation, this.onRemoveFromAlbum, + this.onToggleLocked, this.selectionAssetState = const AssetSelectionState(), this.enabled = true, this.unarchive = false, @@ -77,6 +80,7 @@ class ControlBottomAppBar extends HookConsumerWidget { ref.watch(albumProvider).where((a) => a.shared).toList(); const bottomPadding = 0.20; final scrollController = useDraggableScrollController(); + final isInLockedView = ref.watch(inLockedViewProvider); void minimize() { scrollController.animateTo( @@ -133,11 +137,12 @@ class ControlBottomAppBar extends HookConsumerWidget { label: "share".tr(), onPressed: enabled ? () => onShare(true) : null, ), - ControlBoxButton( - iconData: Icons.link_rounded, - label: "control_bottom_app_bar_share_link".tr(), - onPressed: enabled ? () => onShare(false) : null, - ), + if (!isInLockedView) + ControlBoxButton( + iconData: Icons.link_rounded, + label: "share_link".tr(), + onPressed: enabled ? () => onShare(false) : null, + ), if (hasRemote && onArchive != null) ControlBoxButton( iconData: @@ -153,7 +158,7 @@ class ControlBottomAppBar extends HookConsumerWidget { label: (unfavorite ? "unfavorite" : "favorite").tr(), onPressed: enabled ? onFavorite : null, ), - if (hasLocal && hasRemote && onDelete != null) + if (hasLocal && hasRemote && onDelete != null && !isInLockedView) ConstrainedBox( constraints: const BoxConstraints(maxWidth: 90), child: ControlBoxButton( @@ -166,7 +171,7 @@ class ControlBottomAppBar extends HookConsumerWidget { enabled ? () => showForceDeleteDialog(onDelete!) 
: null, ), ), - if (hasRemote && onDeleteServer != null) + if (hasRemote && onDeleteServer != null && !isInLockedView) ConstrainedBox( constraints: const BoxConstraints(maxWidth: 85), child: ControlBoxButton( @@ -189,9 +194,23 @@ class ControlBottomAppBar extends HookConsumerWidget { : null, ), ), - if (hasLocal && onDeleteLocal != null) + if (isInLockedView) ConstrainedBox( - constraints: const BoxConstraints(maxWidth: 85), + constraints: const BoxConstraints(maxWidth: 110), + child: ControlBoxButton( + iconData: Icons.delete_forever, + label: "delete_dialog_title".tr(), + onPressed: enabled + ? () => showForceDeleteDialog( + onDeleteServer!, + alertMsg: "delete_dialog_alert_remote", + ) + : null, + ), + ), + if (hasLocal && onDeleteLocal != null && !isInLockedView) + ConstrainedBox( + constraints: const BoxConstraints(maxWidth: 95), child: ControlBoxButton( iconData: Icons.no_cell_outlined, label: "control_bottom_app_bar_delete_from_local".tr(), @@ -231,6 +250,19 @@ class ControlBottomAppBar extends HookConsumerWidget { onPressed: enabled ? onEditLocation : null, ), ), + if (hasRemote) + ConstrainedBox( + constraints: const BoxConstraints(maxWidth: 100), + child: ControlBoxButton( + iconData: isInLockedView + ? Icons.lock_open_rounded + : Icons.lock_outline_rounded, + label: isInLockedView + ? "remove_from_locked_folder".tr() + : "move_to_locked_folder".tr(), + onPressed: enabled ? onToggleLocked : null, + ), + ), if (!selectionAssetState.hasLocal && selectionAssetState.selectedCount > 1 && onStack != null) @@ -269,20 +301,40 @@ class ControlBottomAppBar extends HookConsumerWidget { ]; } + getInitialSize() { + if (isInLockedView) { + return 0.20; + } + if (hasRemote) { + return 0.35; + } + return bottomPadding; + } + + getMaxChildSize() { + if (isInLockedView) { + return 0.20; + } + if (hasRemote) { + return 0.65; + } + return bottomPadding; + } + return DraggableScrollableSheet( controller: scrollController, - initialChildSize: hasRemote ? 0.35 : bottomPadding, + initialChildSize: getInitialSize(), minChildSize: bottomPadding, - maxChildSize: hasRemote ? 0.65 : bottomPadding, + maxChildSize: getMaxChildSize(), snap: true, builder: ( BuildContext context, ScrollController scrollController, ) { return Card( - color: context.colorScheme.surfaceContainerLow, - surfaceTintColor: Colors.transparent, - elevation: 18.0, + color: context.colorScheme.surfaceContainerHigh, + surfaceTintColor: context.colorScheme.surfaceContainerHigh, + elevation: 6.0, shape: const RoundedRectangleBorder( borderRadius: BorderRadius.only( topLeft: Radius.circular(12), @@ -300,27 +352,27 @@ class ControlBottomAppBar extends HookConsumerWidget { const CustomDraggingHandle(), const SizedBox(height: 12), SizedBox( - height: 100, + height: 120, child: ListView( shrinkWrap: true, scrollDirection: Axis.horizontal, children: renderActionButtons(), ), ), - if (hasRemote) + if (hasRemote && !isInLockedView) ...[ const Divider( indent: 16, endIndent: 16, thickness: 1, ), - if (hasRemote) _AddToAlbumTitleRow( onCreateNewAlbum: enabled ? 
onCreateNewAlbum : null, ), + ], ], ), ), - if (hasRemote) + if (hasRemote && !isInLockedView) SliverPadding( padding: const EdgeInsets.symmetric(horizontal: 16), sliver: AddToAlbumSliverList( @@ -352,12 +404,9 @@ class _AddToAlbumTitleRow extends StatelessWidget { child: Row( mainAxisAlignment: MainAxisAlignment.spaceBetween, children: [ - const Text( + Text( "add_to_album", - style: TextStyle( - fontSize: 14, - fontWeight: FontWeight.bold, - ), + style: context.textTheme.titleSmall, ).tr(), TextButton.icon( onPressed: onCreateNewAlbum, diff --git a/mobile/lib/widgets/asset_grid/multiselect_grid.dart b/mobile/lib/widgets/asset_grid/multiselect_grid.dart index ceaee581d2..8cc725ab77 100644 --- a/mobile/lib/widgets/asset_grid/multiselect_grid.dart +++ b/mobile/lib/widgets/asset_grid/multiselect_grid.dart @@ -7,6 +7,7 @@ import 'package:flutter/material.dart'; import 'package:flutter_hooks/flutter_hooks.dart'; import 'package:fluttertoast/fluttertoast.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/constants/enums.dart'; import 'package:immich_mobile/entities/album.entity.dart'; import 'package:immich_mobile/entities/asset.entity.dart'; import 'package:immich_mobile/extensions/collection_extensions.dart'; @@ -15,6 +16,7 @@ import 'package:immich_mobile/providers/album/album.provider.dart'; import 'package:immich_mobile/providers/asset.provider.dart'; import 'package:immich_mobile/providers/backup/manual_upload.provider.dart'; import 'package:immich_mobile/providers/multiselect.provider.dart'; +import 'package:immich_mobile/providers/routes.provider.dart'; import 'package:immich_mobile/providers/user.provider.dart'; import 'package:immich_mobile/routing/router.dart'; import 'package:immich_mobile/services/album.service.dart'; @@ -395,6 +397,32 @@ class MultiselectGrid extends HookConsumerWidget { } } + void onToggleLockedVisibility() async { + processing.value = true; + try { + final remoteAssets = ownedRemoteSelection( + localErrorMessage: 'home_page_locked_error_local'.tr(), + ownerErrorMessage: 'home_page_locked_error_partner'.tr(), + ); + if (remoteAssets.isNotEmpty) { + final isInLockedView = ref.read(inLockedViewProvider); + final visibility = isInLockedView + ? AssetVisibilityEnum.timeline + : AssetVisibilityEnum.locked; + + await handleSetAssetsVisibility( + ref, + context, + visibility, + remoteAssets.toList(), + ); + } + } finally { + processing.value = false; + selectionEnabledHook.value = false; + } + } + Future Function() wrapLongRunningFun( Future Function() fun, { bool showOverlay = true, @@ -460,6 +488,7 @@ class MultiselectGrid extends HookConsumerWidget { onEditLocation: editEnabled ? onEditLocation : null, unfavorite: unfavorite, unarchive: unarchive, + onToggleLocked: onToggleLockedVisibility, onRemoveFromAlbum: onRemoveFromAlbum != null ? 
wrapLongRunningFun( () => onRemoveFromAlbum!(selection.value), diff --git a/mobile/lib/widgets/asset_viewer/bottom_gallery_bar.dart b/mobile/lib/widgets/asset_viewer/bottom_gallery_bar.dart index 8bfcdc12ca..1ff8596c43 100644 --- a/mobile/lib/widgets/asset_viewer/bottom_gallery_bar.dart +++ b/mobile/lib/widgets/asset_viewer/bottom_gallery_bar.dart @@ -15,6 +15,7 @@ import 'package:immich_mobile/providers/asset_viewer/asset_stack.provider.dart'; import 'package:immich_mobile/providers/asset_viewer/current_asset.provider.dart'; import 'package:immich_mobile/providers/asset_viewer/download.provider.dart'; import 'package:immich_mobile/providers/asset_viewer/show_controls.provider.dart'; +import 'package:immich_mobile/providers/routes.provider.dart'; import 'package:immich_mobile/providers/server_info.provider.dart'; import 'package:immich_mobile/providers/user.provider.dart'; import 'package:immich_mobile/routing/router.dart'; @@ -46,6 +47,7 @@ class BottomGalleryBar extends ConsumerWidget { @override Widget build(BuildContext context, WidgetRef ref) { + final isInLockedView = ref.watch(inLockedViewProvider); final asset = ref.watch(currentAssetProvider); if (asset == null) { return const SizedBox(); @@ -277,7 +279,7 @@ class BottomGalleryBar extends ConsumerWidget { tooltip: 'share'.tr(), ): (_) => shareAsset(), }, - if (asset.isImage) + if (asset.isImage && !isInLockedView) { BottomNavigationBarItem( icon: const Icon(Icons.tune_outlined), @@ -285,7 +287,7 @@ class BottomGalleryBar extends ConsumerWidget { tooltip: 'edit'.tr(), ): (_) => handleEdit(), }, - if (isOwner) + if (isOwner && !isInLockedView) { asset.isArchived ? BottomNavigationBarItem( @@ -299,7 +301,7 @@ class BottomGalleryBar extends ConsumerWidget { tooltip: 'archive'.tr(), ): (_) => handleArchive(), }, - if (isOwner && asset.stackCount > 0) + if (isOwner && asset.stackCount > 0 && !isInLockedView) { BottomNavigationBarItem( icon: const Icon(Icons.burst_mode_outlined), diff --git a/mobile/lib/widgets/asset_viewer/top_control_app_bar.dart b/mobile/lib/widgets/asset_viewer/top_control_app_bar.dart index 937d1adf32..64cb1c619f 100644 --- a/mobile/lib/widgets/asset_viewer/top_control_app_bar.dart +++ b/mobile/lib/widgets/asset_viewer/top_control_app_bar.dart @@ -5,6 +5,7 @@ import 'package:immich_mobile/providers/activity_statistics.provider.dart'; import 'package:immich_mobile/providers/album/current_album.provider.dart'; import 'package:immich_mobile/entities/asset.entity.dart'; import 'package:immich_mobile/providers/asset.provider.dart'; +import 'package:immich_mobile/providers/routes.provider.dart'; import 'package:immich_mobile/providers/tab.provider.dart'; import 'package:immich_mobile/widgets/asset_viewer/motion_photo_button.dart'; import 'package:immich_mobile/providers/asset_viewer/current_asset.provider.dart'; @@ -39,6 +40,7 @@ class TopControlAppBar extends HookConsumerWidget { @override Widget build(BuildContext context, WidgetRef ref) { + final isInLockedView = ref.watch(inLockedViewProvider); const double iconSize = 22.0; final a = ref.watch(assetWatcher(asset)).value ?? asset; final album = ref.watch(currentAlbumProvider); @@ -178,15 +180,22 @@ class TopControlAppBar extends HookConsumerWidget { shape: const Border(), actions: [ if (asset.isRemote && isOwner) buildFavoriteButton(a), - if (isOwner && !isInHomePage && !(isInTrash ?? false)) + if (isOwner && + !isInHomePage && + !(isInTrash ?? 
false) && + !isInLockedView) buildLocateButton(), if (asset.livePhotoVideoId != null) const MotionPhotoButton(), if (asset.isLocal && !asset.isRemote) buildUploadButton(), if (asset.isRemote && !asset.isLocal && isOwner) buildDownloadButton(), - if (asset.isRemote && (isOwner || isPartner) && !asset.isTrashed) + if (asset.isRemote && + (isOwner || isPartner) && + !asset.isTrashed && + !isInLockedView) buildAddToAlbumButton(), if (asset.isTrashed) buildRestoreButton(), - if (album != null && album.shared) buildActivitiesButton(), + if (album != null && album.shared && !isInLockedView) + buildActivitiesButton(), buildMoreInfoButton(), ], ); diff --git a/mobile/lib/widgets/common/drag_sheet.dart b/mobile/lib/widgets/common/drag_sheet.dart index 45addd0c2e..923050bcc6 100644 --- a/mobile/lib/widgets/common/drag_sheet.dart +++ b/mobile/lib/widgets/common/drag_sheet.dart @@ -35,7 +35,9 @@ class ControlBoxButton extends StatelessWidget { Widget build(BuildContext context) { return MaterialButton( padding: const EdgeInsets.all(10), - shape: const CircleBorder(), + shape: const RoundedRectangleBorder( + borderRadius: BorderRadius.all(Radius.circular(20)), + ), onPressed: onPressed, onLongPress: onLongPressed, minWidth: 75.0, @@ -47,8 +49,8 @@ class ControlBoxButton extends StatelessWidget { const SizedBox(height: 8), Text( label, - style: const TextStyle(fontSize: 12.0), - maxLines: 2, + style: const TextStyle(fontSize: 14.0, fontWeight: FontWeight.w400), + maxLines: 3, textAlign: TextAlign.center, ), ], diff --git a/mobile/lib/widgets/common/immich_toast.dart b/mobile/lib/widgets/common/immich_toast.dart index 7f3207032b..945568a74c 100644 --- a/mobile/lib/widgets/common/immich_toast.dart +++ b/mobile/lib/widgets/common/immich_toast.dart @@ -40,7 +40,7 @@ class ImmichToast { child: Container( padding: const EdgeInsets.symmetric(horizontal: 24.0, vertical: 12.0), decoration: BoxDecoration( - borderRadius: BorderRadius.circular(5.0), + borderRadius: const BorderRadius.all(Radius.circular(16.0)), color: context.colorScheme.surfaceContainer, border: Border.all( color: context.colorScheme.outline.withValues(alpha: .5), @@ -59,14 +59,23 @@ class ImmichToast { msg, style: TextStyle( color: getColor(toastType, context), - fontWeight: FontWeight.bold, - fontSize: 15, + fontWeight: FontWeight.w600, + fontSize: 14, ), ), ), ], ), ), + positionedToastBuilder: (context, child, gravity) { + return Positioned( + top: gravity == ToastGravity.TOP ? 150 : null, + bottom: gravity == ToastGravity.BOTTOM ? 150 : null, + left: MediaQuery.of(context).size.width / 2 - 150, + right: MediaQuery.of(context).size.width / 2 - 150, + child: child, + ); + }, gravity: gravity, toastDuration: Duration(seconds: durationInSecond), ); diff --git a/mobile/lib/widgets/forms/pin_input.dart b/mobile/lib/widgets/forms/pin_input.dart new file mode 100644 index 0000000000..1588a65c60 --- /dev/null +++ b/mobile/lib/widgets/forms/pin_input.dart @@ -0,0 +1,124 @@ +import 'package:flutter/material.dart'; +import 'package:immich_mobile/extensions/build_context_extensions.dart'; +import 'package:pinput/pinput.dart'; + +class PinInput extends StatelessWidget { + final Function(String)? onCompleted; + final Function(String)? onChanged; + final int? length; + final bool? obscureText; + final bool? autoFocus; + final bool? hasError; + final String? label; + final TextEditingController? 
controller; + + const PinInput({ + super.key, + this.onCompleted, + this.onChanged, + this.length, + this.obscureText, + this.autoFocus, + this.hasError, + this.label, + this.controller, + }); + + @override + Widget build(BuildContext context) { + getPinSize() { + final minimumPadding = 18.0; + final gapWidth = 3.0; + final screenWidth = context.width; + final pinWidth = + (screenWidth - (minimumPadding * 2) - (gapWidth * 5)) / (length ?? 6); + + if (pinWidth > 60) { + return const Size(60, 64); + } + + final pinHeight = pinWidth / (60 / 64); + return Size(pinWidth, pinHeight); + } + + final defaultPinTheme = PinTheme( + width: getPinSize().width, + height: getPinSize().height, + textStyle: TextStyle( + fontSize: 24, + color: context.colorScheme.onSurface, + fontFamily: 'Overpass Mono', + ), + decoration: BoxDecoration( + borderRadius: const BorderRadius.all(Radius.circular(19)), + border: Border.all(color: context.colorScheme.surfaceBright), + color: context.colorScheme.surfaceContainerHigh, + ), + ); + + return Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + if (label != null) ...[ + Text( + label!, + style: context.textTheme.displayLarge + ?.copyWith(color: context.colorScheme.onSurface.withAlpha(200)), + ), + const SizedBox(height: 4), + ], + Pinput( + controller: controller, + forceErrorState: hasError ?? false, + autofocus: autoFocus ?? false, + obscureText: obscureText ?? false, + obscuringWidget: Icon( + Icons.vpn_key_rounded, + color: context.primaryColor, + size: 20, + ), + separatorBuilder: (index) => const SizedBox( + height: 64, + width: 3, + ), + cursor: Column( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + Container( + margin: const EdgeInsets.only(bottom: 9), + width: 18, + height: 2, + color: context.primaryColor, + ), + ], + ), + defaultPinTheme: defaultPinTheme, + focusedPinTheme: defaultPinTheme.copyWith( + decoration: BoxDecoration( + borderRadius: const BorderRadius.all(Radius.circular(19)), + border: Border.all( + color: context.primaryColor.withValues(alpha: 0.5), + width: 2, + ), + color: context.colorScheme.surfaceContainerHigh, + ), + ), + errorPinTheme: defaultPinTheme.copyWith( + decoration: BoxDecoration( + color: context.colorScheme.error.withAlpha(15), + borderRadius: const BorderRadius.all(Radius.circular(19)), + border: Border.all( + color: context.colorScheme.error.withAlpha(100), + width: 2, + ), + ), + ), + pinputAutovalidateMode: PinputAutovalidateMode.onSubmit, + length: length ?? 
6, + onChanged: onChanged, + onCompleted: onCompleted, + ), + ], + ); + } +} diff --git a/mobile/lib/widgets/forms/pin_registration_form.dart b/mobile/lib/widgets/forms/pin_registration_form.dart new file mode 100644 index 0000000000..c3cfd3a864 --- /dev/null +++ b/mobile/lib/widgets/forms/pin_registration_form.dart @@ -0,0 +1,128 @@ +import 'package:easy_localization/easy_localization.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_hooks/flutter_hooks.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/extensions/build_context_extensions.dart'; +import 'package:immich_mobile/providers/auth.provider.dart'; +import 'package:immich_mobile/widgets/forms/pin_input.dart'; + +class PinRegistrationForm extends HookConsumerWidget { + final Function() onDone; + + const PinRegistrationForm({ + super.key, + required this.onDone, + }); + + @override + Widget build(BuildContext context, WidgetRef ref) { + final hasError = useState(false); + final newPinCodeController = useTextEditingController(); + final confirmPinCodeController = useTextEditingController(); + + bool validatePinCode() { + if (confirmPinCodeController.text.length != 6) { + return false; + } + + if (newPinCodeController.text != confirmPinCodeController.text) { + return false; + } + + return true; + } + + createNewPinCode() async { + final isValid = validatePinCode(); + if (!isValid) { + hasError.value = true; + return; + } + + try { + await ref.read(authProvider.notifier).setupPinCode( + newPinCodeController.text, + ); + + onDone(); + } catch (error) { + hasError.value = true; + context.showSnackBar( + SnackBar(content: Text(error.toString())), + ); + } + } + + return Form( + child: Column( + children: [ + Icon( + Icons.pin_outlined, + size: 64, + color: context.primaryColor, + ), + const SizedBox(height: 32), + SizedBox( + width: context.width * 0.7, + child: Text( + 'setup_pin_code'.tr(), + style: context.textTheme.labelLarge!.copyWith( + fontSize: 24, + ), + textAlign: TextAlign.center, + ), + ), + SizedBox( + width: context.width * 0.8, + child: Text( + 'new_pin_code_subtitle'.tr(), + style: context.textTheme.bodyLarge!.copyWith( + fontSize: 16, + ), + textAlign: TextAlign.center, + ), + ), + const SizedBox(height: 32), + PinInput( + controller: newPinCodeController, + label: 'new_pin_code'.tr(), + length: 6, + autoFocus: true, + hasError: hasError.value, + onChanged: (input) { + if (input.length < 6) { + hasError.value = false; + } + }, + ), + const SizedBox(height: 32), + PinInput( + controller: confirmPinCodeController, + label: 'confirm_new_pin_code'.tr(), + length: 6, + hasError: hasError.value, + onChanged: (input) { + if (input.length < 6) { + hasError.value = false; + } + }, + ), + const SizedBox(height: 48), + Padding( + padding: const EdgeInsets.symmetric(horizontal: 24.0), + child: Row( + children: [ + Expanded( + child: ElevatedButton( + onPressed: createNewPinCode, + child: Text('create'.tr()), + ), + ), + ], + ), + ), + ], + ), + ); + } +} diff --git a/mobile/lib/widgets/forms/pin_verification_form.dart b/mobile/lib/widgets/forms/pin_verification_form.dart new file mode 100644 index 0000000000..f4ebf4272f --- /dev/null +++ b/mobile/lib/widgets/forms/pin_verification_form.dart @@ -0,0 +1,94 @@ +import 'package:easy_localization/easy_localization.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_hooks/flutter_hooks.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 
'package:immich_mobile/extensions/build_context_extensions.dart'; +import 'package:immich_mobile/providers/auth.provider.dart'; +import 'package:immich_mobile/widgets/forms/pin_input.dart'; + +class PinVerificationForm extends HookConsumerWidget { + final Function(String) onSuccess; + final VoidCallback? onError; + final bool? autoFocus; + final String? description; + final IconData? icon; + final IconData? successIcon; + + const PinVerificationForm({ + super.key, + required this.onSuccess, + this.onError, + this.autoFocus, + this.description, + this.icon, + this.successIcon, + }); + + @override + Widget build(BuildContext context, WidgetRef ref) { + final hasError = useState(false); + final isVerified = useState(false); + + verifyPin(String pinCode) async { + final isUnlocked = + await ref.read(authProvider.notifier).unlockPinCode(pinCode); + + if (isUnlocked) { + isVerified.value = true; + + await Future.delayed(const Duration(seconds: 1)); + onSuccess(pinCode); + } else { + hasError.value = true; + onError?.call(); + } + } + + return Form( + child: Column( + children: [ + AnimatedSwitcher( + duration: const Duration(milliseconds: 200), + child: isVerified.value + ? Icon( + successIcon ?? Icons.lock_open_rounded, + size: 64, + color: Colors.green[300], + ) + : Icon( + icon ?? Icons.lock_outline_rounded, + size: 64, + color: hasError.value + ? context.colorScheme.error + : context.primaryColor, + ), + ), + const SizedBox(height: 36), + SizedBox( + width: context.width * 0.7, + child: Text( + description ?? 'enter_your_pin_code_subtitle'.tr(), + style: context.textTheme.labelLarge!.copyWith( + fontSize: 18, + ), + textAlign: TextAlign.center, + ), + ), + const SizedBox(height: 18), + PinInput( + obscureText: true, + autoFocus: autoFocus, + hasError: hasError.value, + length: 6, + onChanged: (pinCode) { + if (pinCode.length < 6) { + hasError.value = false; + } + }, + onCompleted: verifyPin, + ), + ], + ), + ); + } +} diff --git a/mobile/pubspec.lock b/mobile/pubspec.lock index 7e490edd25..3df4e4e8a9 100644 --- a/mobile/pubspec.lock +++ b/mobile/pubspec.lock @@ -621,6 +621,54 @@ packages: url: "https://pub.dev" source: hosted version: "2.6.1" + flutter_secure_storage: + dependency: "direct main" + description: + name: flutter_secure_storage + sha256: "9cad52d75ebc511adfae3d447d5d13da15a55a92c9410e50f67335b6d21d16ea" + url: "https://pub.dev" + source: hosted + version: "9.2.4" + flutter_secure_storage_linux: + dependency: transitive + description: + name: flutter_secure_storage_linux + sha256: be76c1d24a97d0b98f8b54bce6b481a380a6590df992d0098f868ad54dc8f688 + url: "https://pub.dev" + source: hosted + version: "1.2.3" + flutter_secure_storage_macos: + dependency: transitive + description: + name: flutter_secure_storage_macos + sha256: "6c0a2795a2d1de26ae202a0d78527d163f4acbb11cde4c75c670f3a0fc064247" + url: "https://pub.dev" + source: hosted + version: "3.1.3" + flutter_secure_storage_platform_interface: + dependency: transitive + description: + name: flutter_secure_storage_platform_interface + sha256: cf91ad32ce5adef6fba4d736a542baca9daf3beac4db2d04be350b87f69ac4a8 + url: "https://pub.dev" + source: hosted + version: "1.1.2" + flutter_secure_storage_web: + dependency: transitive + description: + name: flutter_secure_storage_web + sha256: f4ebff989b4f07b2656fb16b47852c0aab9fed9b4ec1c70103368337bc1886a9 + url: "https://pub.dev" + source: hosted + version: "1.2.1" + flutter_secure_storage_windows: + dependency: transitive + description: + name: flutter_secure_storage_windows + sha256: 
b20b07cb5ed4ed74fc567b78a72936203f587eba460af1df11281c9326cd3709 + url: "https://pub.dev" + source: hosted + version: "3.1.2" flutter_svg: dependency: "direct main" description: @@ -976,6 +1024,46 @@ packages: url: "https://pub.dev" source: hosted version: "5.1.1" + local_auth: + dependency: "direct main" + description: + name: local_auth + sha256: "434d854cf478f17f12ab29a76a02b3067f86a63a6d6c4eb8fbfdcfe4879c1b7b" + url: "https://pub.dev" + source: hosted + version: "2.3.0" + local_auth_android: + dependency: transitive + description: + name: local_auth_android + sha256: "63ad7ca6396290626dc0cb34725a939e4cfe965d80d36112f08d49cf13a8136e" + url: "https://pub.dev" + source: hosted + version: "1.0.49" + local_auth_darwin: + dependency: transitive + description: + name: local_auth_darwin + sha256: "630996cd7b7f28f5ab92432c4b35d055dd03a747bc319e5ffbb3c4806a3e50d2" + url: "https://pub.dev" + source: hosted + version: "1.4.3" + local_auth_platform_interface: + dependency: transitive + description: + name: local_auth_platform_interface + sha256: "1b842ff177a7068442eae093b64abe3592f816afd2a533c0ebcdbe40f9d2075a" + url: "https://pub.dev" + source: hosted + version: "1.0.10" + local_auth_windows: + dependency: transitive + description: + name: local_auth_windows + sha256: bc4e66a29b0fdf751aafbec923b5bed7ad6ed3614875d8151afe2578520b2ab5 + url: "https://pub.dev" + source: hosted + version: "1.0.11" logging: dependency: "direct main" description: @@ -1264,6 +1352,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.2.0" + pinput: + dependency: "direct main" + description: + name: pinput + sha256: "8a73be426a91fefec90a7f130763ca39772d547e92f19a827cf4aa02e323d35a" + url: "https://pub.dev" + source: hosted + version: "5.0.1" platform: dependency: transitive description: @@ -1741,6 +1837,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.2.2" + universal_platform: + dependency: transitive + description: + name: universal_platform + sha256: "64e16458a0ea9b99260ceb5467a214c1f298d647c659af1bff6d3bf82536b1ec" + url: "https://pub.dev" + source: hosted + version: "1.1.0" url_launcher: dependency: "direct main" description: diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index 08e9661d58..37c9ef7498 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -64,6 +64,9 @@ dependencies: uuid: ^4.5.1 wakelock_plus: ^1.2.10 worker_manager: ^7.2.3 + local_auth: ^2.3.0 + pinput: ^5.0.1 + flutter_secure_storage: ^9.2.4 native_video_player: git: From 0d773af6c343d423226a27e9414cef9c5862a169 Mon Sep 17 00:00:00 2001 From: Mert <101130780+mertalev@users.noreply.github.com> Date: Tue, 20 May 2025 09:36:43 -0400 Subject: [PATCH 02/35] feat: vectorchord (#18042) * wip auto-detect available extensions auto-recovery, fix reindexing check use original image for ml * set probes * update image for sql checker update images for gha * cascade * fix new instance * accurate dummy vector * simplify dummy * preexisiting pg docs * handle different db name * maybe fix sql generation * revert refreshfaces sql change * redundant switch * outdated message * update docker compose files * Update docs/docs/administration/postgres-standalone.md Co-authored-by: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> * tighten range * avoid always printing "vector reindexing complete" * remove nesting * use new images * add vchord to unit tests * debug e2e image * mention 1.107.2 in startup error * support new vchord versions --------- Co-authored-by: Daniel Dietzler 
<36593685+danieldietzler@users.noreply.github.com> --- .github/workflows/test.yml | 2 +- docker/docker-compose.dev.yml | 20 +- docker/docker-compose.prod.yml | 20 +- docker/docker-compose.yml | 22 +- .../administration/postgres-standalone.md | 77 ++++- docs/docs/features/searching.md | 2 +- docs/docs/install/environment-variables.md | 24 +- e2e/docker-compose.yml | 4 +- server/src/constants.ts | 17 +- server/src/decorators.ts | 2 +- server/src/dtos/env.dto.ts | 4 +- server/src/enum.ts | 1 + .../migrations/1700713871511-UsePgVectors.ts | 6 +- .../1700713994428-AddCLIPEmbeddingIndex.ts | 5 +- .../1700714033632-AddFaceEmbeddingIndex.ts | 5 +- .../1718486162779-AddFaceSearchRelation.ts | 7 +- server/src/queries/database.repository.sql | 8 - server/src/queries/person.repository.sql | 15 + server/src/queries/search.repository.sql | 24 +- .../repositories/config.repository.spec.ts | 2 +- server/src/repositories/config.repository.ts | 20 +- .../src/repositories/database.repository.ts | 306 +++++++++++++----- server/src/repositories/person.repository.ts | 1 + server/src/repositories/search.repository.ts | 185 ++++------- .../1744910873969-InitialMigration.ts | 7 +- server/src/services/database.service.spec.ts | 72 +---- server/src/services/database.service.ts | 53 ++- server/src/services/person.service.ts | 3 + .../src/services/smart-info.service.spec.ts | 54 ++-- server/src/services/smart-info.service.ts | 8 +- server/src/types.ts | 4 +- server/src/utils/database.ts | 18 +- server/test/medium.factory.ts | 2 +- server/test/medium/globalSetup.ts | 8 +- .../repositories/database.repository.mock.ts | 8 +- 35 files changed, 572 insertions(+), 444 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 91f4ffce4f..6c1cb8e07e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -643,7 +643,7 @@ jobs: contents: read services: postgres: - image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52 + image: ghcr.io/immich-app/postgres:14 env: POSTGRES_PASSWORD: postgres POSTGRES_USER: postgres diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index a428934022..1da06ef2ff 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -122,7 +122,7 @@ services: database: container_name: immich_postgres - image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52 + image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0 env_file: - .env environment: @@ -134,24 +134,6 @@ services: - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data ports: - 5432:5432 - healthcheck: - test: >- - pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; - Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align - --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; - echo "checksum failure count is $$Chksum"; - [ "$$Chksum" = '0' ] || exit 1 - interval: 5m - start_interval: 30s - start_period: 5m - command: >- - postgres - -c shared_preload_libraries=vectors.so - -c 'search_path="$$user", public, vectors' - -c logging_collector=on - -c max_wal_size=2GB - -c shared_buffers=512MB - -c wal_compression=on # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics # immich-prometheus: diff --git a/docker/docker-compose.prod.yml b/docker/docker-compose.prod.yml index bfcb5455aa..e17a034ddb 100644 --- 
a/docker/docker-compose.prod.yml +++ b/docker/docker-compose.prod.yml @@ -63,7 +63,7 @@ services: database: container_name: immich_postgres - image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52 + image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0 env_file: - .env environment: @@ -75,24 +75,6 @@ services: - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data ports: - 5432:5432 - healthcheck: - test: >- - pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; - Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align - --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; - echo "checksum failure count is $$Chksum"; - [ "$$Chksum" = '0' ] || exit 1 - interval: 5m - start_interval: 30s - start_period: 5m - command: >- - postgres - -c shared_preload_libraries=vectors.so - -c 'search_path="$$user", public, vectors' - -c logging_collector=on - -c max_wal_size=2GB - -c shared_buffers=512MB - -c wal_compression=on restart: always # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 4387f5fd0c..f2b1a20321 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -56,7 +56,7 @@ services: database: container_name: immich_postgres - image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52 + image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0 environment: POSTGRES_PASSWORD: ${DB_PASSWORD} POSTGRES_USER: ${DB_USERNAME} @@ -65,24 +65,8 @@ services: volumes: # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file - ${DB_DATA_LOCATION}:/var/lib/postgresql/data - healthcheck: - test: >- - pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; - Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align - --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; - echo "checksum failure count is $$Chksum"; - [ "$$Chksum" = '0' ] || exit 1 - interval: 5m - start_interval: 30s - start_period: 5m - command: >- - postgres - -c shared_preload_libraries=vectors.so - -c 'search_path="$$user", public, vectors' - -c logging_collector=on - -c max_wal_size=2GB - -c shared_buffers=512MB - -c wal_compression=on + # change ssd below to hdd if you are using a hard disk drive or other slow storage + command: postgres -c config_file=/etc/postgresql/postgresql.ssd.conf restart: always volumes: diff --git a/docs/docs/administration/postgres-standalone.md b/docs/docs/administration/postgres-standalone.md index 2ca23e195f..44c2c8e4c6 100644 --- a/docs/docs/administration/postgres-standalone.md +++ b/docs/docs/administration/postgres-standalone.md @@ -10,12 +10,12 @@ Running with a pre-existing Postgres server can unlock powerful administrative f ## Prerequisites -You must install pgvecto.rs into your instance of Postgres using their [instructions][vectors-install]. After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`. 
+You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`. :::note -Immich is known to work with Postgres versions 14, 15, and 16. Earlier versions are unsupported. Postgres 17 is nominally compatible, but pgvecto.rs does not have prebuilt images or packages for it as of writing. +Immich is known to work with Postgres versions 14, 15, 16 and 17. Earlier versions are unsupported. -Make sure the installed version of pgvecto.rs is compatible with your version of Immich. The current accepted range for pgvecto.rs is `>= 0.2.0, < 0.4.0`. +Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 1.0.0`. ::: ## Specifying the connection URL @@ -53,16 +53,75 @@ CREATE DATABASE ; \c BEGIN; ALTER DATABASE OWNER TO ; -CREATE EXTENSION vectors; +CREATE EXTENSION vchord CASCADE; CREATE EXTENSION earthdistance CASCADE; -ALTER DATABASE SET search_path TO "$user", public, vectors; -ALTER SCHEMA vectors OWNER TO ; COMMIT; ``` -### Updating pgvecto.rs +### Updating VectorChord -When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`. +When installing a new version of VectorChord, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vchord UPDATE;`. + +## Migrating to VectorChord + +VectorChord is the successor extension to pgvecto.rs, allowing for higher performance, lower memory usage and higher quality results for smart search and facial recognition. + +### Migrating from pgvecto.rs + +Support for pgvecto.rs will be dropped in a later release, hence we recommend all users currently using pgvecto.rs to migrate to VectorChord at their convenience. There are two primary approaches to do so. + +The easiest option is to have both extensions installed during the migration: + +1. Ensure you still have pgvecto.rs installed +2. [Install VectorChord][vchord-install] +3. Add `shared_preload_libraries = 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed +4. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client +5. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output +6. Remove the `vectors.so` entry from the `shared_preload_libraries` setting +7. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate) + +If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps: + +1. While pgvecto.rs is still installed, run the following SQL command using psql or your choice of database client. 
Take note of the number outputted by this command as you will need it later + +```sql +SELECT atttypmod as dimsize + FROM pg_attribute f + JOIN pg_class c ON c.oid = f.attrelid + WHERE c.relkind = 'r'::char + AND f.attnum > 0 + AND c.relname = 'smart_search'::text + AND f.attname = 'embedding'::text; +``` + +2. Remove references to pgvecto.rs using the below SQL commands + +```sql +DROP INDEX IF EXISTS clip_index; +DROP INDEX IF EXISTS face_index; +ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE real[]; +ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE real[]; +``` + +3. [Install VectorChord][vchord-install] +4. Change the columns back to the appropriate vector types, replacing `` with the number from step 1 + +```sql +CREATE EXTENSION IF NOT EXISTS vchord CASCADE; +ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE vector(); +ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512); +``` + +5. Start Immich and let it create new indices using VectorChord + +### Migrating from pgvector + +1. Ensure you have at least 0.7.0 of pgvector installed. If it is below that, please upgrade it and run the SQL command `ALTER EXTENSION vector UPDATE;` using psql or your choice of database client +2. Follow the Prerequisites to install VectorChord +3. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` +4. Start Immich and let it create new indices using VectorChord + +Note that VectorChord itself uses pgvector types, so you should not uninstall pgvector after following these steps. ### Common errors @@ -70,4 +129,4 @@ When installing a new version of pgvecto.rs, you will need to manually update th If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO ;`. -[vectors-install]: https://docs.vectorchord.ai/getting-started/installation.html +[vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html diff --git a/docs/docs/features/searching.md b/docs/docs/features/searching.md index f6bfac6e7a..d7ebd1a468 100644 --- a/docs/docs/features/searching.md +++ b/docs/docs/features/searching.md @@ -5,7 +5,7 @@ import TabItem from '@theme/TabItem'; Immich uses Postgres as its search database for both metadata and contextual CLIP search. -Contextual CLIP search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata. +Contextual CLIP search is powered by the [VectorChord](https://github.com/tensorchord/VectorChord) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata. 
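+Concretely, a contextual search is answered by embedding the query text with CLIP and ranking the stored vectors in the `smart_search` table by distance. The sketch below is only an illustration of that idea, assuming a pre-computed query embedding (the `:query_embedding` placeholder), pgvector's cosine-distance operator and an `assetId` column; the SQL Immich actually generates may differ.
+
+```sql
+-- Hypothetical illustration: rank stored CLIP embeddings by similarity to a query embedding.
+-- :query_embedding stands in for the vector produced from the search text by the same
+-- CLIP model that populated smart_search.embedding.
+SELECT "assetId"
+FROM smart_search
+ORDER BY embedding <=> :query_embedding::vector
+LIMIT 20;
+```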
## Advanced Search Filters diff --git a/docs/docs/install/environment-variables.md b/docs/docs/install/environment-variables.md index c853a873ab..d3ca49a0a4 100644 --- a/docs/docs/install/environment-variables.md +++ b/docs/docs/install/environment-variables.md @@ -72,21 +72,21 @@ Information on the current workers can be found [here](/docs/administration/jobs ## Database -| Variable | Description | Default | Containers | -| :---------------------------------- | :----------------------------------------------------------------------- | :----------: | :----------------------------- | -| `DB_URL` | Database URL | | server | -| `DB_HOSTNAME` | Database host | `database` | server | -| `DB_PORT` | Database port | `5432` | server | -| `DB_USERNAME` | Database user | `postgres` | server, database\*1 | -| `DB_PASSWORD` | Database password | `postgres` | server, database\*1 | -| `DB_DATABASE_NAME` | Database name | `immich` | server, database\*1 | -| `DB_SSL_MODE` | Database SSL mode | | server | -| `DB_VECTOR_EXTENSION`\*2 | Database vector extension (one of [`pgvector`, `pgvecto.rs`]) | `pgvecto.rs` | server | -| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server | +| Variable | Description | Default | Containers | +| :---------------------------------- | :--------------------------------------------------------------------------- | :--------: | :----------------------------- | +| `DB_URL` | Database URL | | server | +| `DB_HOSTNAME` | Database host | `database` | server | +| `DB_PORT` | Database port | `5432` | server | +| `DB_USERNAME` | Database user | `postgres` | server, database\*1 | +| `DB_PASSWORD` | Database password | `postgres` | server, database\*1 | +| `DB_DATABASE_NAME` | Database name | `immich` | server, database\*1 | +| `DB_SSL_MODE` | Database SSL mode | | server | +| `DB_VECTOR_EXTENSION`\*2 | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server | +| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server | \*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`. -\*2: This setting cannot be changed after the server has successfully started up. +\*2: If not provided, the appropriate extension to use is auto-detected at startup by introspecting the database. When multiple extensions are installed, the order of preference is VectorChord, pgvecto.rs, pgvector. 
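+To see which of these extensions a given database can load (and which are already installed), you can ask the standard Postgres catalog. A query along these lines, run with psql or another client, gives roughly the same information the auto-detection relies on; it is a manual check, not the exact query Immich runs at startup.
+
+```sql
+-- List the vector extensions Postgres knows about and whether they are installed.
+-- 'vchord' is VectorChord, 'vectors' is pgvecto.rs and 'vector' is pgvector.
+SELECT name, default_version, installed_version
+FROM pg_available_extensions
+WHERE name IN ('vchord', 'vectors', 'vector');
+```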
:::info diff --git a/e2e/docker-compose.yml b/e2e/docker-compose.yml index 48c17c828b..a8cb21aaf7 100644 --- a/e2e/docker-compose.yml +++ b/e2e/docker-compose.yml @@ -37,8 +37,8 @@ services: image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa database: - image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52 - command: -c fsync=off -c shared_preload_libraries=vectors.so + image: ghcr.io/immich-app/postgres:14 + command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf environment: POSTGRES_PASSWORD: postgres POSTGRES_USER: postgres diff --git a/server/src/constants.ts b/server/src/constants.ts index 6c0319fcee..8268360d9f 100644 --- a/server/src/constants.ts +++ b/server/src/constants.ts @@ -1,9 +1,10 @@ import { Duration } from 'luxon'; import { readFileSync } from 'node:fs'; import { SemVer } from 'semver'; -import { DatabaseExtension, ExifOrientation } from 'src/enum'; +import { DatabaseExtension, ExifOrientation, VectorIndex } from 'src/enum'; export const POSTGRES_VERSION_RANGE = '>=14.0.0'; +export const VECTORCHORD_VERSION_RANGE = '>=0.3 <1'; export const VECTORS_VERSION_RANGE = '>=0.2 <0.4'; export const VECTOR_VERSION_RANGE = '>=0.5 <1'; @@ -20,8 +21,22 @@ export const EXTENSION_NAMES: Record = { earthdistance: 'earthdistance', vector: 'pgvector', vectors: 'pgvecto.rs', + vchord: 'VectorChord', } as const; +export const VECTOR_EXTENSIONS = [ + DatabaseExtension.VECTORCHORD, + DatabaseExtension.VECTORS, + DatabaseExtension.VECTOR, +] as const; + +export const VECTOR_INDEX_TABLES = { + [VectorIndex.CLIP]: 'smart_search', + [VectorIndex.FACE]: 'face_search', +} as const; + +export const VECTORCHORD_LIST_SLACK_FACTOR = 1.2; + export const SALT_ROUNDS = 10; export const IWorker = 'IWorker'; diff --git a/server/src/decorators.ts b/server/src/decorators.ts index 1af9342e0b..6b34ffcafe 100644 --- a/server/src/decorators.ts +++ b/server/src/decorators.ts @@ -116,7 +116,7 @@ export const DummyValue = { DATE: new Date(), TIME_BUCKET: '2024-01-01T00:00:00.000Z', BOOLEAN: true, - VECTOR: '[1, 2, 3]', + VECTOR: JSON.stringify(Array.from({ length: 512 }, () => 0)), }; export const GENERATE_SQL_KEY = 'generate-sql-key'; diff --git a/server/src/dtos/env.dto.ts b/server/src/dtos/env.dto.ts index 7f0df8abb9..99fd1d2149 100644 --- a/server/src/dtos/env.dto.ts +++ b/server/src/dtos/env.dto.ts @@ -154,9 +154,9 @@ export class EnvDto { @Optional() DB_USERNAME?: string; - @IsEnum(['pgvector', 'pgvecto.rs']) + @IsEnum(['pgvector', 'pgvecto.rs', 'vectorchord']) @Optional() - DB_VECTOR_EXTENSION?: 'pgvector' | 'pgvecto.rs'; + DB_VECTOR_EXTENSION?: 'pgvector' | 'pgvecto.rs' | 'vectorchord'; @IsString() @Optional() diff --git a/server/src/enum.ts b/server/src/enum.ts index e49f1636a0..c9cf34383e 100644 --- a/server/src/enum.ts +++ b/server/src/enum.ts @@ -414,6 +414,7 @@ export enum DatabaseExtension { EARTH_DISTANCE = 'earthdistance', VECTOR = 'vector', VECTORS = 'vectors', + VECTORCHORD = 'vchord', } export enum BootstrapEventPriority { diff --git a/server/src/migrations/1700713871511-UsePgVectors.ts b/server/src/migrations/1700713871511-UsePgVectors.ts index e67c7275a7..4511e1001b 100644 --- a/server/src/migrations/1700713871511-UsePgVectors.ts +++ b/server/src/migrations/1700713871511-UsePgVectors.ts @@ -1,15 +1,13 @@ -import { ConfigRepository } from 'src/repositories/config.repository'; +import { getVectorExtension } from 
'src/repositories/database.repository'; import { getCLIPModelInfo } from 'src/utils/misc'; import { MigrationInterface, QueryRunner } from 'typeorm'; -const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension; - export class UsePgVectors1700713871511 implements MigrationInterface { name = 'UsePgVectors1700713871511'; public async up(queryRunner: QueryRunner): Promise { await queryRunner.query(`SET search_path TO "$user", public, vectors`); - await queryRunner.query(`CREATE EXTENSION IF NOT EXISTS ${vectorExtension}`); + await queryRunner.query(`CREATE EXTENSION IF NOT EXISTS ${await getVectorExtension(queryRunner)}`); const faceDimQuery = await queryRunner.query(` SELECT CARDINALITY(embedding::real[]) as dimsize FROM asset_faces diff --git a/server/src/migrations/1700713994428-AddCLIPEmbeddingIndex.ts b/server/src/migrations/1700713994428-AddCLIPEmbeddingIndex.ts index b5d47bb8cd..43809d6364 100644 --- a/server/src/migrations/1700713994428-AddCLIPEmbeddingIndex.ts +++ b/server/src/migrations/1700713994428-AddCLIPEmbeddingIndex.ts @@ -1,13 +1,12 @@ -import { ConfigRepository } from 'src/repositories/config.repository'; +import { getVectorExtension } from 'src/repositories/database.repository'; import { vectorIndexQuery } from 'src/utils/database'; import { MigrationInterface, QueryRunner } from 'typeorm'; -const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension; - export class AddCLIPEmbeddingIndex1700713994428 implements MigrationInterface { name = 'AddCLIPEmbeddingIndex1700713994428'; public async up(queryRunner: QueryRunner): Promise { + const vectorExtension = await getVectorExtension(queryRunner); await queryRunner.query(`SET search_path TO "$user", public, vectors`); await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: 'clip_index' })); diff --git a/server/src/migrations/1700714033632-AddFaceEmbeddingIndex.ts b/server/src/migrations/1700714033632-AddFaceEmbeddingIndex.ts index 2b41788fe4..5ee91afbcc 100644 --- a/server/src/migrations/1700714033632-AddFaceEmbeddingIndex.ts +++ b/server/src/migrations/1700714033632-AddFaceEmbeddingIndex.ts @@ -1,13 +1,12 @@ -import { ConfigRepository } from 'src/repositories/config.repository'; +import { getVectorExtension } from 'src/repositories/database.repository'; import { vectorIndexQuery } from 'src/utils/database'; import { MigrationInterface, QueryRunner } from 'typeorm'; -const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension; - export class AddFaceEmbeddingIndex1700714033632 implements MigrationInterface { name = 'AddFaceEmbeddingIndex1700714033632'; public async up(queryRunner: QueryRunner): Promise { + const vectorExtension = await getVectorExtension(queryRunner); await queryRunner.query(`SET search_path TO "$user", public, vectors`); await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'asset_faces', indexName: 'face_index' })); diff --git a/server/src/migrations/1718486162779-AddFaceSearchRelation.ts b/server/src/migrations/1718486162779-AddFaceSearchRelation.ts index 64849708d2..68e1618775 100644 --- a/server/src/migrations/1718486162779-AddFaceSearchRelation.ts +++ b/server/src/migrations/1718486162779-AddFaceSearchRelation.ts @@ -1,12 +1,11 @@ import { DatabaseExtension } from 'src/enum'; -import { ConfigRepository } from 'src/repositories/config.repository'; +import { getVectorExtension } from 'src/repositories/database.repository'; import { vectorIndexQuery } from 'src/utils/database'; import { 
MigrationInterface, QueryRunner } from 'typeorm'; -const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension; - export class AddFaceSearchRelation1718486162779 implements MigrationInterface { public async up(queryRunner: QueryRunner): Promise { + const vectorExtension = await getVectorExtension(queryRunner); if (vectorExtension === DatabaseExtension.VECTORS) { await queryRunner.query(`SET search_path TO "$user", public, vectors`); } @@ -48,11 +47,11 @@ export class AddFaceSearchRelation1718486162779 implements MigrationInterface { await queryRunner.query(`ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512)`); await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: 'clip_index' })); - await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'face_search', indexName: 'face_index' })); } public async down(queryRunner: QueryRunner): Promise { + const vectorExtension = await getVectorExtension(queryRunner); if (vectorExtension === DatabaseExtension.VECTORS) { await queryRunner.query(`SET search_path TO "$user", public, vectors`); } diff --git a/server/src/queries/database.repository.sql b/server/src/queries/database.repository.sql index 8c87a7470f..9dc60ac43f 100644 --- a/server/src/queries/database.repository.sql +++ b/server/src/queries/database.repository.sql @@ -11,11 +11,3 @@ WHERE -- DatabaseRepository.getPostgresVersion SHOW server_version - --- DatabaseRepository.shouldReindex -SELECT - idx_status -FROM - pg_vector_index_stat -WHERE - indexname = $1 diff --git a/server/src/queries/person.repository.sql b/server/src/queries/person.repository.sql index fefc25ee6a..48854f4872 100644 --- a/server/src/queries/person.repository.sql +++ b/server/src/queries/person.repository.sql @@ -204,6 +204,21 @@ where "person"."ownerId" = $3 and "asset_faces"."deletedAt" is null +-- PersonRepository.refreshFaces +with + "added_embeddings" as ( + insert into + "face_search" ("faceId", "embedding") + values + ($1, $2) + ) +select +from + ( + select + 1 + ) as "dummy" + -- PersonRepository.getFacesByIds select "asset_faces".*, diff --git a/server/src/queries/search.repository.sql b/server/src/queries/search.repository.sql index c18fe02418..c100089179 100644 --- a/server/src/queries/search.repository.sql +++ b/server/src/queries/search.repository.sql @@ -64,6 +64,9 @@ limit $15 -- SearchRepository.searchSmart +begin +set + local vchordrq.probes = 1 select "assets".* from @@ -83,8 +86,12 @@ limit $7 offset $8 +commit -- SearchRepository.searchDuplicates +begin +set + local vchordrq.probes = 1 with "cte" as ( select @@ -102,18 +109,22 @@ with and "assets"."id" != $5::uuid and "assets"."stackId" is null order by - smart_search.embedding <=> $6 + "distance" limit - $7 + $6 ) select * from "cte" where - "cte"."distance" <= $8 + "cte"."distance" <= $7 +commit -- SearchRepository.searchFaces +begin +set + local vchordrq.probes = 1 with "cte" as ( select @@ -129,16 +140,17 @@ with "assets"."ownerId" = any ($2::uuid[]) and "assets"."deletedAt" is null order by - face_search.embedding <=> $3 + "distance" limit - $4 + $3 ) select * from "cte" where - "cte"."distance" <= $5 + "cte"."distance" <= $4 +commit -- SearchRepository.searchPlaces select diff --git a/server/src/repositories/config.repository.spec.ts b/server/src/repositories/config.repository.spec.ts index 143892fdd0..238b48bcef 100644 --- a/server/src/repositories/config.repository.spec.ts +++ b/server/src/repositories/config.repository.spec.ts @@ -89,7 +89,7 @@ 
describe('getEnv', () => { password: 'postgres', }, skipMigrations: false, - vectorExtension: 'vectors', + vectorExtension: undefined, }); }); diff --git a/server/src/repositories/config.repository.ts b/server/src/repositories/config.repository.ts index 9b3e406437..9a0a24f70f 100644 --- a/server/src/repositories/config.repository.ts +++ b/server/src/repositories/config.repository.ts @@ -58,7 +58,7 @@ export interface EnvData { database: { config: DatabaseConnectionParams; skipMigrations: boolean; - vectorExtension: VectorExtension; + vectorExtension?: VectorExtension; }; licensePublicKey: { @@ -196,6 +196,22 @@ const getEnv = (): EnvData => { ssl: dto.DB_SSL_MODE || undefined, }; + let vectorExtension: VectorExtension | undefined; + switch (dto.DB_VECTOR_EXTENSION) { + case 'pgvector': { + vectorExtension = DatabaseExtension.VECTOR; + break; + } + case 'pgvecto.rs': { + vectorExtension = DatabaseExtension.VECTORS; + break; + } + case 'vectorchord': { + vectorExtension = DatabaseExtension.VECTORCHORD; + break; + } + } + return { host: dto.IMMICH_HOST, port: dto.IMMICH_PORT || 2283, @@ -251,7 +267,7 @@ const getEnv = (): EnvData => { database: { config: databaseConnection, skipMigrations: dto.DB_SKIP_MIGRATIONS ?? false, - vectorExtension: dto.DB_VECTOR_EXTENSION === 'pgvector' ? DatabaseExtension.VECTOR : DatabaseExtension.VECTORS, + vectorExtension, }, licensePublicKey: isProd ? productionKeys : stagingKeys, diff --git a/server/src/repositories/database.repository.ts b/server/src/repositories/database.repository.ts index addf6bcff0..67bb1b6ca2 100644 --- a/server/src/repositories/database.repository.ts +++ b/server/src/repositories/database.repository.ts @@ -5,7 +5,16 @@ import { InjectKysely } from 'nestjs-kysely'; import { readdir } from 'node:fs/promises'; import { join, resolve } from 'node:path'; import semver from 'semver'; -import { EXTENSION_NAMES, POSTGRES_VERSION_RANGE, VECTOR_VERSION_RANGE, VECTORS_VERSION_RANGE } from 'src/constants'; +import { + EXTENSION_NAMES, + POSTGRES_VERSION_RANGE, + VECTOR_EXTENSIONS, + VECTOR_INDEX_TABLES, + VECTOR_VERSION_RANGE, + VECTORCHORD_LIST_SLACK_FACTOR, + VECTORCHORD_VERSION_RANGE, + VECTORS_VERSION_RANGE, +} from 'src/constants'; import { DB } from 'src/db'; import { GenerateSql } from 'src/decorators'; import { DatabaseExtension, DatabaseLock, VectorIndex } from 'src/enum'; @@ -14,11 +23,42 @@ import { LoggingRepository } from 'src/repositories/logging.repository'; import { ExtensionVersion, VectorExtension, VectorUpdateResult } from 'src/types'; import { vectorIndexQuery } from 'src/utils/database'; import { isValidInteger } from 'src/validation'; -import { DataSource } from 'typeorm'; +import { DataSource, QueryRunner } from 'typeorm'; + +export let cachedVectorExtension: VectorExtension | undefined; +export async function getVectorExtension(runner: Kysely | QueryRunner): Promise { + if (cachedVectorExtension) { + return cachedVectorExtension; + } + + cachedVectorExtension = new ConfigRepository().getEnv().database.vectorExtension; + if (cachedVectorExtension) { + return cachedVectorExtension; + } + + let availableExtensions: { name: VectorExtension }[]; + const query = `SELECT name FROM pg_available_extensions WHERE name IN (${VECTOR_EXTENSIONS.map((ext) => `'${ext}'`).join(', ')})`; + if (runner instanceof Kysely) { + const { rows } = await sql.raw<{ name: VectorExtension }>(query).execute(runner); + availableExtensions = rows; + } else { + availableExtensions = (await runner.query(query)) as { name: VectorExtension }[]; + } + const 
extensionNames = new Set(availableExtensions.map((row) => row.name)); + cachedVectorExtension = VECTOR_EXTENSIONS.find((ext) => extensionNames.has(ext)); + if (!cachedVectorExtension) { + throw new Error(`No vector extension found. Available extensions: ${VECTOR_EXTENSIONS.join(', ')}`); + } + return cachedVectorExtension; +} + +export const probes: Record = { + [VectorIndex.CLIP]: 1, + [VectorIndex.FACE]: 1, +}; @Injectable() export class DatabaseRepository { - private vectorExtension: VectorExtension; private readonly asyncLock = new AsyncLock(); constructor( @@ -26,7 +66,6 @@ export class DatabaseRepository { private logger: LoggingRepository, private configRepository: ConfigRepository, ) { - this.vectorExtension = configRepository.getEnv().database.vectorExtension; this.logger.setContext(DatabaseRepository.name); } @@ -34,6 +73,10 @@ export class DatabaseRepository { await this.db.destroy(); } + getVectorExtension(): Promise { + return getVectorExtension(this.db); + } + @GenerateSql({ params: [DatabaseExtension.VECTORS] }) async getExtensionVersion(extension: DatabaseExtension): Promise { const { rows } = await sql` @@ -45,7 +88,20 @@ export class DatabaseRepository { } getExtensionVersionRange(extension: VectorExtension): string { - return extension === DatabaseExtension.VECTORS ? VECTORS_VERSION_RANGE : VECTOR_VERSION_RANGE; + switch (extension) { + case DatabaseExtension.VECTORCHORD: { + return VECTORCHORD_VERSION_RANGE; + } + case DatabaseExtension.VECTORS: { + return VECTORS_VERSION_RANGE; + } + case DatabaseExtension.VECTOR: { + return VECTOR_VERSION_RANGE; + } + default: { + throw new Error(`Unsupported vector extension: '${extension}'`); + } + } } @GenerateSql() @@ -59,7 +115,14 @@ export class DatabaseRepository { } async createExtension(extension: DatabaseExtension): Promise { - await sql`CREATE EXTENSION IF NOT EXISTS ${sql.raw(extension)}`.execute(this.db); + await sql`CREATE EXTENSION IF NOT EXISTS ${sql.raw(extension)} CASCADE`.execute(this.db); + if (extension === DatabaseExtension.VECTORCHORD) { + const dbName = sql.table(await this.getDatabaseName()); + await sql`ALTER DATABASE ${dbName} SET vchordrq.prewarm_dim = '512,640,768,1024,1152,1536'`.execute(this.db); + await sql`SET vchordrq.prewarm_dim = '512,640,768,1024,1152,1536'`.execute(this.db); + await sql`ALTER DATABASE ${dbName} SET vchordrq.probes = 1`.execute(this.db); + await sql`SET vchordrq.probes = 1`.execute(this.db); + } } async updateVectorExtension(extension: VectorExtension, targetVersion?: string): Promise { @@ -78,120 +141,201 @@ export class DatabaseRepository { await this.db.transaction().execute(async (tx) => { await this.setSearchPath(tx); - if (isVectors && installedVersion === '0.1.1') { - await this.setExtVersion(tx, DatabaseExtension.VECTORS, '0.1.11'); - } - - const isSchemaUpgrade = semver.satisfies(installedVersion, '0.1.1 || 0.1.11'); - if (isSchemaUpgrade && isVectors) { - await this.updateVectorsSchema(tx); - } - await sql`ALTER EXTENSION ${sql.raw(extension)} UPDATE TO ${sql.lit(targetVersion)}`.execute(tx); const diff = semver.diff(installedVersion, targetVersion); - if (isVectors && diff && ['minor', 'major'].includes(diff)) { + if (isVectors && (diff === 'major' || diff === 'minor')) { await sql`SELECT pgvectors_upgrade()`.execute(tx); restartRequired = true; - } else { - await this.reindex(VectorIndex.CLIP); - await this.reindex(VectorIndex.FACE); + } else if (diff) { + await Promise.all([this.reindexVectors(VectorIndex.CLIP), this.reindexVectors(VectorIndex.FACE)]); } }); return { 
restartRequired }; } - async reindex(index: VectorIndex): Promise { - try { - await sql`REINDEX INDEX ${sql.raw(index)}`.execute(this.db); - } catch (error) { - if (this.vectorExtension !== DatabaseExtension.VECTORS) { - throw error; - } - this.logger.warn(`Could not reindex index ${index}. Attempting to auto-fix.`); + async prewarm(index: VectorIndex): Promise { + const vectorExtension = await getVectorExtension(this.db); + if (vectorExtension !== DatabaseExtension.VECTORCHORD) { + return; + } + this.logger.debug(`Prewarming ${index}`); + await sql`SELECT vchordrq_prewarm(${index})`.execute(this.db); + } - const table = await this.getIndexTable(index); - const dimSize = await this.getDimSize(table); - await this.db.transaction().execute(async (tx) => { - await this.setSearchPath(tx); - await sql`DROP INDEX IF EXISTS ${sql.raw(index)}`.execute(tx); - await sql`ALTER TABLE ${sql.raw(table)} ALTER COLUMN embedding SET DATA TYPE real[]`.execute(tx); - await sql`ALTER TABLE ${sql.raw(table)} ALTER COLUMN embedding SET DATA TYPE vector(${sql.raw(String(dimSize))})`.execute( - tx, - ); - await sql.raw(vectorIndexQuery({ vectorExtension: this.vectorExtension, table, indexName: index })).execute(tx); - }); + async reindexVectorsIfNeeded(names: VectorIndex[]): Promise { + const { rows } = await sql<{ + indexdef: string; + indexname: string; + }>`SELECT indexdef, indexname FROM pg_indexes WHERE indexname = ANY(ARRAY[${sql.join(names)}])`.execute(this.db); + + const vectorExtension = await getVectorExtension(this.db); + + const promises = []; + for (const indexName of names) { + const row = rows.find((index) => index.indexname === indexName); + const table = VECTOR_INDEX_TABLES[indexName]; + if (!row) { + promises.push(this.reindexVectors(indexName)); + continue; + } + + switch (vectorExtension) { + case DatabaseExtension.VECTOR: { + if (!row.indexdef.toLowerCase().includes('using hnsw')) { + promises.push(this.reindexVectors(indexName)); + } + break; + } + case DatabaseExtension.VECTORS: { + if (!row.indexdef.toLowerCase().includes('using vectors')) { + promises.push(this.reindexVectors(indexName)); + } + break; + } + case DatabaseExtension.VECTORCHORD: { + const matches = row.indexdef.match(/(?<=lists = \[)\d+/g); + const lists = matches && matches.length > 0 ? 
Number(matches[0]) : 1; + promises.push( + this.db + .selectFrom(this.db.dynamic.table(table).as('t')) + .select((eb) => eb.fn.countAll().as('count')) + .executeTakeFirstOrThrow() + .then(({ count }) => { + const targetLists = this.targetListCount(count); + this.logger.log(`targetLists=${targetLists}, current=${lists} for ${indexName} of ${count} rows`); + if ( + !row.indexdef.toLowerCase().includes('using vchordrq') || + // slack factor is to avoid frequent reindexing if the count is borderline + (lists !== targetLists && lists !== this.targetListCount(count * VECTORCHORD_LIST_SLACK_FACTOR)) + ) { + probes[indexName] = this.targetProbeCount(targetLists); + return this.reindexVectors(indexName, { lists: targetLists }); + } else { + probes[indexName] = this.targetProbeCount(lists); + } + }), + ); + break; + } + } + } + + if (promises.length > 0) { + await Promise.all(promises); } } - @GenerateSql({ params: [VectorIndex.CLIP] }) - async shouldReindex(name: VectorIndex): Promise { - if (this.vectorExtension !== DatabaseExtension.VECTORS) { - return false; + private async reindexVectors(indexName: VectorIndex, { lists }: { lists?: number } = {}): Promise { + this.logger.log(`Reindexing ${indexName}`); + const table = VECTOR_INDEX_TABLES[indexName]; + const vectorExtension = await getVectorExtension(this.db); + const { rows } = await sql<{ + columnName: string; + }>`SELECT column_name as "columnName" FROM information_schema.columns WHERE table_name = ${table}`.execute(this.db); + if (rows.length === 0) { + this.logger.warn( + `Table ${table} does not exist, skipping reindexing. This is only normal if this is a new Immich instance.`, + ); + return; } - - try { - const { rows } = await sql<{ - idx_status: string; - }>`SELECT idx_status FROM pg_vector_index_stat WHERE indexname = ${name}`.execute(this.db); - return rows[0]?.idx_status === 'UPGRADE'; - } catch (error) { - const message: string = (error as any).message; - if (message.includes('index is not existing')) { - return true; - } else if (message.includes('relation "pg_vector_index_stat" does not exist')) { - return false; + const dimSize = await this.getDimensionSize(table); + await this.db.transaction().execute(async (tx) => { + await sql`DROP INDEX IF EXISTS ${sql.raw(indexName)}`.execute(tx); + if (!rows.some((row) => row.columnName === 'embedding')) { + this.logger.warn(`Column 'embedding' does not exist in table '${table}', truncating and adding column.`); + await sql`TRUNCATE TABLE ${sql.raw(table)}`.execute(tx); + await sql`ALTER TABLE ${sql.raw(table)} ADD COLUMN embedding real[] NOT NULL`.execute(tx); } - throw error; - } + await sql`ALTER TABLE ${sql.raw(table)} ALTER COLUMN embedding SET DATA TYPE real[]`.execute(tx); + const schema = vectorExtension === DatabaseExtension.VECTORS ? 'vectors.' 
: ''; + await sql` + ALTER TABLE ${sql.raw(table)} + ALTER COLUMN embedding + SET DATA TYPE ${sql.raw(schema)}vector(${sql.raw(String(dimSize))})`.execute(tx); + await sql.raw(vectorIndexQuery({ vectorExtension, table, indexName, lists })).execute(tx); + }); + this.logger.log(`Reindexed ${indexName}`); } private async setSearchPath(tx: Transaction): Promise { await sql`SET search_path TO "$user", public, vectors`.execute(tx); } - private async setExtVersion(tx: Transaction, extName: DatabaseExtension, version: string): Promise { - await sql`UPDATE pg_catalog.pg_extension SET extversion = ${version} WHERE extname = ${extName}`.execute(tx); + private async getDatabaseName(): Promise { + const { rows } = await sql<{ db: string }>`SELECT current_database() as db`.execute(this.db); + return rows[0].db; } - private async getIndexTable(index: VectorIndex): Promise { - const { rows } = await sql<{ - relname: string | null; - }>`SELECT relname FROM pg_stat_all_indexes WHERE indexrelname = ${index}`.execute(this.db); - const table = rows[0]?.relname; - if (!table) { - throw new Error(`Could not find table for index ${index}`); - } - return table; - } - - private async updateVectorsSchema(tx: Transaction): Promise { - const extension = DatabaseExtension.VECTORS; - await sql`CREATE SCHEMA IF NOT EXISTS ${extension}`.execute(tx); - await sql`UPDATE pg_catalog.pg_extension SET extrelocatable = true WHERE extname = ${extension}`.execute(tx); - await sql`ALTER EXTENSION vectors SET SCHEMA vectors`.execute(tx); - await sql`UPDATE pg_catalog.pg_extension SET extrelocatable = false WHERE extname = ${extension}`.execute(tx); - } - - private async getDimSize(table: string, column = 'embedding'): Promise { + async getDimensionSize(table: string, column = 'embedding'): Promise { const { rows } = await sql<{ dimsize: number }>` SELECT atttypmod as dimsize FROM pg_attribute f JOIN pg_class c ON c.oid = f.attrelid WHERE c.relkind = 'r'::char AND f.attnum > 0 - AND c.relname = ${table} - AND f.attname = '${column}' + AND c.relname = ${table}::text + AND f.attname = ${column}::text `.execute(this.db); const dimSize = rows[0]?.dimsize; if (!isValidInteger(dimSize, { min: 1, max: 2 ** 16 })) { - throw new Error(`Could not retrieve dimension size`); + this.logger.warn(`Could not retrieve dimension size of column '${column}' in table '${table}', assuming 512`); + return 512; } return dimSize; } + async setDimensionSize(dimSize: number): Promise { + if (!isValidInteger(dimSize, { min: 1, max: 2 ** 16 })) { + throw new Error(`Invalid CLIP dimension size: ${dimSize}`); + } + + // this is done in two transactions to handle concurrent writes + await this.db.transaction().execute(async (trx) => { + await sql`delete from ${sql.table('smart_search')}`.execute(trx); + await trx.schema.alterTable('smart_search').dropConstraint('dim_size_constraint').ifExists().execute(); + await sql`alter table ${sql.table('smart_search')} add constraint dim_size_constraint check (array_length(embedding::real[], 1) = ${sql.lit(dimSize)})`.execute( + trx, + ); + }); + + const vectorExtension = await this.getVectorExtension(); + await this.db.transaction().execute(async (trx) => { + await sql`drop index if exists clip_index`.execute(trx); + await trx.schema + .alterTable('smart_search') + .alterColumn('embedding', (col) => col.setDataType(sql.raw(`vector(${dimSize})`))) + .execute(); + await sql + .raw(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: VectorIndex.CLIP })) + .execute(trx); + await 
trx.schema.alterTable('smart_search').dropConstraint('dim_size_constraint').ifExists().execute(); + }); + probes[VectorIndex.CLIP] = 1; + + await sql`vacuum analyze ${sql.table('smart_search')}`.execute(this.db); + } + + async deleteAllSearchEmbeddings(): Promise { + await sql`truncate ${sql.table('smart_search')}`.execute(this.db); + } + + private targetListCount(count: number) { + if (count < 128_000) { + return 1; + } else if (count < 2_048_000) { + return 1 << (32 - Math.clz32(count / 1000)); + } else { + return 1 << (33 - Math.clz32(Math.sqrt(count))); + } + } + + private targetProbeCount(lists: number) { + return Math.ceil(lists / 8); + } + async runMigrations(options?: { transaction?: 'all' | 'none' | 'each' }): Promise { const { database } = this.configRepository.getEnv(); diff --git a/server/src/repositories/person.repository.ts b/server/src/repositories/person.repository.ts index ad18d7ed67..70a9980201 100644 --- a/server/src/repositories/person.repository.ts +++ b/server/src/repositories/person.repository.ts @@ -398,6 +398,7 @@ export class PersonRepository { return results.map(({ id }) => id); } + @GenerateSql({ params: [[], [], [{ faceId: DummyValue.UUID, embedding: DummyValue.VECTOR }]] }) async refreshFaces( facesToAdd: (Insertable & { assetId: string })[], faceIdsToRemove: string[], diff --git a/server/src/repositories/search.repository.ts b/server/src/repositories/search.repository.ts index 4e6b6e0fcf..a7b7027b7b 100644 --- a/server/src/repositories/search.repository.ts +++ b/server/src/repositories/search.repository.ts @@ -5,9 +5,9 @@ import { randomUUID } from 'node:crypto'; import { DB, Exif } from 'src/db'; import { DummyValue, GenerateSql } from 'src/decorators'; import { MapAsset } from 'src/dtos/asset-response.dto'; -import { AssetStatus, AssetType, AssetVisibility } from 'src/enum'; -import { ConfigRepository } from 'src/repositories/config.repository'; -import { anyUuid, asUuid, searchAssetBuilder, vectorIndexQuery } from 'src/utils/database'; +import { AssetStatus, AssetType, AssetVisibility, VectorIndex } from 'src/enum'; +import { probes } from 'src/repositories/database.repository'; +import { anyUuid, asUuid, searchAssetBuilder } from 'src/utils/database'; import { paginationHelper } from 'src/utils/pagination'; import { isValidInteger } from 'src/validation'; @@ -168,10 +168,7 @@ export interface GetCameraMakesOptions { @Injectable() export class SearchRepository { - constructor( - @InjectKysely() private db: Kysely, - private configRepository: ConfigRepository, - ) {} + constructor(@InjectKysely() private db: Kysely) {} @GenerateSql({ params: [ @@ -236,19 +233,21 @@ export class SearchRepository { }, ], }) - async searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions) { + searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions) { if (!isValidInteger(pagination.size, { min: 1, max: 1000 })) { throw new Error(`Invalid value for 'size': ${pagination.size}`); } - const items = await searchAssetBuilder(this.db, options) - .innerJoin('smart_search', 'assets.id', 'smart_search.assetId') - .orderBy(sql`smart_search.embedding <=> ${options.embedding}`) - .limit(pagination.size + 1) - .offset((pagination.page - 1) * pagination.size) - .execute(); - - return paginationHelper(items, pagination.size); + return this.db.transaction().execute(async (trx) => { + await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.CLIP])}`.execute(trx); + const items = await searchAssetBuilder(trx, options) + 
.innerJoin('smart_search', 'assets.id', 'smart_search.assetId') + .orderBy(sql`smart_search.embedding <=> ${options.embedding}`) + .limit(pagination.size + 1) + .offset((pagination.page - 1) * pagination.size) + .execute(); + return paginationHelper(items, pagination.size); + }); } @GenerateSql({ @@ -263,29 +262,32 @@ export class SearchRepository { ], }) searchDuplicates({ assetId, embedding, maxDistance, type, userIds }: AssetDuplicateSearch) { - return this.db - .with('cte', (qb) => - qb - .selectFrom('assets') - .select([ - 'assets.id as assetId', - 'assets.duplicateId', - sql`smart_search.embedding <=> ${embedding}`.as('distance'), - ]) - .innerJoin('smart_search', 'assets.id', 'smart_search.assetId') - .where('assets.ownerId', '=', anyUuid(userIds)) - .where('assets.deletedAt', 'is', null) - .where('assets.visibility', '!=', AssetVisibility.HIDDEN) - .where('assets.type', '=', type) - .where('assets.id', '!=', asUuid(assetId)) - .where('assets.stackId', 'is', null) - .orderBy(sql`smart_search.embedding <=> ${embedding}`) - .limit(64), - ) - .selectFrom('cte') - .selectAll() - .where('cte.distance', '<=', maxDistance as number) - .execute(); + return this.db.transaction().execute(async (trx) => { + await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.CLIP])}`.execute(trx); + return await trx + .with('cte', (qb) => + qb + .selectFrom('assets') + .select([ + 'assets.id as assetId', + 'assets.duplicateId', + sql`smart_search.embedding <=> ${embedding}`.as('distance'), + ]) + .innerJoin('smart_search', 'assets.id', 'smart_search.assetId') + .where('assets.ownerId', '=', anyUuid(userIds)) + .where('assets.deletedAt', 'is', null) + .where('assets.visibility', '!=', AssetVisibility.HIDDEN) + .where('assets.type', '=', type) + .where('assets.id', '!=', asUuid(assetId)) + .where('assets.stackId', 'is', null) + .orderBy('distance') + .limit(64), + ) + .selectFrom('cte') + .selectAll() + .where('cte.distance', '<=', maxDistance as number) + .execute(); + }); } @GenerateSql({ @@ -303,31 +305,36 @@ export class SearchRepository { throw new Error(`Invalid value for 'numResults': ${numResults}`); } - return this.db - .with('cte', (qb) => - qb - .selectFrom('asset_faces') - .select([ - 'asset_faces.id', - 'asset_faces.personId', - sql`face_search.embedding <=> ${embedding}`.as('distance'), - ]) - .innerJoin('assets', 'assets.id', 'asset_faces.assetId') - .innerJoin('face_search', 'face_search.faceId', 'asset_faces.id') - .leftJoin('person', 'person.id', 'asset_faces.personId') - .where('assets.ownerId', '=', anyUuid(userIds)) - .where('assets.deletedAt', 'is', null) - .$if(!!hasPerson, (qb) => qb.where('asset_faces.personId', 'is not', null)) - .$if(!!minBirthDate, (qb) => - qb.where((eb) => eb.or([eb('person.birthDate', 'is', null), eb('person.birthDate', '<=', minBirthDate!)])), - ) - .orderBy(sql`face_search.embedding <=> ${embedding}`) - .limit(numResults), - ) - .selectFrom('cte') - .selectAll() - .where('cte.distance', '<=', maxDistance) - .execute(); + return this.db.transaction().execute(async (trx) => { + await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.FACE])}`.execute(trx); + return await trx + .with('cte', (qb) => + qb + .selectFrom('asset_faces') + .select([ + 'asset_faces.id', + 'asset_faces.personId', + sql`face_search.embedding <=> ${embedding}`.as('distance'), + ]) + .innerJoin('assets', 'assets.id', 'asset_faces.assetId') + .innerJoin('face_search', 'face_search.faceId', 'asset_faces.id') + .leftJoin('person', 'person.id', 'asset_faces.personId') + 
.where('assets.ownerId', '=', anyUuid(userIds)) + .where('assets.deletedAt', 'is', null) + .$if(!!hasPerson, (qb) => qb.where('asset_faces.personId', 'is not', null)) + .$if(!!minBirthDate, (qb) => + qb.where((eb) => + eb.or([eb('person.birthDate', 'is', null), eb('person.birthDate', '<=', minBirthDate!)]), + ), + ) + .orderBy('distance') + .limit(numResults), + ) + .selectFrom('cte') + .selectAll() + .where('cte.distance', '<=', maxDistance) + .execute(); + }); } @GenerateSql({ params: [DummyValue.STRING] }) @@ -416,56 +423,6 @@ export class SearchRepository { .execute(); } - async getDimensionSize(): Promise { - const { rows } = await sql<{ dimsize: number }>` - select atttypmod as dimsize - from pg_attribute f - join pg_class c ON c.oid = f.attrelid - where c.relkind = 'r'::char - and f.attnum > 0 - and c.relname = 'smart_search' - and f.attname = 'embedding' - `.execute(this.db); - - const dimSize = rows[0]['dimsize']; - if (!isValidInteger(dimSize, { min: 1, max: 2 ** 16 })) { - throw new Error(`Could not retrieve CLIP dimension size`); - } - return dimSize; - } - - async setDimensionSize(dimSize: number): Promise { - if (!isValidInteger(dimSize, { min: 1, max: 2 ** 16 })) { - throw new Error(`Invalid CLIP dimension size: ${dimSize}`); - } - - // this is done in two transactions to handle concurrent writes - await this.db.transaction().execute(async (trx) => { - await sql`delete from ${sql.table('smart_search')}`.execute(trx); - await trx.schema.alterTable('smart_search').dropConstraint('dim_size_constraint').ifExists().execute(); - await sql`alter table ${sql.table('smart_search')} add constraint dim_size_constraint check (array_length(embedding::real[], 1) = ${sql.lit(dimSize)})`.execute( - trx, - ); - }); - - const vectorExtension = this.configRepository.getEnv().database.vectorExtension; - await this.db.transaction().execute(async (trx) => { - await sql`drop index if exists clip_index`.execute(trx); - await trx.schema - .alterTable('smart_search') - .alterColumn('embedding', (col) => col.setDataType(sql.raw(`vector(${dimSize})`))) - .execute(); - await sql.raw(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: 'clip_index' })).execute(trx); - await trx.schema.alterTable('smart_search').dropConstraint('dim_size_constraint').ifExists().execute(); - }); - - await sql`vacuum analyze ${sql.table('smart_search')}`.execute(this.db); - } - - async deleteAllSearchEmbeddings(): Promise { - await sql`truncate ${sql.table('smart_search')}`.execute(this.db); - } - async getCountries(userIds: string[]): Promise { const res = await this.getExifField('country', userIds).execute(); return res.map((row) => row.country!); diff --git a/server/src/schema/migrations/1744910873969-InitialMigration.ts b/server/src/schema/migrations/1744910873969-InitialMigration.ts index ce4a37ae3b..63625a69ad 100644 --- a/server/src/schema/migrations/1744910873969-InitialMigration.ts +++ b/server/src/schema/migrations/1744910873969-InitialMigration.ts @@ -1,10 +1,9 @@ import { Kysely, sql } from 'kysely'; import { DatabaseExtension } from 'src/enum'; -import { ConfigRepository } from 'src/repositories/config.repository'; +import { getVectorExtension } from 'src/repositories/database.repository'; import { LoggingRepository } from 'src/repositories/logging.repository'; import { vectorIndexQuery } from 'src/utils/database'; -const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension; const lastMigrationSql = sql<{ name: string }>`SELECT "name" FROM "migrations" ORDER BY "timestamp" 
DESC LIMIT 1;`; const tableExists = sql<{ result: string | null }>`select to_regclass('migrations') as "result"`; const logger = LoggingRepository.create(); @@ -25,12 +24,14 @@ export async function up(db: Kysely): Promise { return; } + const vectorExtension = await getVectorExtension(db); + await sql`CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`.execute(db); await sql`CREATE EXTENSION IF NOT EXISTS "unaccent";`.execute(db); await sql`CREATE EXTENSION IF NOT EXISTS "cube";`.execute(db); await sql`CREATE EXTENSION IF NOT EXISTS "earthdistance";`.execute(db); await sql`CREATE EXTENSION IF NOT EXISTS "pg_trgm";`.execute(db); - await sql`CREATE EXTENSION IF NOT EXISTS ${sql.raw(vectorExtension)}`.execute(db); + await sql`CREATE EXTENSION IF NOT EXISTS ${sql.raw(vectorExtension)} CASCADE`.execute(db); await sql`CREATE OR REPLACE FUNCTION immich_uuid_v7(p_timestamp timestamp with time zone default clock_timestamp()) RETURNS uuid VOLATILE LANGUAGE SQL diff --git a/server/src/services/database.service.spec.ts b/server/src/services/database.service.spec.ts index e0ab4a624d..1c89fa313c 100644 --- a/server/src/services/database.service.spec.ts +++ b/server/src/services/database.service.spec.ts @@ -1,5 +1,5 @@ import { EXTENSION_NAMES } from 'src/constants'; -import { DatabaseExtension } from 'src/enum'; +import { DatabaseExtension, VectorIndex } from 'src/enum'; import { DatabaseService } from 'src/services/database.service'; import { VectorExtension } from 'src/types'; import { mockEnvData } from 'test/repositories/config.repository.mock'; @@ -47,8 +47,10 @@ describe(DatabaseService.name, () => { describe.each(>[ { extension: DatabaseExtension.VECTOR, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTOR] }, { extension: DatabaseExtension.VECTORS, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTORS] }, + { extension: DatabaseExtension.VECTORCHORD, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTORCHORD] }, ])('should work with $extensionName', ({ extension, extensionName }) => { beforeEach(() => { + mocks.database.getVectorExtension.mockResolvedValue(extension); mocks.config.getEnv.mockReturnValue( mockEnvData({ database: { @@ -240,41 +242,32 @@ describe(DatabaseService.name, () => { }); it(`should reindex ${extension} indices if needed`, async () => { - mocks.database.shouldReindex.mockResolvedValue(true); - await expect(sut.onBootstrap()).resolves.toBeUndefined(); - expect(mocks.database.shouldReindex).toHaveBeenCalledTimes(2); - expect(mocks.database.reindex).toHaveBeenCalledTimes(2); + expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledExactlyOnceWith([ + VectorIndex.CLIP, + VectorIndex.FACE, + ]); + expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledTimes(1); expect(mocks.database.runMigrations).toHaveBeenCalledTimes(1); expect(mocks.logger.fatal).not.toHaveBeenCalled(); }); it(`should throw an error if reindexing fails`, async () => { - mocks.database.shouldReindex.mockResolvedValue(true); - mocks.database.reindex.mockRejectedValue(new Error('Error reindexing')); + mocks.database.reindexVectorsIfNeeded.mockRejectedValue(new Error('Error reindexing')); await expect(sut.onBootstrap()).rejects.toBeDefined(); - expect(mocks.database.shouldReindex).toHaveBeenCalledTimes(1); - expect(mocks.database.reindex).toHaveBeenCalledTimes(1); + expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledExactlyOnceWith([ + VectorIndex.CLIP, + VectorIndex.FACE, + ]); expect(mocks.database.runMigrations).not.toHaveBeenCalled(); 
expect(mocks.logger.fatal).not.toHaveBeenCalled(); expect(mocks.logger.warn).toHaveBeenCalledWith( expect.stringContaining('Could not run vector reindexing checks.'), ); }); - - it(`should not reindex ${extension} indices if not needed`, async () => { - mocks.database.shouldReindex.mockResolvedValue(false); - - await expect(sut.onBootstrap()).resolves.toBeUndefined(); - - expect(mocks.database.shouldReindex).toHaveBeenCalledTimes(2); - expect(mocks.database.reindex).toHaveBeenCalledTimes(0); - expect(mocks.database.runMigrations).toHaveBeenCalledTimes(1); - expect(mocks.logger.fatal).not.toHaveBeenCalled(); - }); }); it('should skip migrations if DB_SKIP_MIGRATIONS=true', async () => { @@ -300,23 +293,7 @@ describe(DatabaseService.name, () => { expect(mocks.database.runMigrations).not.toHaveBeenCalled(); }); - it(`should throw error if pgvector extension could not be created`, async () => { - mocks.config.getEnv.mockReturnValue( - mockEnvData({ - database: { - config: { - connectionType: 'parts', - host: 'database', - port: 5432, - username: 'postgres', - password: 'postgres', - database: 'immich', - }, - skipMigrations: true, - vectorExtension: DatabaseExtension.VECTOR, - }, - }), - ); + it(`should throw error if extension could not be created`, async () => { mocks.database.getExtensionVersion.mockResolvedValue({ installedVersion: null, availableVersion: minVersionInRange, @@ -328,26 +305,7 @@ describe(DatabaseService.name, () => { expect(mocks.logger.fatal).toHaveBeenCalledTimes(1); expect(mocks.logger.fatal.mock.calls[0][0]).toContain( - `Alternatively, if your Postgres instance has pgvecto.rs, you may use this instead`, - ); - expect(mocks.database.createExtension).toHaveBeenCalledTimes(1); - expect(mocks.database.updateVectorExtension).not.toHaveBeenCalled(); - expect(mocks.database.runMigrations).not.toHaveBeenCalled(); - }); - - it(`should throw error if pgvecto.rs extension could not be created`, async () => { - mocks.database.getExtensionVersion.mockResolvedValue({ - installedVersion: null, - availableVersion: minVersionInRange, - }); - mocks.database.updateVectorExtension.mockResolvedValue({ restartRequired: false }); - mocks.database.createExtension.mockRejectedValue(new Error('Failed to create extension')); - - await expect(sut.onBootstrap()).rejects.toThrow('Failed to create extension'); - - expect(mocks.logger.fatal).toHaveBeenCalledTimes(1); - expect(mocks.logger.fatal.mock.calls[0][0]).toContain( - `Alternatively, if your Postgres instance has pgvector, you may use this instead`, + `Alternatively, if your Postgres instance has any of vector, vectors, vchord, you may use one of them instead by setting the environment variable 'DB_VECTOR_EXTENSION='`, ); expect(mocks.database.createExtension).toHaveBeenCalledTimes(1); expect(mocks.database.updateVectorExtension).not.toHaveBeenCalled(); diff --git a/server/src/services/database.service.ts b/server/src/services/database.service.ts index d71dc25104..ec7be195ba 100644 --- a/server/src/services/database.service.ts +++ b/server/src/services/database.service.ts @@ -6,7 +6,7 @@ import { BootstrapEventPriority, DatabaseExtension, DatabaseLock, VectorIndex } import { BaseService } from 'src/services/base.service'; import { VectorExtension } from 'src/types'; -type CreateFailedArgs = { name: string; extension: string; otherName: string }; +type CreateFailedArgs = { name: string; extension: string; otherExtensions: string[] }; type UpdateFailedArgs = { name: string; extension: string; availableVersion: string }; type RestartRequiredArgs 
= { name: string; availableVersion: string }; type NightlyVersionArgs = { name: string; extension: string; version: string }; @@ -25,18 +25,15 @@ const messages = { outOfRange: ({ name, version, range }: OutOfRangeArgs) => `The ${name} extension version is ${version}, but Immich only supports ${range}. Please change ${name} to a compatible version in the Postgres instance.`, - createFailed: ({ name, extension, otherName }: CreateFailedArgs) => + createFailed: ({ name, extension, otherExtensions }: CreateFailedArgs) => `Failed to activate ${name} extension. Please ensure the Postgres instance has ${name} installed. If the Postgres instance already has ${name} installed, Immich may not have the necessary permissions to activate it. - In this case, please run 'CREATE EXTENSION IF NOT EXISTS ${extension}' manually as a superuser. + In this case, please run 'CREATE EXTENSION IF NOT EXISTS ${extension} CASCADE' manually as a superuser. See https://immich.app/docs/guides/database-queries for how to query the database. - Alternatively, if your Postgres instance has ${otherName}, you may use this instead by setting the environment variable 'DB_VECTOR_EXTENSION=${otherName}'. - Note that switching between the two extensions after a successful startup is not supported. - The exception is if your version of Immich prior to upgrading was 1.90.2 or earlier. - In this case, you may set either extension now, but you will not be able to switch to the other extension following a successful startup.`, + Alternatively, if your Postgres instance has any of ${otherExtensions.join(', ')}, you may use one of them instead by setting the environment variable 'DB_VECTOR_EXTENSION='.`, updateFailed: ({ name, extension, availableVersion }: UpdateFailedArgs) => `The ${name} extension can be updated to ${availableVersion}. Immich attempted to update the extension, but failed to do so. @@ -67,8 +64,7 @@ export class DatabaseService extends BaseService { } await this.databaseRepository.withLock(DatabaseLock.Migrations, async () => { - const envData = this.configRepository.getEnv(); - const extension = envData.database.vectorExtension; + const extension = await this.databaseRepository.getVectorExtension(); const name = EXTENSION_NAMES[extension]; const extensionRange = this.databaseRepository.getExtensionVersionRange(extension); @@ -97,12 +93,23 @@ export class DatabaseService extends BaseService { throw new Error(messages.invalidDowngrade({ name, extension, availableVersion, installedVersion })); } - await this.checkReindexing(); + try { + await this.databaseRepository.reindexVectorsIfNeeded([VectorIndex.CLIP, VectorIndex.FACE]); + } catch (error) { + this.logger.warn( + 'Could not run vector reindexing checks. If the extension was updated, please restart the Postgres instance. If you are upgrading directly from a version below 1.107.2, please upgrade to 1.107.2 first.', + ); + throw error; + } const { database } = this.configRepository.getEnv(); if (!database.skipMigrations) { await this.databaseRepository.runMigrations(); } + await Promise.all([ + this.databaseRepository.prewarm(VectorIndex.CLIP), + this.databaseRepository.prewarm(VectorIndex.FACE), + ]); }); } @@ -110,10 +117,13 @@ export class DatabaseService extends BaseService { try { await this.databaseRepository.createExtension(extension); } catch (error) { - const otherExtension = - extension === DatabaseExtension.VECTORS ? 
DatabaseExtension.VECTOR : DatabaseExtension.VECTORS; + const otherExtensions = [ + DatabaseExtension.VECTOR, + DatabaseExtension.VECTORS, + DatabaseExtension.VECTORCHORD, + ].filter((ext) => ext !== extension); const name = EXTENSION_NAMES[extension]; - this.logger.fatal(messages.createFailed({ name, extension, otherName: EXTENSION_NAMES[otherExtension] })); + this.logger.fatal(messages.createFailed({ name, extension, otherExtensions })); throw error; } } @@ -130,21 +140,4 @@ export class DatabaseService extends BaseService { throw error; } } - - private async checkReindexing() { - try { - if (await this.databaseRepository.shouldReindex(VectorIndex.CLIP)) { - await this.databaseRepository.reindex(VectorIndex.CLIP); - } - - if (await this.databaseRepository.shouldReindex(VectorIndex.FACE)) { - await this.databaseRepository.reindex(VectorIndex.FACE); - } - } catch (error) { - this.logger.warn( - 'Could not run vector reindexing checks. If the extension was updated, please restart the Postgres instance.', - ); - throw error; - } - } } diff --git a/server/src/services/person.service.ts b/server/src/services/person.service.ts index 23ba562ba6..cd484c230b 100644 --- a/server/src/services/person.service.ts +++ b/server/src/services/person.service.ts @@ -33,6 +33,7 @@ import { QueueName, SourceType, SystemMetadataKey, + VectorIndex, } from 'src/enum'; import { BoundingBox } from 'src/repositories/machine-learning.repository'; import { UpdateFacesData } from 'src/repositories/person.repository'; @@ -418,6 +419,8 @@ export class PersonService extends BaseService { return JobStatus.SKIPPED; } + await this.databaseRepository.prewarm(VectorIndex.FACE); + const lastRun = new Date().toISOString(); const facePagination = this.personRepository.getAllFaces( force ? 
undefined : { personId: null, sourceType: SourceType.MACHINE_LEARNING }, diff --git a/server/src/services/smart-info.service.spec.ts b/server/src/services/smart-info.service.spec.ts index 9cc97a8f0d..a6529fa623 100644 --- a/server/src/services/smart-info.service.spec.ts +++ b/server/src/services/smart-info.service.spec.ts @@ -54,28 +54,28 @@ describe(SmartInfoService.name, () => { it('should return if machine learning is disabled', async () => { await sut.onConfigInit({ newConfig: systemConfigStub.machineLearningDisabled as SystemConfig }); - expect(mocks.search.getDimensionSize).not.toHaveBeenCalled(); - expect(mocks.search.setDimensionSize).not.toHaveBeenCalled(); - expect(mocks.search.deleteAllSearchEmbeddings).not.toHaveBeenCalled(); + expect(mocks.database.getDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.deleteAllSearchEmbeddings).not.toHaveBeenCalled(); }); it('should return if model and DB dimension size are equal', async () => { - mocks.search.getDimensionSize.mockResolvedValue(512); + mocks.database.getDimensionSize.mockResolvedValue(512); await sut.onConfigInit({ newConfig: systemConfigStub.machineLearningEnabled as SystemConfig }); - expect(mocks.search.getDimensionSize).toHaveBeenCalledTimes(1); - expect(mocks.search.setDimensionSize).not.toHaveBeenCalled(); - expect(mocks.search.deleteAllSearchEmbeddings).not.toHaveBeenCalled(); + expect(mocks.database.getDimensionSize).toHaveBeenCalledTimes(1); + expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.deleteAllSearchEmbeddings).not.toHaveBeenCalled(); }); it('should update DB dimension size if model and DB have different values', async () => { - mocks.search.getDimensionSize.mockResolvedValue(768); + mocks.database.getDimensionSize.mockResolvedValue(768); await sut.onConfigInit({ newConfig: systemConfigStub.machineLearningEnabled as SystemConfig }); - expect(mocks.search.getDimensionSize).toHaveBeenCalledTimes(1); - expect(mocks.search.setDimensionSize).toHaveBeenCalledWith(512); + expect(mocks.database.getDimensionSize).toHaveBeenCalledTimes(1); + expect(mocks.database.setDimensionSize).toHaveBeenCalledWith(512); }); }); @@ -89,13 +89,13 @@ describe(SmartInfoService.name, () => { }); expect(mocks.systemMetadata.get).not.toHaveBeenCalled(); - expect(mocks.search.getDimensionSize).not.toHaveBeenCalled(); - expect(mocks.search.setDimensionSize).not.toHaveBeenCalled(); - expect(mocks.search.deleteAllSearchEmbeddings).not.toHaveBeenCalled(); + expect(mocks.database.getDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.deleteAllSearchEmbeddings).not.toHaveBeenCalled(); }); it('should return if model and DB dimension size are equal', async () => { - mocks.search.getDimensionSize.mockResolvedValue(512); + mocks.database.getDimensionSize.mockResolvedValue(512); await sut.onConfigUpdate({ newConfig: { @@ -106,13 +106,13 @@ describe(SmartInfoService.name, () => { } as SystemConfig, }); - expect(mocks.search.getDimensionSize).toHaveBeenCalledTimes(1); - expect(mocks.search.setDimensionSize).not.toHaveBeenCalled(); - expect(mocks.search.deleteAllSearchEmbeddings).not.toHaveBeenCalled(); + expect(mocks.database.getDimensionSize).toHaveBeenCalledTimes(1); + expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.deleteAllSearchEmbeddings).not.toHaveBeenCalled(); }); it('should update DB dimension size if model and DB 
have different values', async () => { - mocks.search.getDimensionSize.mockResolvedValue(512); + mocks.database.getDimensionSize.mockResolvedValue(512); await sut.onConfigUpdate({ newConfig: { @@ -123,12 +123,12 @@ describe(SmartInfoService.name, () => { } as SystemConfig, }); - expect(mocks.search.getDimensionSize).toHaveBeenCalledTimes(1); - expect(mocks.search.setDimensionSize).toHaveBeenCalledWith(768); + expect(mocks.database.getDimensionSize).toHaveBeenCalledTimes(1); + expect(mocks.database.setDimensionSize).toHaveBeenCalledWith(768); }); it('should clear embeddings if old and new models are different', async () => { - mocks.search.getDimensionSize.mockResolvedValue(512); + mocks.database.getDimensionSize.mockResolvedValue(512); await sut.onConfigUpdate({ newConfig: { @@ -139,9 +139,9 @@ describe(SmartInfoService.name, () => { } as SystemConfig, }); - expect(mocks.search.deleteAllSearchEmbeddings).toHaveBeenCalled(); - expect(mocks.search.getDimensionSize).toHaveBeenCalledTimes(1); - expect(mocks.search.setDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.deleteAllSearchEmbeddings).toHaveBeenCalled(); + expect(mocks.database.getDimensionSize).toHaveBeenCalledTimes(1); + expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); }); }); @@ -151,7 +151,7 @@ describe(SmartInfoService.name, () => { await sut.handleQueueEncodeClip({}); - expect(mocks.search.setDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); }); it('should queue the assets without clip embeddings', async () => { @@ -163,7 +163,7 @@ describe(SmartInfoService.name, () => { { name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } }, ]); expect(mocks.assetJob.streamForEncodeClip).toHaveBeenCalledWith(false); - expect(mocks.search.setDimensionSize).not.toHaveBeenCalled(); + expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); }); it('should queue all the assets', async () => { @@ -175,7 +175,7 @@ describe(SmartInfoService.name, () => { { name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } }, ]); expect(mocks.assetJob.streamForEncodeClip).toHaveBeenCalledWith(true); - expect(mocks.search.setDimensionSize).toHaveBeenCalledExactlyOnceWith(512); + expect(mocks.database.setDimensionSize).toHaveBeenCalledExactlyOnceWith(512); }); }); diff --git a/server/src/services/smart-info.service.ts b/server/src/services/smart-info.service.ts index f3702c2010..705e8ed2e5 100644 --- a/server/src/services/smart-info.service.ts +++ b/server/src/services/smart-info.service.ts @@ -38,7 +38,7 @@ export class SmartInfoService extends BaseService { await this.databaseRepository.withLock(DatabaseLock.CLIPDimSize, async () => { const { dimSize } = getCLIPModelInfo(newConfig.machineLearning.clip.modelName); - const dbDimSize = await this.searchRepository.getDimensionSize(); + const dbDimSize = await this.databaseRepository.getDimensionSize('smart_search'); this.logger.verbose(`Current database CLIP dimension size is ${dbDimSize}`); const modelChange = @@ -53,10 +53,10 @@ export class SmartInfoService extends BaseService { `Dimension size of model ${newConfig.machineLearning.clip.modelName} is ${dimSize}, but database expects ${dbDimSize}.`, ); this.logger.log(`Updating database CLIP dimension size to ${dimSize}.`); - await this.searchRepository.setDimensionSize(dimSize); + await this.databaseRepository.setDimensionSize(dimSize); this.logger.log(`Successfully updated database CLIP dimension size from ${dbDimSize} to ${dimSize}.`); } else { - await 
this.searchRepository.deleteAllSearchEmbeddings(); + await this.databaseRepository.deleteAllSearchEmbeddings(); } // TODO: A job to reindex all assets should be scheduled, though user @@ -74,7 +74,7 @@ export class SmartInfoService extends BaseService { if (force) { const { dimSize } = getCLIPModelInfo(machineLearning.clip.modelName); // in addition to deleting embeddings, update the dimension size in case it failed earlier - await this.searchRepository.setDimensionSize(dimSize); + await this.databaseRepository.setDimensionSize(dimSize); } let queue: JobItem[] = []; diff --git a/server/src/types.ts b/server/src/types.ts index 52a5266e42..9479a39eea 100644 --- a/server/src/types.ts +++ b/server/src/types.ts @@ -1,7 +1,7 @@ import { SystemConfig } from 'src/config'; +import { VECTOR_EXTENSIONS } from 'src/constants'; import { AssetType, - DatabaseExtension, DatabaseSslMode, ExifOrientation, ImageFormat, @@ -363,7 +363,7 @@ export type JobItem = // Version check | { name: JobName.VERSION_CHECK; data: IBaseJob }; -export type VectorExtension = DatabaseExtension.VECTOR | DatabaseExtension.VECTORS; +export type VectorExtension = (typeof VECTOR_EXTENSIONS)[number]; export type DatabaseConnectionURL = { connectionType: 'url'; diff --git a/server/src/utils/database.ts b/server/src/utils/database.ts index e0e7af49a4..40bf7503db 100644 --- a/server/src/utils/database.ts +++ b/server/src/utils/database.ts @@ -384,14 +384,28 @@ export function searchAssetBuilder(kysely: Kysely, options: AssetSearchBuild .$if(!options.withDeleted, (qb) => qb.where('assets.deletedAt', 'is', null)); } -type VectorIndexOptions = { vectorExtension: VectorExtension; table: string; indexName: string }; +export type ReindexVectorIndexOptions = { indexName: string; lists?: number }; -export function vectorIndexQuery({ vectorExtension, table, indexName }: VectorIndexOptions): string { +type VectorIndexQueryOptions = { table: string; vectorExtension: VectorExtension } & ReindexVectorIndexOptions; + +export function vectorIndexQuery({ vectorExtension, table, indexName, lists }: VectorIndexQueryOptions): string { switch (vectorExtension) { + case DatabaseExtension.VECTORCHORD: { + return ` + CREATE INDEX IF NOT EXISTS ${indexName} ON ${table} USING vchordrq (embedding vector_cosine_ops) WITH (options = $$ + residual_quantization = false + [build.internal] + lists = [${lists ?? 
1}] + spherical_centroids = true + build_threads = 4 + sampling_factor = 1024 + $$)`; + } case DatabaseExtension.VECTORS: { return ` CREATE INDEX IF NOT EXISTS ${indexName} ON ${table} USING vectors (embedding vector_cos_ops) WITH (options = $$ + optimizing.optimizing_threads = 4 [indexing.hnsw] m = 16 ef_construction = 300 diff --git a/server/test/medium.factory.ts b/server/test/medium.factory.ts index 6f4f46c075..8b730c2b41 100644 --- a/server/test/medium.factory.ts +++ b/server/test/medium.factory.ts @@ -170,7 +170,7 @@ export const getRepository = (key: K, db: Kys } case 'search': { - return new SearchRepository(db, new ConfigRepository()); + return new SearchRepository(db); } case 'session': { diff --git a/server/test/medium/globalSetup.ts b/server/test/medium/globalSetup.ts index 4398da5c0a..323d2c4a53 100644 --- a/server/test/medium/globalSetup.ts +++ b/server/test/medium/globalSetup.ts @@ -7,7 +7,7 @@ import { getKyselyConfig } from 'src/utils/database'; import { GenericContainer, Wait } from 'testcontainers'; const globalSetup = async () => { - const postgresContainer = await new GenericContainer('tensorchord/pgvecto-rs:pg14-v0.2.0') + const postgresContainer = await new GenericContainer('ghcr.io/immich-app/postgres:14') .withExposedPorts(5432) .withEnvironment({ POSTGRES_PASSWORD: 'postgres', @@ -17,9 +17,7 @@ const globalSetup = async () => { .withCommand([ 'postgres', '-c', - 'shared_preload_libraries=vectors.so', - '-c', - 'search_path="$$user", public, vectors', + 'shared_preload_libraries=vchord.so', '-c', 'max_wal_size=2GB', '-c', @@ -30,6 +28,8 @@ const globalSetup = async () => { 'full_page_writes=off', '-c', 'synchronous_commit=off', + '-c', + 'config_file=/var/lib/postgresql/data/postgresql.conf', ]) .withWaitStrategy(Wait.forAll([Wait.forLogMessage('database system is ready to accept connections', 2)])) .start(); diff --git a/server/test/repositories/database.repository.mock.ts b/server/test/repositories/database.repository.mock.ts index eeedf682de..171e04fe33 100644 --- a/server/test/repositories/database.repository.mock.ts +++ b/server/test/repositories/database.repository.mock.ts @@ -6,13 +6,17 @@ export const newDatabaseRepositoryMock = (): Mocked=14.0.0'), createExtension: vitest.fn().mockResolvedValue(void 0), updateVectorExtension: vitest.fn(), - reindex: vitest.fn(), - shouldReindex: vitest.fn(), + reindexVectorsIfNeeded: vitest.fn(), + getDimensionSize: vitest.fn(), + setDimensionSize: vitest.fn(), + deleteAllSearchEmbeddings: vitest.fn(), + prewarm: vitest.fn(), runMigrations: vitest.fn(), withLock: vitest.fn().mockImplementation((_, function_: () => Promise) => function_()), tryLock: vitest.fn(), From a6a4dfcfd3875fd28e852e05e2ee9941607e2f3d Mon Sep 17 00:00:00 2001 From: Mert <101130780+mertalev@users.noreply.github.com> Date: Tue, 20 May 2025 09:44:39 -0400 Subject: [PATCH 03/35] fix(server): queueing for duplicate detection (#18380) * fix queueing * update tests --- server/src/queries/asset.job.repository.sql | 31 +++---------------- .../src/repositories/asset-job.repository.ts | 24 +++++++------- server/src/services/duplicate.service.spec.ts | 22 +------------ server/src/services/duplicate.service.ts | 11 ++----- 4 files changed, 18 insertions(+), 70 deletions(-) diff --git a/server/src/queries/asset.job.repository.sql b/server/src/queries/asset.job.repository.sql index 577635a912..2301408ffe 100644 --- a/server/src/queries/asset.job.repository.sql +++ b/server/src/queries/asset.job.repository.sql @@ -8,30 +8,14 @@ select "duplicateId", "stackId", 
"visibility", - "smart_search"."embedding", - ( - select - coalesce(json_agg(agg), '[]') - from - ( - select - "asset_files"."id", - "asset_files"."path", - "asset_files"."type" - from - "asset_files" - where - "asset_files"."assetId" = "assets"."id" - and "asset_files"."type" = $1 - ) as agg - ) as "files" + "smart_search"."embedding" from "assets" left join "smart_search" on "assets"."id" = "smart_search"."assetId" where - "assets"."id" = $2::uuid + "assets"."id" = $1::uuid limit - $3 + $2 -- AssetJobRepository.getForSidecarWriteJob select @@ -199,18 +183,11 @@ select "assets"."id" from "assets" + inner join "smart_search" on "assets"."id" = "smart_search"."assetId" inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id" where "assets"."visibility" != $1 and "assets"."deletedAt" is null - and "job_status"."previewAt" is not null - and not exists ( - select - from - "smart_search" - where - "assetId" = "assets"."id" - ) and "job_status"."duplicatesDetectedAt" is null -- AssetJobRepository.streamForEncodeClip diff --git a/server/src/repositories/asset-job.repository.ts b/server/src/repositories/asset-job.repository.ts index 132bef6988..b9ce52962c 100644 --- a/server/src/repositories/asset-job.repository.ts +++ b/server/src/repositories/asset-job.repository.ts @@ -28,16 +28,7 @@ export class AssetJobRepository { .selectFrom('assets') .where('assets.id', '=', asUuid(id)) .leftJoin('smart_search', 'assets.id', 'smart_search.assetId') - .select((eb) => [ - 'id', - 'type', - 'ownerId', - 'duplicateId', - 'stackId', - 'visibility', - 'smart_search.embedding', - withFiles(eb, AssetFileType.PREVIEW), - ]) + .select(['id', 'type', 'ownerId', 'duplicateId', 'stackId', 'visibility', 'smart_search.embedding']) .limit(1) .executeTakeFirst(); } @@ -146,10 +137,17 @@ export class AssetJobRepository { @GenerateSql({ params: [], stream: true }) streamForSearchDuplicates(force?: boolean) { - return this.assetsWithPreviews() - .where((eb) => eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id')))) - .$if(!force, (qb) => qb.where('job_status.duplicatesDetectedAt', 'is', null)) + return this.db + .selectFrom('assets') .select(['assets.id']) + .where('assets.visibility', '!=', AssetVisibility.HIDDEN) + .where('assets.deletedAt', 'is', null) + .innerJoin('smart_search', 'assets.id', 'smart_search.assetId') + .$if(!force, (qb) => + qb + .innerJoin('asset_job_status as job_status', 'assetId', 'assets.id') + .where('job_status.duplicatesDetectedAt', 'is', null), + ) .stream(); } diff --git a/server/src/services/duplicate.service.spec.ts b/server/src/services/duplicate.service.spec.ts index 3f08e36a21..d23144babe 100644 --- a/server/src/services/duplicate.service.spec.ts +++ b/server/src/services/duplicate.service.spec.ts @@ -1,4 +1,4 @@ -import { AssetFileType, AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum'; +import { AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum'; import { DuplicateService } from 'src/services/duplicate.service'; import { SearchService } from 'src/services/search.service'; import { assetStub } from 'test/fixtures/asset.stub'; @@ -11,17 +11,6 @@ vitest.useFakeTimers(); const hasEmbedding = { id: 'asset-1', ownerId: 'user-id', - files: [ - { - assetId: 'asset-1', - createdAt: new Date(), - id: 'file-1', - path: 'preview.jpg', - type: AssetFileType.PREVIEW, - updatedAt: new Date(), - updateId: 'update-1', - }, - ], stackId: null, type: AssetType.IMAGE, duplicateId: null, @@ -218,15 +207,6 @@ 
describe(SearchService.name, () => { expect(mocks.logger.debug).toHaveBeenCalledWith(`Asset ${id} is not visible, skipping`); }); - it('should fail if asset is missing preview image', async () => { - mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue({ ...hasEmbedding, files: [] }); - - const result = await sut.handleSearchDuplicates({ id: assetStub.noResizePath.id }); - - expect(result).toBe(JobStatus.FAILED); - expect(mocks.logger.warn).toHaveBeenCalledWith(`Asset ${assetStub.noResizePath.id} is missing preview image`); - }); - it('should fail if asset is missing embedding', async () => { mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue({ ...hasEmbedding, embedding: null }); diff --git a/server/src/services/duplicate.service.ts b/server/src/services/duplicate.service.ts index b5e4f573f2..617f5c5d0d 100644 --- a/server/src/services/duplicate.service.ts +++ b/server/src/services/duplicate.service.ts @@ -4,11 +4,10 @@ import { OnJob } from 'src/decorators'; import { mapAsset } from 'src/dtos/asset-response.dto'; import { AuthDto } from 'src/dtos/auth.dto'; import { DuplicateResponseDto } from 'src/dtos/duplicate.dto'; -import { AssetFileType, AssetVisibility, JobName, JobStatus, QueueName } from 'src/enum'; +import { AssetVisibility, JobName, JobStatus, QueueName } from 'src/enum'; import { AssetDuplicateResult } from 'src/repositories/search.repository'; import { BaseService } from 'src/services/base.service'; import { JobItem, JobOf } from 'src/types'; -import { getAssetFile } from 'src/utils/asset.util'; import { isDuplicateDetectionEnabled } from 'src/utils/misc'; @Injectable() @@ -65,17 +64,11 @@ export class DuplicateService extends BaseService { return JobStatus.SKIPPED; } - if (asset.visibility == AssetVisibility.HIDDEN) { + if (asset.visibility === AssetVisibility.HIDDEN) { this.logger.debug(`Asset ${id} is not visible, skipping`); return JobStatus.SKIPPED; } - const previewFile = getAssetFile(asset.files || [], AssetFileType.PREVIEW); - if (!previewFile) { - this.logger.warn(`Asset ${id} is missing preview image`); - return JobStatus.FAILED; - } - if (!asset.embedding) { this.logger.debug(`Asset ${id} is missing embedding`); return JobStatus.FAILED; From 495a9598790e4be9750e7d6c5fc1b3e8acebef55 Mon Sep 17 00:00:00 2001 From: Daimolean <92239625+wuzihao051119@users.noreply.github.com> Date: Tue, 20 May 2025 22:00:20 +0800 Subject: [PATCH 04/35] fix(web): slide show in blurred background (#18384) Co-authored-by: Alex --- web/src/lib/components/asset-viewer/photo-viewer.svelte | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/src/lib/components/asset-viewer/photo-viewer.svelte b/web/src/lib/components/asset-viewer/photo-viewer.svelte index 564cef5308..b00be3c2f3 100644 --- a/web/src/lib/components/asset-viewer/photo-viewer.svelte +++ b/web/src/lib/components/asset-viewer/photo-viewer.svelte @@ -216,7 +216,7 @@ {/if} From 62f24a79f46b1d39d509b2ea15b1fda0258e8e3b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 15:14:47 +0100 Subject: [PATCH 05/35] chore(deps): update prom/prometheus docker digest to 78ed1f9 (#18381) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker/docker-compose.prod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/docker-compose.prod.yml b/docker/docker-compose.prod.yml index e17a034ddb..efe4271209 100644 --- a/docker/docker-compose.prod.yml +++ b/docker/docker-compose.prod.yml @@ -82,7 
+82,7 @@ services: container_name: immich_prometheus ports: - 9090:9090 - image: prom/prometheus@sha256:e2b8aa62b64855956e3ec1e18b4f9387fb6203174a4471936f4662f437f04405 + image: prom/prometheus@sha256:78ed1f9050eb9eaf766af6e580230b1c4965728650e332cd1ee918c0c4699775 volumes: - ./prometheus.yml:/etc/prometheus/prometheus.yml - prometheus-data:/prometheus From 965498d19b1cf5dbceb356cc6924af66b3d0da8e Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 15:14:59 +0100 Subject: [PATCH 06/35] chore(deps): update node.js to v22.15.1 (#18388) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/.nvmrc | 2 +- cli/.nvmrc | 2 +- cli/package.json | 2 +- docs/.nvmrc | 2 +- docs/package.json | 2 +- e2e/.nvmrc | 2 +- e2e/package.json | 2 +- open-api/typescript-sdk/.nvmrc | 2 +- open-api/typescript-sdk/package.json | 2 +- server/.nvmrc | 2 +- server/package.json | 2 +- web/.nvmrc | 2 +- web/package.json | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/.nvmrc b/.github/.nvmrc index b8ffd70759..8320a6d299 100644 --- a/.github/.nvmrc +++ b/.github/.nvmrc @@ -1 +1 @@ -22.15.0 +22.15.1 diff --git a/cli/.nvmrc b/cli/.nvmrc index b8ffd70759..8320a6d299 100644 --- a/cli/.nvmrc +++ b/cli/.nvmrc @@ -1 +1 @@ -22.15.0 +22.15.1 diff --git a/cli/package.json b/cli/package.json index 74a97ccaec..320f815499 100644 --- a/cli/package.json +++ b/cli/package.json @@ -69,6 +69,6 @@ "micromatch": "^4.0.8" }, "volta": { - "node": "22.15.0" + "node": "22.15.1" } } diff --git a/docs/.nvmrc b/docs/.nvmrc index b8ffd70759..8320a6d299 100644 --- a/docs/.nvmrc +++ b/docs/.nvmrc @@ -1 +1 @@ -22.15.0 +22.15.1 diff --git a/docs/package.json b/docs/package.json index b20303c4ab..05ca51d6f4 100644 --- a/docs/package.json +++ b/docs/package.json @@ -57,6 +57,6 @@ "node": ">=20" }, "volta": { - "node": "22.15.0" + "node": "22.15.1" } } diff --git a/e2e/.nvmrc b/e2e/.nvmrc index b8ffd70759..8320a6d299 100644 --- a/e2e/.nvmrc +++ b/e2e/.nvmrc @@ -1 +1 @@ -22.15.0 +22.15.1 diff --git a/e2e/package.json b/e2e/package.json index b792d1aaf6..deb0492302 100644 --- a/e2e/package.json +++ b/e2e/package.json @@ -52,6 +52,6 @@ "vitest": "^3.0.0" }, "volta": { - "node": "22.15.0" + "node": "22.15.1" } } diff --git a/open-api/typescript-sdk/.nvmrc b/open-api/typescript-sdk/.nvmrc index b8ffd70759..8320a6d299 100644 --- a/open-api/typescript-sdk/.nvmrc +++ b/open-api/typescript-sdk/.nvmrc @@ -1 +1 @@ -22.15.0 +22.15.1 diff --git a/open-api/typescript-sdk/package.json b/open-api/typescript-sdk/package.json index 3daaa27f78..90b8b6a0b3 100644 --- a/open-api/typescript-sdk/package.json +++ b/open-api/typescript-sdk/package.json @@ -28,6 +28,6 @@ "directory": "open-api/typescript-sdk" }, "volta": { - "node": "22.15.0" + "node": "22.15.1" } } diff --git a/server/.nvmrc b/server/.nvmrc index b8ffd70759..8320a6d299 100644 --- a/server/.nvmrc +++ b/server/.nvmrc @@ -1 +1 @@ -22.15.0 +22.15.1 diff --git a/server/package.json b/server/package.json index a9336059ee..a61e9443db 100644 --- a/server/package.json +++ b/server/package.json @@ -154,7 +154,7 @@ "vitest": "^3.0.0" }, "volta": { - "node": "22.15.0" + "node": "22.15.1" }, "overrides": { "sharp": "^0.34.0" diff --git a/web/.nvmrc b/web/.nvmrc index b8ffd70759..8320a6d299 100644 --- a/web/.nvmrc +++ b/web/.nvmrc @@ -1 +1 @@ -22.15.0 +22.15.1 diff --git a/web/package.json b/web/package.json index 99df56b7f0..e61c3919ee 100644 --- a/web/package.json +++ b/web/package.json @@ 
-100,6 +100,6 @@ "vitest": "^3.0.0" }, "volta": { - "node": "22.15.0" + "node": "22.15.1" } } From d1e6682df04fab9879fccaf439824732e6586607 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 15:15:10 +0100 Subject: [PATCH 07/35] chore(deps): update dependency @types/node to ^22.15.18 (#18387) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- cli/package-lock.json | 10 +++++----- cli/package.json | 2 +- e2e/package-lock.json | 12 ++++++------ e2e/package.json | 2 +- open-api/typescript-sdk/package-lock.json | 8 ++++---- open-api/typescript-sdk/package.json | 2 +- server/package-lock.json | 8 ++++---- server/package.json | 2 +- 8 files changed, 23 insertions(+), 23 deletions(-) diff --git a/cli/package-lock.json b/cli/package-lock.json index bc4a710b46..8680ae54bd 100644 --- a/cli/package-lock.json +++ b/cli/package-lock.json @@ -27,7 +27,7 @@ "@types/lodash-es": "^4.17.12", "@types/micromatch": "^4.0.9", "@types/mock-fs": "^4.13.1", - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "@vitest/coverage-v8": "^3.0.0", "byte-size": "^9.0.0", "cli-progress": "^3.12.0", @@ -61,7 +61,7 @@ "@oazapfts/runtime": "^1.0.2" }, "devDependencies": { - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "typescript": "^5.3.3" } }, @@ -1372,9 +1372,9 @@ } }, "node_modules/@types/node": { - "version": "22.15.17", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.17.tgz", - "integrity": "sha512-wIX2aSZL5FE+MR0JlvF87BNVrtFWf6AE6rxSE9X7OwnVvoyCQjpzSRJ+M87se/4QCkCiebQAqrJ0y6fwIyi7nw==", + "version": "22.15.19", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.19.tgz", + "integrity": "sha512-3vMNr4TzNQyjHcRZadojpRaD9Ofr6LsonZAoQ+HMUa/9ORTPoxVIw0e0mpqWpdjj8xybyCM+oKOUH2vwFu/oEw==", "dev": true, "license": "MIT", "dependencies": { diff --git a/cli/package.json b/cli/package.json index 320f815499..40c19c91b1 100644 --- a/cli/package.json +++ b/cli/package.json @@ -21,7 +21,7 @@ "@types/lodash-es": "^4.17.12", "@types/micromatch": "^4.0.9", "@types/mock-fs": "^4.13.1", - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "@vitest/coverage-v8": "^3.0.0", "byte-size": "^9.0.0", "cli-progress": "^3.12.0", diff --git a/e2e/package-lock.json b/e2e/package-lock.json index eb0de90a39..cbce017e08 100644 --- a/e2e/package-lock.json +++ b/e2e/package-lock.json @@ -15,7 +15,7 @@ "@immich/sdk": "file:../open-api/typescript-sdk", "@playwright/test": "^1.44.1", "@types/luxon": "^3.4.2", - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "@types/oidc-provider": "^8.5.1", "@types/pg": "^8.15.1", "@types/pngjs": "^6.0.4", @@ -66,7 +66,7 @@ "@types/lodash-es": "^4.17.12", "@types/micromatch": "^4.0.9", "@types/mock-fs": "^4.13.1", - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "@vitest/coverage-v8": "^3.0.0", "byte-size": "^9.0.0", "cli-progress": "^3.12.0", @@ -100,7 +100,7 @@ "@oazapfts/runtime": "^1.0.2" }, "devDependencies": { - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "typescript": "^5.3.3" } }, @@ -1593,9 +1593,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.15.17", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.17.tgz", - "integrity": "sha512-wIX2aSZL5FE+MR0JlvF87BNVrtFWf6AE6rxSE9X7OwnVvoyCQjpzSRJ+M87se/4QCkCiebQAqrJ0y6fwIyi7nw==", + "version": "22.15.19", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.19.tgz", + "integrity": 
"sha512-3vMNr4TzNQyjHcRZadojpRaD9Ofr6LsonZAoQ+HMUa/9ORTPoxVIw0e0mpqWpdjj8xybyCM+oKOUH2vwFu/oEw==", "dev": true, "license": "MIT", "dependencies": { diff --git a/e2e/package.json b/e2e/package.json index deb0492302..fc0196fb99 100644 --- a/e2e/package.json +++ b/e2e/package.json @@ -25,7 +25,7 @@ "@immich/sdk": "file:../open-api/typescript-sdk", "@playwright/test": "^1.44.1", "@types/luxon": "^3.4.2", - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "@types/oidc-provider": "^8.5.1", "@types/pg": "^8.15.1", "@types/pngjs": "^6.0.4", diff --git a/open-api/typescript-sdk/package-lock.json b/open-api/typescript-sdk/package-lock.json index 9abec7f0a8..542f67d62e 100644 --- a/open-api/typescript-sdk/package-lock.json +++ b/open-api/typescript-sdk/package-lock.json @@ -12,7 +12,7 @@ "@oazapfts/runtime": "^1.0.2" }, "devDependencies": { - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "typescript": "^5.3.3" } }, @@ -23,9 +23,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.15.17", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.17.tgz", - "integrity": "sha512-wIX2aSZL5FE+MR0JlvF87BNVrtFWf6AE6rxSE9X7OwnVvoyCQjpzSRJ+M87se/4QCkCiebQAqrJ0y6fwIyi7nw==", + "version": "22.15.19", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.19.tgz", + "integrity": "sha512-3vMNr4TzNQyjHcRZadojpRaD9Ofr6LsonZAoQ+HMUa/9ORTPoxVIw0e0mpqWpdjj8xybyCM+oKOUH2vwFu/oEw==", "dev": true, "license": "MIT", "dependencies": { diff --git a/open-api/typescript-sdk/package.json b/open-api/typescript-sdk/package.json index 90b8b6a0b3..a5d4a1592b 100644 --- a/open-api/typescript-sdk/package.json +++ b/open-api/typescript-sdk/package.json @@ -19,7 +19,7 @@ "@oazapfts/runtime": "^1.0.2" }, "devDependencies": { - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "typescript": "^5.3.3" }, "repository": { diff --git a/server/package-lock.json b/server/package-lock.json index 3f00bb575c..bf70dd5f0a 100644 --- a/server/package-lock.json +++ b/server/package-lock.json @@ -92,7 +92,7 @@ "@types/lodash": "^4.14.197", "@types/mock-fs": "^4.13.1", "@types/multer": "^1.4.7", - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "@types/nodemailer": "^6.4.14", "@types/picomatch": "^4.0.0", "@types/pngjs": "^6.0.5", @@ -5439,9 +5439,9 @@ } }, "node_modules/@types/node": { - "version": "22.15.17", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.17.tgz", - "integrity": "sha512-wIX2aSZL5FE+MR0JlvF87BNVrtFWf6AE6rxSE9X7OwnVvoyCQjpzSRJ+M87se/4QCkCiebQAqrJ0y6fwIyi7nw==", + "version": "22.15.19", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.19.tgz", + "integrity": "sha512-3vMNr4TzNQyjHcRZadojpRaD9Ofr6LsonZAoQ+HMUa/9ORTPoxVIw0e0mpqWpdjj8xybyCM+oKOUH2vwFu/oEw==", "license": "MIT", "dependencies": { "undici-types": "~6.21.0" diff --git a/server/package.json b/server/package.json index a61e9443db..b588aa153a 100644 --- a/server/package.json +++ b/server/package.json @@ -117,7 +117,7 @@ "@types/lodash": "^4.14.197", "@types/mock-fs": "^4.13.1", "@types/multer": "^1.4.7", - "@types/node": "^22.15.16", + "@types/node": "^22.15.18", "@types/nodemailer": "^6.4.14", "@types/picomatch": "^4.0.0", "@types/pngjs": "^6.0.5", From 4e2fc9f0175765d5065ade1a60d405222739cf56 Mon Sep 17 00:00:00 2001 From: Alex Date: Tue, 20 May 2025 09:39:05 -0500 Subject: [PATCH 08/35] chore: remove PIN code from secure storage on logged out (#18393) --- mobile/lib/providers/auth.provider.dart | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) 
diff --git a/mobile/lib/providers/auth.provider.dart b/mobile/lib/providers/auth.provider.dart index 8c783395cd..5207858f99 100644 --- a/mobile/lib/providers/auth.provider.dart +++ b/mobile/lib/providers/auth.provider.dart @@ -1,6 +1,7 @@ import 'package:flutter/foundation.dart'; import 'package:flutter_udid/flutter_udid.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/constants/constants.dart'; import 'package:immich_mobile/domain/models/store.model.dart'; import 'package:immich_mobile/domain/models/user.model.dart'; import 'package:immich_mobile/domain/services/user.service.dart'; @@ -11,6 +12,7 @@ import 'package:immich_mobile/providers/api.provider.dart'; import 'package:immich_mobile/providers/infrastructure/user.provider.dart'; import 'package:immich_mobile/services/api.service.dart'; import 'package:immich_mobile/services/auth.service.dart'; +import 'package:immich_mobile/services/secure_storage.service.dart'; import 'package:immich_mobile/utils/hash.dart'; import 'package:logging/logging.dart'; import 'package:openapi/api.dart'; @@ -20,6 +22,7 @@ final authProvider = StateNotifierProvider((ref) { ref.watch(authServiceProvider), ref.watch(apiServiceProvider), ref.watch(userServiceProvider), + ref.watch(secureStorageServiceProvider), ); }); @@ -27,12 +30,17 @@ class AuthNotifier extends StateNotifier { final AuthService _authService; final ApiService _apiService; final UserService _userService; + final SecureStorageService _secureStorageService; final _log = Logger("AuthenticationNotifier"); static const Duration _timeoutDuration = Duration(seconds: 7); - AuthNotifier(this._authService, this._apiService, this._userService) - : super( + AuthNotifier( + this._authService, + this._apiService, + this._userService, + this._secureStorageService, + ) : super( AuthState( deviceId: "", userId: "", @@ -67,6 +75,7 @@ class AuthNotifier extends StateNotifier { Future logout() async { try { + await _secureStorageService.delete(kSecuredPinCode); await _authService.logout(); } finally { await _cleanUp(); From e7b60a927878caf7014a07c43b350232f6bb3910 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 16:12:17 +0100 Subject: [PATCH 09/35] chore(deps): update github-actions (#18246) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/actions/image-build/action.yml | 2 +- .github/workflows/cli.yml | 2 +- .github/workflows/docs-deploy.yml | 6 +++--- .github/workflows/docs-destroy.yml | 2 +- .github/workflows/multi-runner-build.yml | 2 +- .github/workflows/test.yml | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/actions/image-build/action.yml b/.github/actions/image-build/action.yml index ee23bd8ba8..a4168dcd5a 100644 --- a/.github/actions/image-build/action.yml +++ b/.github/actions/image-build/action.yml @@ -84,7 +84,7 @@ runs: - name: Build and push image id: build - uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 + uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0 with: context: ${{ inputs.context }} file: ${{ inputs.dockerfile }} diff --git a/.github/workflows/cli.yml b/.github/workflows/cli.yml index 4e0bf12fdc..74f5970139 100644 --- a/.github/workflows/cli.yml +++ b/.github/workflows/cli.yml @@ -96,7 +96,7 @@ jobs: type=raw,value=latest,enable=${{ github.event_name == 'release' }} - name: Build and push image - uses: 
docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0 + uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0 with: file: cli/Dockerfile platforms: linux/amd64,linux/arm64 diff --git a/.github/workflows/docs-deploy.yml b/.github/workflows/docs-deploy.yml index 73c5d5945a..c04adbafc6 100644 --- a/.github/workflows/docs-deploy.yml +++ b/.github/workflows/docs-deploy.yml @@ -150,7 +150,7 @@ jobs: CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} - uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2.1.5 + uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 with: tg_version: '0.58.12' tofu_version: '1.7.1' @@ -165,7 +165,7 @@ jobs: CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} - uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2.1.5 + uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 with: tg_version: '0.58.12' tofu_version: '1.7.1' @@ -199,7 +199,7 @@ jobs: CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} - uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2.1.5 + uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 with: tg_version: '0.58.12' tofu_version: '1.7.1' diff --git a/.github/workflows/docs-destroy.yml b/.github/workflows/docs-destroy.yml index 778cba77e1..cd095b117f 100644 --- a/.github/workflows/docs-destroy.yml +++ b/.github/workflows/docs-destroy.yml @@ -25,7 +25,7 @@ jobs: CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} - uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2.1.5 + uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 with: tg_version: '0.58.12' tofu_version: '1.7.1' diff --git a/.github/workflows/multi-runner-build.yml b/.github/workflows/multi-runner-build.yml index 17eceb7e8f..f6d7c12355 100644 --- a/.github/workflows/multi-runner-build.yml +++ b/.github/workflows/multi-runner-build.yml @@ -115,7 +115,7 @@ jobs: packages: write steps: - name: Download digests - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 with: path: ${{ runner.temp }}/digests pattern: ${{ needs.matrix.outputs.key }}-* diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 6c1cb8e07e..e6aecdb403 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -643,7 +643,7 @@ jobs: contents: read services: postgres: - image: ghcr.io/immich-app/postgres:14 + image: ghcr.io/immich-app/postgres:14@sha256:14bec5d02e8704081eafd566029204a4eb6bb75f3056cfb34e81c5ab1657a490 env: POSTGRES_PASSWORD: postgres POSTGRES_USER: postgres From 895e0eacfebf41ea6f0ab0bacfbbcec6775ca992 Mon Sep 17 00:00:00 2001 From: Alex Date: Tue, 20 May 2025 10:37:10 -0500 Subject: [PATCH 10/35] refactor: slide-show settings 
(#18394) --- .../asset-viewer/slideshow-bar.svelte | 68 +++++++++++++++---- 1 file changed, 54 insertions(+), 14 deletions(-) diff --git a/web/src/lib/components/asset-viewer/slideshow-bar.svelte b/web/src/lib/components/asset-viewer/slideshow-bar.svelte index 327227f0b4..949f069caf 100644 --- a/web/src/lib/components/asset-viewer/slideshow-bar.svelte +++ b/web/src/lib/components/asset-viewer/slideshow-bar.svelte @@ -1,10 +1,10 @@ - + - (progressBarStatus === ProgressBarStatus.Paused ? progressBar?.play() : progressBar?.pause())} - title={progressBarStatus === ProgressBarStatus.Paused ? $t('play') : $t('pause')} + aria-label={progressBarStatus === ProgressBarStatus.Paused ? $t('play') : $t('pause')} + class="text-white" /> - - - + + (showSettings = !showSettings)} - title={$t('slideshow_settings')} + onclick={onSettingToggled} + aria-label={$t('slideshow_settings')} + class="text-white" /> {#if !isFullScreen} - {/if} From bdf19ce3316cca170ab6d85855b140202ffb62a4 Mon Sep 17 00:00:00 2001 From: Alex Date: Tue, 20 May 2025 10:53:34 -0500 Subject: [PATCH 11/35] fix: TimelineAsset visibility (#18395) * fix: TimelineAsset visibility * fix enum values --- mobile/lib/entities/asset.entity.dart | 10 +-- mobile/lib/utils/openapi_patching.dart | 1 - .../openapi/lib/model/asset_response_dto.dart | 84 +------------------ open-api/immich-openapi-specs.json | 12 ++- open-api/typescript-sdk/src/fetch-client.ts | 8 +- server/src/dtos/asset-response.dto.ts | 1 + .../actions/set-visibility-action.svelte | 4 +- .../asset-viewer/asset-viewer-nav-bar.svelte | 4 +- .../actions/set-visibility-action.svelte | 6 +- web/src/lib/utils/timeline-util.ts | 5 +- web/src/routes/auth/pin-prompt/+page.svelte | 2 +- web/src/test-data/factories/asset-factory.ts | 10 +-- 12 files changed, 27 insertions(+), 120 deletions(-) diff --git a/mobile/lib/entities/asset.entity.dart b/mobile/lib/entities/asset.entity.dart index 9119d96a63..d8d2bd23c3 100644 --- a/mobile/lib/entities/asset.entity.dart +++ b/mobile/lib/entities/asset.entity.dart @@ -554,15 +554,15 @@ class Asset { }"""; } - static getVisibility(AssetResponseDtoVisibilityEnum visibility) { + static getVisibility(AssetVisibility visibility) { switch (visibility) { - case AssetResponseDtoVisibilityEnum.timeline: + case AssetVisibility.timeline: return AssetVisibilityEnum.timeline; - case AssetResponseDtoVisibilityEnum.archive: + case AssetVisibility.archive: return AssetVisibilityEnum.archive; - case AssetResponseDtoVisibilityEnum.hidden: + case AssetVisibility.hidden: return AssetVisibilityEnum.hidden; - case AssetResponseDtoVisibilityEnum.locked: + case AssetVisibility.locked: return AssetVisibilityEnum.locked; } } diff --git a/mobile/lib/utils/openapi_patching.dart b/mobile/lib/utils/openapi_patching.dart index 1ffe05c781..7c7d9bab88 100644 --- a/mobile/lib/utils/openapi_patching.dart +++ b/mobile/lib/utils/openapi_patching.dart @@ -29,7 +29,6 @@ dynamic upgradeDto(dynamic value, String targetType) { case 'UserResponseDto': if (value is Map) { addDefault(value, 'profileChangedAt', DateTime.now().toIso8601String()); - addDefault(value, 'visibility', AssetVisibility.timeline); } break; case 'AssetResponseDto': diff --git a/mobile/openapi/lib/model/asset_response_dto.dart b/mobile/openapi/lib/model/asset_response_dto.dart index 74af8bd1eb..3d85b779cc 100644 --- a/mobile/openapi/lib/model/asset_response_dto.dart +++ b/mobile/openapi/lib/model/asset_response_dto.dart @@ -133,7 +133,7 @@ class AssetResponseDto { DateTime updatedAt; - AssetResponseDtoVisibilityEnum visibility; + 
AssetVisibility visibility; @override bool operator ==(Object other) => identical(this, other) || other is AssetResponseDto && @@ -318,7 +318,7 @@ class AssetResponseDto { type: AssetTypeEnum.fromJson(json[r'type'])!, unassignedFaces: AssetFaceWithoutPersonResponseDto.listFromJson(json[r'unassignedFaces']), updatedAt: mapDateTime(json, r'updatedAt', r'')!, - visibility: AssetResponseDtoVisibilityEnum.fromJson(json[r'visibility'])!, + visibility: AssetVisibility.fromJson(json[r'visibility'])!, ); } return null; @@ -389,83 +389,3 @@ class AssetResponseDto { }; } - -class AssetResponseDtoVisibilityEnum { - /// Instantiate a new enum with the provided [value]. - const AssetResponseDtoVisibilityEnum._(this.value); - - /// The underlying value of this enum member. - final String value; - - @override - String toString() => value; - - String toJson() => value; - - static const archive = AssetResponseDtoVisibilityEnum._(r'archive'); - static const timeline = AssetResponseDtoVisibilityEnum._(r'timeline'); - static const hidden = AssetResponseDtoVisibilityEnum._(r'hidden'); - static const locked = AssetResponseDtoVisibilityEnum._(r'locked'); - - /// List of all possible values in this [enum][AssetResponseDtoVisibilityEnum]. - static const values = [ - archive, - timeline, - hidden, - locked, - ]; - - static AssetResponseDtoVisibilityEnum? fromJson(dynamic value) => AssetResponseDtoVisibilityEnumTypeTransformer().decode(value); - - static List listFromJson(dynamic json, {bool growable = false,}) { - final result = []; - if (json is List && json.isNotEmpty) { - for (final row in json) { - final value = AssetResponseDtoVisibilityEnum.fromJson(row); - if (value != null) { - result.add(value); - } - } - } - return result.toList(growable: growable); - } -} - -/// Transformation class that can [encode] an instance of [AssetResponseDtoVisibilityEnum] to String, -/// and [decode] dynamic data back to [AssetResponseDtoVisibilityEnum]. -class AssetResponseDtoVisibilityEnumTypeTransformer { - factory AssetResponseDtoVisibilityEnumTypeTransformer() => _instance ??= const AssetResponseDtoVisibilityEnumTypeTransformer._(); - - const AssetResponseDtoVisibilityEnumTypeTransformer._(); - - String encode(AssetResponseDtoVisibilityEnum data) => data.value; - - /// Decodes a [dynamic value][data] to a AssetResponseDtoVisibilityEnum. - /// - /// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully, - /// then null is returned. However, if [allowNull] is false and the [dynamic value][data] - /// cannot be decoded successfully, then an [UnimplementedError] is thrown. - /// - /// The [allowNull] is very handy when an API changes and a new enum value is added or removed, - /// and users are still using an old app with the old code. - AssetResponseDtoVisibilityEnum? decode(dynamic data, {bool allowNull = true}) { - if (data != null) { - switch (data) { - case r'archive': return AssetResponseDtoVisibilityEnum.archive; - case r'timeline': return AssetResponseDtoVisibilityEnum.timeline; - case r'hidden': return AssetResponseDtoVisibilityEnum.hidden; - case r'locked': return AssetResponseDtoVisibilityEnum.locked; - default: - if (!allowNull) { - throw ArgumentError('Unknown enum value to decode: $data'); - } - } - } - return null; - } - - /// Singleton [AssetResponseDtoVisibilityEnumTypeTransformer] instance. - static AssetResponseDtoVisibilityEnumTypeTransformer? 
_instance; -} - - diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index 8d21c3ef90..2a8555f82c 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -9289,13 +9289,11 @@ "type": "string" }, "visibility": { - "enum": [ - "archive", - "timeline", - "hidden", - "locked" - ], - "type": "string" + "allOf": [ + { + "$ref": "#/components/schemas/AssetVisibility" + } + ] } }, "required": [ diff --git a/open-api/typescript-sdk/src/fetch-client.ts b/open-api/typescript-sdk/src/fetch-client.ts index 5358cdfec9..c27c9bc194 100644 --- a/open-api/typescript-sdk/src/fetch-client.ts +++ b/open-api/typescript-sdk/src/fetch-client.ts @@ -329,7 +329,7 @@ export type AssetResponseDto = { "type": AssetTypeEnum; unassignedFaces?: AssetFaceWithoutPersonResponseDto[]; updatedAt: string; - visibility: Visibility; + visibility: AssetVisibility; }; export type AlbumResponseDto = { albumName: string; @@ -3675,12 +3675,6 @@ export enum AssetTypeEnum { Audio = "AUDIO", Other = "OTHER" } -export enum Visibility { - Archive = "archive", - Timeline = "timeline", - Hidden = "hidden", - Locked = "locked" -} export enum AssetOrder { Asc = "asc", Desc = "desc" diff --git a/server/src/dtos/asset-response.dto.ts b/server/src/dtos/asset-response.dto.ts index 4c1f2571e8..9bbfb450b2 100644 --- a/server/src/dtos/asset-response.dto.ts +++ b/server/src/dtos/asset-response.dto.ts @@ -44,6 +44,7 @@ export class AssetResponseDto extends SanitizedAssetResponseDto { isArchived!: boolean; isTrashed!: boolean; isOffline!: boolean; + @ApiProperty({ enum: AssetVisibility, enumName: 'AssetVisibility' }) visibility!: AssetVisibility; exifInfo?: ExifResponseDto; tags?: TagResponseDto[]; diff --git a/web/src/lib/components/asset-viewer/actions/set-visibility-action.svelte b/web/src/lib/components/asset-viewer/actions/set-visibility-action.svelte index 91db84b172..dff470f456 100644 --- a/web/src/lib/components/asset-viewer/actions/set-visibility-action.svelte +++ b/web/src/lib/components/asset-viewer/actions/set-visibility-action.svelte @@ -6,7 +6,7 @@ import type { TimelineAsset } from '$lib/stores/assets-store.svelte'; import { handleError } from '$lib/utils/handle-error'; import { AssetVisibility, updateAssets } from '@immich/sdk'; - import { mdiEyeOffOutline, mdiFolderMoveOutline } from '@mdi/js'; + import { mdiLockOpenVariantOutline, mdiLockOutline } from '@mdi/js'; import { t } from 'svelte-i18n'; import type { OnAction, PreAction } from './action'; @@ -57,5 +57,5 @@ toggleLockedVisibility()} text={isLocked ? $t('move_off_locked_folder') : $t('add_to_locked_folder')} - icon={isLocked ? mdiFolderMoveOutline : mdiEyeOffOutline} + icon={isLocked ? mdiLockOpenVariantOutline : mdiLockOutline} /> diff --git a/web/src/lib/components/asset-viewer/asset-viewer-nav-bar.svelte b/web/src/lib/components/asset-viewer/asset-viewer-nav-bar.svelte index 70600e6208..19705f05b6 100644 --- a/web/src/lib/components/asset-viewer/asset-viewer-nav-bar.svelte +++ b/web/src/lib/components/asset-viewer/asset-viewer-nav-bar.svelte @@ -29,7 +29,7 @@ import { AssetJobName, AssetTypeEnum, - Visibility, + AssetVisibility, type AlbumResponseDto, type AssetResponseDto, type PersonResponseDto, @@ -94,7 +94,7 @@ const sharedLink = getSharedLink(); let isOwner = $derived($user && asset.ownerId === $user?.id); let showDownloadButton = $derived(sharedLink ? 
sharedLink.allowDownload : !asset.isOffline); - let isLocked = $derived(asset.visibility === Visibility.Locked); + let isLocked = $derived(asset.visibility === AssetVisibility.Locked); // $: showEditorButton = // isOwner && diff --git a/web/src/lib/components/photos-page/actions/set-visibility-action.svelte b/web/src/lib/components/photos-page/actions/set-visibility-action.svelte index c11ba114ce..407a92fadc 100644 --- a/web/src/lib/components/photos-page/actions/set-visibility-action.svelte +++ b/web/src/lib/components/photos-page/actions/set-visibility-action.svelte @@ -7,7 +7,7 @@ import { handleError } from '$lib/utils/handle-error'; import { AssetVisibility, updateAssets } from '@immich/sdk'; import { Button } from '@immich/ui'; - import { mdiEyeOffOutline, mdiFolderMoveOutline } from '@mdi/js'; + import { mdiLockOpenVariantOutline, mdiLockOutline } from '@mdi/js'; import { t } from 'svelte-i18n'; interface Props { @@ -56,11 +56,11 @@ {:else} + {:else} diff --git a/web/src/test-data/factories/asset-factory.ts b/web/src/test-data/factories/asset-factory.ts index e36bec6c4e..f68c3a1a1a 100644 --- a/web/src/test-data/factories/asset-factory.ts +++ b/web/src/test-data/factories/asset-factory.ts @@ -1,12 +1,6 @@ import type { TimelineAsset } from '$lib/stores/assets-store.svelte'; import { faker } from '@faker-js/faker'; -import { - AssetTypeEnum, - AssetVisibility, - Visibility, - type AssetResponseDto, - type TimeBucketAssetResponseDto, -} from '@immich/sdk'; +import { AssetTypeEnum, AssetVisibility, type AssetResponseDto, type TimeBucketAssetResponseDto } from '@immich/sdk'; import { Sync } from 'factory.ts'; export const assetFactory = Sync.makeFactory({ @@ -31,7 +25,7 @@ export const assetFactory = Sync.makeFactory({ checksum: Sync.each(() => faker.string.alphanumeric(28)), isOffline: Sync.each(() => faker.datatype.boolean()), hasMetadata: Sync.each(() => faker.datatype.boolean()), - visibility: Visibility.Timeline, + visibility: AssetVisibility.Timeline, }); export const timelineAssetFactory = Sync.makeFactory({ From c05aa445d8a0ad1b3b679bc909c4b8ad3f993c9d Mon Sep 17 00:00:00 2001 From: Alex Date: Tue, 20 May 2025 11:22:30 -0500 Subject: [PATCH 12/35] fix: location search result z-index (#18379) --- .../lib/components/shared-components/change-location.svelte | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/src/lib/components/shared-components/change-location.svelte b/web/src/lib/components/shared-components/change-location.svelte index 56e586456d..13f1c72da1 100644 --- a/web/src/lib/components/shared-components/change-location.svelte +++ b/web/src/lib/components/shared-components/change-location.svelte @@ -118,7 +118,7 @@ > {#snippet promptSnippet()}

-
+
{#if suggestionContainer}
{$t('pick_a_location')} -
+
{#await import('../shared-components/map/map.svelte')} {#await delay(timeToLoadTheMap) then} From 53420b7c0271348b7770107f9490cdf6e77917f9 Mon Sep 17 00:00:00 2001 From: Alex Date: Tue, 20 May 2025 11:30:27 -0500 Subject: [PATCH 13/35] chore: notification panel style tweak (#18398) --- .../navigation-bar/notification-panel.svelte | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/src/lib/components/shared-components/navigation-bar/notification-panel.svelte b/web/src/lib/components/shared-components/navigation-bar/notification-panel.svelte index f76f187ad9..aafa8377fd 100644 --- a/web/src/lib/components/shared-components/navigation-bar/notification-panel.svelte +++ b/web/src/lib/components/shared-components/navigation-bar/notification-panel.svelte @@ -39,7 +39,7 @@ in:fade={{ duration: 100 }} out:fade={{ duration: 100 }} id="notification-panel" - class="absolute right-[25px] top-[70px] z-1 w-[min(360px,100vw-50px)] rounded-3xl bg-gray-100 border border-gray-200 shadow-lg dark:border dark:border-light dark:bg-immich-dark-gray text-light px-2" + class="absolute right-[25px] top-[70px] z-1 w-[min(360px,100vw-50px)] rounded-3xl bg-gray-100 border border-gray-200 shadow-lg dark:border dark:border-light dark:bg-immich-dark-gray text-light" use:focusTrap > @@ -57,7 +57,7 @@
-
+
{#if noUnreadNotifications} Date: Tue, 20 May 2025 19:52:23 +0200 Subject: [PATCH 14/35] fix: map (#18399) --- .../components/album-page/album-map.svelte | 56 +++++++++---------- .../shared-components/map/map.svelte | 10 ++-- 2 files changed, 31 insertions(+), 35 deletions(-) diff --git a/web/src/lib/components/album-page/album-map.svelte b/web/src/lib/components/album-page/album-map.svelte index fd96cf8b64..d668f65d0e 100644 --- a/web/src/lib/components/album-page/album-map.svelte +++ b/web/src/lib/components/album-page/album-map.svelte @@ -1,7 +1,5 @@ + +{#if castManager.availableDestinations.length > 0 && castManager.availableDestinations[0].type === CastDestinationType.GCAST} + {#if navBar} + void GCastDestination.showCastDialog()} + aria-label={$t('cast')} + /> + {:else} + + {/if} +{/if} diff --git a/web/src/lib/components/album-page/album-viewer.svelte b/web/src/lib/components/album-page/album-viewer.svelte index 887c3a81e4..227fb999b8 100644 --- a/web/src/lib/components/album-page/album-viewer.svelte +++ b/web/src/lib/components/album-page/album-viewer.svelte @@ -21,6 +21,7 @@ import ImmichLogoSmallLink from '../shared-components/immich-logo-small-link.svelte'; import ThemeButton from '../shared-components/theme-button.svelte'; import AlbumSummary from './album-summary.svelte'; + import CastButton from '$lib/cast/cast-button.svelte'; interface Props { sharedLink: SharedLinkResponseDto; @@ -103,6 +104,8 @@ {/snippet} {#snippet trailing()} + + {#if sharedLink.allowUpload} import { goto } from '$app/navigation'; + import CastButton from '$lib/cast/cast-button.svelte'; import type { OnAction, PreAction } from '$lib/components/asset-viewer/actions/action'; import AddToAlbumAction from '$lib/components/asset-viewer/actions/add-to-album-action.svelte'; import ArchiveAction from '$lib/components/asset-viewer/actions/archive-action.svelte'; @@ -116,6 +117,8 @@ {/if}
+ + {#if !asset.isTrashed && $user && !isLocked} {/if} diff --git a/web/src/lib/components/asset-viewer/photo-viewer.spec.ts b/web/src/lib/components/asset-viewer/photo-viewer.spec.ts index d90fb89c23..9e9f8fae62 100644 --- a/web/src/lib/components/asset-viewer/photo-viewer.spec.ts +++ b/web/src/lib/components/asset-viewer/photo-viewer.spec.ts @@ -31,6 +31,29 @@ describe('PhotoViewer component', () => { beforeAll(() => { getAssetOriginalUrlSpy = vi.spyOn(utils, 'getAssetOriginalUrl'); getAssetThumbnailUrlSpy = vi.spyOn(utils, 'getAssetThumbnailUrl'); + + vi.stubGlobal('cast', { + framework: { + CastState: { + NO_DEVICES_AVAILABLE: 'NO_DEVICES_AVAILABLE', + }, + RemotePlayer: vi.fn().mockImplementation(() => ({})), + RemotePlayerEventType: { + ANY_CHANGE: 'anyChanged', + }, + RemotePlayerController: vi.fn().mockImplementation(() => ({ addEventListener: vi.fn() })), + CastContext: { + getInstance: vi.fn().mockImplementation(() => ({ setOptions: vi.fn(), addEventListener: vi.fn() })), + }, + CastContextEventType: { + SESSION_STATE_CHANGED: 'sessionstatechanged', + CAST_STATE_CHANGED: 'caststatechanged', + }, + }, + }); + vi.stubGlobal('chrome', { + cast: { media: { PlayerState: { IDLE: 'IDLE' } }, AutoJoinPolicy: { ORIGIN_SCOPED: 'origin_scoped' } }, + }); }); beforeEach(() => { diff --git a/web/src/lib/components/asset-viewer/photo-viewer.svelte b/web/src/lib/components/asset-viewer/photo-viewer.svelte index b00be3c2f3..bb817494de 100644 --- a/web/src/lib/components/asset-viewer/photo-viewer.svelte +++ b/web/src/lib/components/asset-viewer/photo-viewer.svelte @@ -23,6 +23,7 @@ import { fade } from 'svelte/transition'; import LoadingSpinner from '../shared-components/loading-spinner.svelte'; import { NotificationType, notificationController } from '../shared-components/notification/notification'; + import { castManager } from '$lib/managers/cast-manager.svelte'; interface Props { asset: AssetResponseDto; @@ -147,6 +148,27 @@ return AssetMediaSize.Preview; }); + $effect(() => { + if (assetFileUrl) { + // this can't be in an async context with $effect + void cast(assetFileUrl); + } + }); + + const cast = async (url: string) => { + if (!url || !castManager.isCasting) { + return; + } + const fullUrl = new URL(url, globalThis.location.href); + + try { + await castManager.loadMedia(fullUrl.href); + } catch (error) { + handleError(error, 'Unable to cast'); + return; + } + }; + const onload = () => { imageLoaded = true; assetFileUrl = imageLoaderUrl; diff --git a/web/src/lib/components/asset-viewer/video-native-viewer.svelte b/web/src/lib/components/asset-viewer/video-native-viewer.svelte index a8b0abe5eb..8205c8c353 100644 --- a/web/src/lib/components/asset-viewer/video-native-viewer.svelte +++ b/web/src/lib/components/asset-viewer/video-native-viewer.svelte @@ -1,6 +1,8 @@ + + + + {$t('connected_to')} {castManager.receiverName} + + +poster + +
+ {#if castManager.castState == CastState.BUFFERING} +
+ +
+ {:else} + handlePlayPauseButton()} + title={castManager.castState == CastState.PLAYING ? 'Pause' : 'Play'} + /> + {/if} + + +
diff --git a/web/src/lib/components/shared-components/navigation-bar/navigation-bar.svelte b/web/src/lib/components/shared-components/navigation-bar/navigation-bar.svelte index f1a48c98b4..582270b1af 100644 --- a/web/src/lib/components/shared-components/navigation-bar/navigation-bar.svelte +++ b/web/src/lib/components/shared-components/navigation-bar/navigation-bar.svelte @@ -27,6 +27,7 @@ import ThemeButton from '../theme-button.svelte'; import UserAvatar from '../user-avatar.svelte'; import AccountInfoPanel from './account-info-panel.svelte'; + import CastButton from '$lib/cast/cast-button.svelte'; interface Props { showUploadButton?: boolean; @@ -162,6 +163,8 @@ {/if}
+ +
(shouldShowAccountInfoPanel = false), diff --git a/web/src/lib/managers/cast-manager.svelte.ts b/web/src/lib/managers/cast-manager.svelte.ts new file mode 100644 index 0000000000..227bd3faea --- /dev/null +++ b/web/src/lib/managers/cast-manager.svelte.ts @@ -0,0 +1,159 @@ +import { GCastDestination } from '$lib/utils/cast/gcast-destination.svelte'; +import { createSession, type SessionCreateResponseDto } from '@immich/sdk'; +import { DateTime, Duration } from 'luxon'; + +// follows chrome.cast.media.PlayerState +export enum CastState { + IDLE = 'IDLE', + PLAYING = 'PLAYING', + PAUSED = 'PAUSED', + BUFFERING = 'BUFFERING', +} + +export enum CastDestinationType { + GCAST = 'GCAST', +} + +export interface ICastDestination { + initialize(): Promise; // returns if the cast destination can be used + type: CastDestinationType; // type of cast destination + + isAvailable: boolean; // can we use the cast destination + isConnected: boolean; // is the cast destination actively sharing + + currentTime: number | null; // current seek time the player is at + duration: number | null; // duration of media + + receiverName: string | null; // name of the cast destination + castState: CastState; // current state of the cast destination + + loadMedia(mediaUrl: string, sessionKey: string, reload: boolean): Promise; // load media to the cast destination + + // remote player controls + play(): void; + pause(): void; + seekTo(time: number): void; + disconnect(): void; +} + +class CastManager { + private castDestinations = $state([]); + private current = $derived(this.monitorConnectedDestination()); + + availableDestinations = $state([]); + initialized = $state(false); + + isCasting = $derived(this.current?.isConnected ?? false); + receiverName = $derived(this.current?.receiverName ?? null); + castState = $derived(this.current?.castState ?? null); + currentTime = $derived(this.current?.currentTime ?? null); + duration = $derived(this.current?.duration ?? 
null); + + private sessionKey: SessionCreateResponseDto | null = null; + + constructor() { + // load each cast destination + this.castDestinations = [ + new GCastDestination(), + // Add other cast destinations here (ie FCast) + ]; + } + + async initialize() { + // this goes first to prevent multiple calls to initialize + if (this.initialized) { + return; + } + this.initialized = true; + + // try to initialize each cast destination + for (const castDestination of this.castDestinations) { + const destAvailable = await castDestination.initialize(); + if (destAvailable) { + this.availableDestinations.push(castDestination); + } + } + } + + // monitor all cast destinations for changes + // we want to make sure only one session is active at a time + private monitorConnectedDestination(): ICastDestination | null { + // check if we have a connected destination + const connectedDest = this.castDestinations.find((dest) => dest.isConnected); + return connectedDest || null; + } + + private isTokenValid() { + // check if we already have a session token + // we should always have a expiration date + if (!this.sessionKey || !this.sessionKey.expiresAt) { + return false; + } + + const tokenExpiration = DateTime.fromISO(this.sessionKey.expiresAt); + + // we want to make sure we have at least 10 seconds remaining in the session + // this is to account for network latency and other delays when sending the request + const bufferedExpiration = tokenExpiration.minus({ seconds: 10 }); + + return bufferedExpiration > DateTime.now(); + } + + private async refreshSessionToken() { + // get session token to authenticate the media url + // check and make sure we have at least 10 seconds remaining in the session + // before we send the media request, refresh the session if needed + if (!this.isTokenValid()) { + this.sessionKey = await createSession({ + sessionCreateDto: { + duration: Duration.fromObject({ minutes: 15 }).as('seconds'), + deviceOS: 'Google Cast', + deviceType: 'Cast', + }, + }); + } + } + + async loadMedia(mediaUrl: string, reload: boolean = false) { + if (!this.current) { + throw new Error('No active cast destination'); + } + + await this.refreshSessionToken(); + if (!this.sessionKey) { + throw new Error('No session key available'); + } + + await this.current.loadMedia(mediaUrl, this.sessionKey.token, reload); + } + + play() { + this.current?.play(); + } + + pause() { + this.current?.pause(); + } + + seekTo(time: number) { + this.current?.seekTo(time); + } + + disconnect() { + this.current?.disconnect(); + } +} + +// Persist castManager across Svelte HMRs +let castManager: CastManager; + +if (import.meta.hot && import.meta.hot.data) { + if (!import.meta.hot.data.castManager) { + import.meta.hot.data.castManager = new CastManager(); + } + castManager = import.meta.hot.data.castManager; +} else { + castManager = new CastManager(); +} + +export { castManager }; diff --git a/web/src/lib/utils/cast/gcast-destination.svelte.ts b/web/src/lib/utils/cast/gcast-destination.svelte.ts new file mode 100644 index 0000000000..fcfb8c382a --- /dev/null +++ b/web/src/lib/utils/cast/gcast-destination.svelte.ts @@ -0,0 +1,234 @@ +import { CastDestinationType, CastState, type ICastDestination } from '$lib/managers/cast-manager.svelte'; +import 'chromecast-caf-sender'; +import { Duration } from 'luxon'; + +const FRAMEWORK_LINK = 'https://www.gstatic.com/cv/js/sender/v1/cast_sender.js?loadCastFramework=1'; + +enum SESSION_DISCOVERY_CAUSE { + LOAD_MEDIA, + ACTIVE_SESSION, +} + +export class GCastDestination implements 
ICastDestination { + type = CastDestinationType.GCAST; + isAvailable = $state(false); + isConnected = $state(false); + currentTime = $state(null); + duration = $state(null); + castState = $state(CastState.IDLE); + receiverName = $state(null); + + private remotePlayer: cast.framework.RemotePlayer | null = null; + private session: chrome.cast.Session | null = null; + private currentMedia: chrome.cast.media.Media | null = null; + private currentUrl: string | null = null; + + async initialize(): Promise { + // this is a really messy way since google does a pseudo-callbak + // in the form of a global window event. We will give Chrome 3 seconds to respond + // or we will mark the destination as unavailable + + const callbackPromise: Promise = new Promise((resolve) => { + // check if the cast framework is already loaded + if (this.isAvailable) { + resolve(true); + return; + } + + window['__onGCastApiAvailable'] = (isAvailable: boolean) => { + resolve(isAvailable); + }; + + if (!document.querySelector(`script[src="${FRAMEWORK_LINK}"]`)) { + const script = document.createElement('script'); + script.src = FRAMEWORK_LINK; + document.body.append(script); + } + }); + + const timeoutPromise: Promise = new Promise((resolve) => { + setTimeout( + () => { + resolve(false); + }, + Duration.fromObject({ seconds: 3 }).toMillis(), + ); + }); + + this.isAvailable = await Promise.race([callbackPromise, timeoutPromise]); + + if (!this.isAvailable) { + return false; + } + + const castContext = cast.framework.CastContext.getInstance(); + this.remotePlayer = new cast.framework.RemotePlayer(); + + castContext.setOptions({ + receiverApplicationId: chrome.cast.media.DEFAULT_MEDIA_RECEIVER_APP_ID, + autoJoinPolicy: chrome.cast.AutoJoinPolicy.ORIGIN_SCOPED, + }); + + castContext.addEventListener(cast.framework.CastContextEventType.SESSION_STATE_CHANGED, (event) => + this.onSessionStateChanged(event), + ); + + castContext.addEventListener(cast.framework.CastContextEventType.CAST_STATE_CHANGED, (event) => + this.onCastStateChanged(event), + ); + + const remotePlayerController = new cast.framework.RemotePlayerController(this.remotePlayer); + remotePlayerController.addEventListener(cast.framework.RemotePlayerEventType.ANY_CHANGE, (event) => + this.onRemotePlayerChange(event), + ); + + return true; + } + + async loadMedia(mediaUrl: string, sessionKey: string, reload: boolean = false): Promise { + if (!this.isAvailable || !this.isConnected || !this.session) { + return; + } + + // already playing the same media + if (this.currentUrl === mediaUrl && !reload) { + return; + } + + // we need to send content type in the request + // in the future we can swap this out for an API call to get image metadata + const assetHead = await fetch(mediaUrl, { method: 'HEAD' }); + const contentType = assetHead.headers.get('content-type'); + + if (!contentType) { + throw new Error('No content type found for media url'); + } + + // build the authenticated media request and send it to the cast device + const authenticatedUrl = `${mediaUrl}&sessionKey=${sessionKey}`; + const mediaInfo = new chrome.cast.media.MediaInfo(authenticatedUrl, contentType); + const request = new chrome.cast.media.LoadRequest(mediaInfo); + const successCallback = this.onMediaDiscovered.bind(this, SESSION_DISCOVERY_CAUSE.LOAD_MEDIA); + + this.currentUrl = mediaUrl; + + return this.session.loadMedia(request, successCallback, this.onError.bind(this)); + } + + /// + /// Remote Player Controls + /// + + play(): void { + if (!this.currentMedia) { + return; + } + + const playRequest 
= new chrome.cast.media.PlayRequest(); + + this.currentMedia.play(playRequest, () => {}, this.onError.bind(this)); + } + + pause(): void { + if (!this.currentMedia) { + return; + } + + const pauseRequest = new chrome.cast.media.PauseRequest(); + + this.currentMedia.pause(pauseRequest, () => {}, this.onError.bind(this)); + } + + seekTo(time: number): void { + const remotePlayer = new cast.framework.RemotePlayer(); + const remotePlayerController = new cast.framework.RemotePlayerController(remotePlayer); + remotePlayer.currentTime = time; + remotePlayerController.seek(); + } + + disconnect(): void { + this.session?.leave(() => { + this.session = null; + this.castState = CastState.IDLE; + this.isConnected = false; + this.receiverName = null; + }, this.onError.bind(this)); + } + + /// + /// Google Cast Callbacks + /// + private onSessionStateChanged(event: cast.framework.SessionStateEventData) { + switch (event.sessionState) { + case cast.framework.SessionState.SESSION_ENDED: { + this.session = null; + break; + } + case cast.framework.SessionState.SESSION_RESUMED: + case cast.framework.SessionState.SESSION_STARTED: { + this.session = event.session.getSessionObj(); + break; + } + } + } + + private onCastStateChanged(event: cast.framework.CastStateEventData) { + this.isConnected = event.castState === cast.framework.CastState.CONNECTED; + this.receiverName = this.session?.receiver.friendlyName ?? null; + + if (event.castState === cast.framework.CastState.NOT_CONNECTED) { + this.currentMedia = null; + this.currentUrl = null; + } + } + + private onRemotePlayerChange(event: cast.framework.RemotePlayerChangedEvent) { + switch (event.field) { + case 'isConnected': { + this.isConnected = event.value; + break; + } + case 'remotePlayer': { + this.remotePlayer = event.value; + break; + } + case 'duration': { + this.duration = event.value; + break; + } + case 'currentTime': { + this.currentTime = event.value; + break; + } + case 'playerState': { + this.castState = event.value; + break; + } + } + } + + onError(error: chrome.cast.Error) { + console.error('Google Cast Error:', error); + } + + private onMediaDiscovered(cause: SESSION_DISCOVERY_CAUSE, currentMedia: chrome.cast.media.Media) { + this.currentMedia = currentMedia; + + if (cause === SESSION_DISCOVERY_CAUSE.LOAD_MEDIA) { + this.castState = CastState.PLAYING; + } else if (cause === SESSION_DISCOVERY_CAUSE.ACTIVE_SESSION) { + // CastState and PlayerState are identical enums + this.castState = currentMedia.playerState as unknown as CastState; + } + } + + static async showCastDialog() { + try { + await cast.framework.CastContext.getInstance().requestSession(); + } catch { + // the cast dialog throws an error if the user closes it + // we don't care about this error + return; + } + } +} diff --git a/web/src/routes/(user)/albums/[albumId=id]/[[photos=photos]]/[[assetId=id]]/+page.svelte b/web/src/routes/(user)/albums/[albumId=id]/[[photos=photos]]/[[assetId=id]]/+page.svelte index 7fcc70ae25..bd9186e3c0 100644 --- a/web/src/routes/(user)/albums/[albumId=id]/[[photos=photos]]/[[assetId=id]]/+page.svelte +++ b/web/src/routes/(user)/albums/[albumId=id]/[[photos=photos]]/[[assetId=id]]/+page.svelte @@ -1,6 +1,7 @@ -
-
-
- + +
+ {$t('welcome_to_immich')} +
+
- {$t('welcome_to_immich')} -
-
+ diff --git a/web/src/routes/auth/onboarding/+page.svelte b/web/src/routes/auth/onboarding/+page.svelte index 4c6e0878b6..091681002e 100644 --- a/web/src/routes/auth/onboarding/+page.svelte +++ b/web/src/routes/auth/onboarding/+page.svelte @@ -58,13 +58,13 @@
-
+
-
+
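The cast destination class added earlier in this patch is driven entirely through its public methods (`initialize`, the static `showCastDialog`, `loadMedia`, `play`/`pause`, and `disconnect`). A minimal caller-side sketch follows; the export name, import path, and media URL shape are assumptions, since they are not visible in this hunk:

```typescript
// Hypothetical usage sketch; the export name, import path, and URL shape below are assumptions.
import { GCastDestination } from '$lib/utils/cast/gcast-destination';

export async function castAsset(assetId: string, sessionKey: string): Promise<void> {
  const destination = new GCastDestination();

  // Loads the Cast SDK and waits up to 3 seconds for the availability callback.
  if (!(await destination.initialize())) {
    return;
  }

  // Opens the Google Cast device picker; it resolves quietly if the user dismisses it.
  await GCastDestination.showCastDialog();

  // The URL must already carry a query string, because loadMedia appends `&sessionKey=...`.
  const mediaUrl = `/api/assets/${assetId}/original?cast=true`; // assumed endpoint shape
  await destination.loadMedia(mediaUrl, sessionKey);
}
```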
From 188188a8443bc1c83586ee4a1075a365930a0920 Mon Sep 17 00:00:00 2001 From: Arno <46051866+arnolicious@users.noreply.github.com> Date: Wed, 21 May 2025 18:12:00 +0200 Subject: [PATCH 22/35] fix: Change shortcut listeners from window to document (#18416) * fix: Change shortcut listeners to document * fix: split into window and document * chore: upgrade ui package --- web/package-lock.json | 22 +++++++++---------- web/package.json | 2 +- .../components/album-page/album-viewer.svelte | 2 +- .../actions/add-to-album-action.svelte | 2 +- .../actions/archive-action.svelte | 2 +- .../asset-viewer/actions/close-action.svelte | 2 +- .../asset-viewer/actions/delete-action.svelte | 2 +- .../actions/download-action.svelte | 2 +- .../actions/favorite-action.svelte | 2 +- .../actions/next-asset-action.svelte | 2 +- .../actions/previous-asset-action.svelte | 2 +- .../actions/show-detail-action.svelte | 2 +- .../asset-viewer/editor/editor-panel.svelte | 2 +- .../asset-viewer/photo-viewer.svelte | 2 +- .../asset-viewer/slideshow-bar.svelte | 2 +- .../manage-people-visibility.svelte | 2 +- .../memory-page/memory-viewer.svelte | 2 +- .../actions/download-action.svelte | 2 +- .../components/photos-page/asset-grid.svelte | 2 +- .../context-menu/menu-option.svelte | 10 ++++----- .../gallery-viewer/gallery-viewer.svelte | 2 +- .../search-bar/search-bar.svelte | 2 +- .../duplicates-compare-control.svelte | 2 +- .../[[assetId=id]]/+page.svelte | 3 ++- web/src/routes/+layout.svelte | 2 +- 25 files changed, 40 insertions(+), 39 deletions(-) diff --git a/web/package-lock.json b/web/package-lock.json index c0f441d72c..bcf62268ce 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -11,7 +11,7 @@ "dependencies": { "@formatjs/icu-messageformat-parser": "^2.9.8", "@immich/sdk": "file:../open-api/typescript-sdk", - "@immich/ui": "^0.22.1", + "@immich/ui": "^0.22.2", "@mapbox/mapbox-gl-rtl-text": "0.2.3", "@mdi/js": "^7.4.47", "@photo-sphere-viewer/core": "^5.11.5", @@ -1342,13 +1342,13 @@ "link": true }, "node_modules/@immich/ui": { - "version": "0.22.1", - "resolved": "https://registry.npmjs.org/@immich/ui/-/ui-0.22.1.tgz", - "integrity": "sha512-/QdqctBit+eX8QZgTL4PlgS7l6/NCGXeDjR6kQNLOVBPhbjkmtwqsvZ+RymYClcHAEhutXOKRhnlQU9mNLC/SA==", + "version": "0.22.2", + "resolved": "https://registry.npmjs.org/@immich/ui/-/ui-0.22.2.tgz", + "integrity": "sha512-aP9B54i4SqL+y7EzkI1gVhx/qtiSYDOFz6vjn1PyXfCnA0RrE+dxYu/Y7f9PXLX0MPrTro5MxmNC06JjuS/Gow==", "license": "GNU Affero General Public License version 3", "dependencies": { "@mdi/js": "^7.4.47", - "bits-ui": "^1.0.0-next.46", + "bits-ui": "^1.5.3", "tailwind-merge": "^2.5.4", "tailwind-variants": "^1.0.0" }, @@ -1357,9 +1357,9 @@ } }, "node_modules/@internationalized/date": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.8.0.tgz", - "integrity": "sha512-J51AJ0fEL68hE4CwGPa6E0PO6JDaVLd8aln48xFCSy7CZkZc96dGEGmLs2OEEbBxcsVZtfrqkXJwI2/MSG8yKw==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.8.1.tgz", + "integrity": "sha512-PgVE6B6eIZtzf9Gu5HvJxRK3ufUFz9DhspELuhW/N0GuMGMTLvPQNRkHP2hTuP9lblOk+f+1xi96sPiPXANXAA==", "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" @@ -3599,9 +3599,9 @@ "license": "MIT" }, "node_modules/bits-ui": { - "version": "1.4.8", - "resolved": "https://registry.npmjs.org/bits-ui/-/bits-ui-1.4.8.tgz", - "integrity": "sha512-j34GsdSsJ+ZBl9h/70VkufvrlEgTKQSZvm80eM5VvuhLJWvpfEpn9+k0FVmtDQl9NSPgEVtI9imYhm8nW9Nj/w==", + "version": 
"1.5.3", + "resolved": "https://registry.npmjs.org/bits-ui/-/bits-ui-1.5.3.tgz", + "integrity": "sha512-BTZ9/GU11DaEGyQp+AY+sXCMLZO0gbDC5J8l7+Ngj4Vf6hNOwrpMmoh5iuKktA6cphXYolVkUDgBWmkh415I+w==", "license": "MIT", "dependencies": { "@floating-ui/core": "^1.6.4", diff --git a/web/package.json b/web/package.json index 3f94ebbc73..96b3189d4b 100644 --- a/web/package.json +++ b/web/package.json @@ -28,7 +28,7 @@ "dependencies": { "@formatjs/icu-messageformat-parser": "^2.9.8", "@immich/sdk": "file:../open-api/typescript-sdk", - "@immich/ui": "^0.22.1", + "@immich/ui": "^0.22.2", "@mapbox/mapbox-gl-rtl-text": "0.2.3", "@mdi/js": "^7.4.47", "@photo-sphere-viewer/core": "^5.11.5", diff --git a/web/src/lib/components/album-page/album-viewer.svelte b/web/src/lib/components/album-page/album-viewer.svelte index 227fb999b8..f8a38d9e67 100644 --- a/web/src/lib/components/album-page/album-viewer.svelte +++ b/web/src/lib/components/album-page/album-viewer.svelte @@ -48,7 +48,7 @@ }); - { diff --git a/web/src/lib/components/asset-viewer/actions/add-to-album-action.svelte b/web/src/lib/components/asset-viewer/actions/add-to-album-action.svelte index 4ebe9d002a..0fbd1c8529 100644 --- a/web/src/lib/components/asset-viewer/actions/add-to-album-action.svelte +++ b/web/src/lib/components/asset-viewer/actions/add-to-album-action.svelte @@ -36,7 +36,7 @@ }; - (showSelectionModal = true) }} /> diff --git a/web/src/lib/components/asset-viewer/actions/archive-action.svelte b/web/src/lib/components/asset-viewer/actions/archive-action.svelte index 362a0a693a..c4936d21d0 100644 --- a/web/src/lib/components/asset-viewer/actions/archive-action.svelte +++ b/web/src/lib/components/asset-viewer/actions/archive-action.svelte @@ -28,7 +28,7 @@ }; - + - + diff --git a/web/src/lib/components/asset-viewer/actions/delete-action.svelte b/web/src/lib/components/asset-viewer/actions/delete-action.svelte index 90322c00f0..99a9461528 100644 --- a/web/src/lib/components/asset-viewer/actions/delete-action.svelte +++ b/web/src/lib/components/asset-viewer/actions/delete-action.svelte @@ -73,7 +73,7 @@ }; - trashOrDelete(asset.isTrashed) }, { shortcut: { key: 'Delete', shift: true }, onShortcut: () => trashOrDelete(true) }, diff --git a/web/src/lib/components/asset-viewer/actions/download-action.svelte b/web/src/lib/components/asset-viewer/actions/download-action.svelte index c32766a725..1dfcc8b0f6 100644 --- a/web/src/lib/components/asset-viewer/actions/download-action.svelte +++ b/web/src/lib/components/asset-viewer/actions/download-action.svelte @@ -19,7 +19,7 @@ const onDownloadFile = async () => downloadFile(await getAssetInfo({ id: asset.id, key: authManager.key })); - + {#if !menuItem} diff --git a/web/src/lib/components/asset-viewer/actions/favorite-action.svelte b/web/src/lib/components/asset-viewer/actions/favorite-action.svelte index bb1a9343d9..3c6c7d7621 100644 --- a/web/src/lib/components/asset-viewer/actions/favorite-action.svelte +++ b/web/src/lib/components/asset-viewer/actions/favorite-action.svelte @@ -46,7 +46,7 @@ }; - + - - - + diff --git a/web/src/lib/components/asset-viewer/editor/editor-panel.svelte b/web/src/lib/components/asset-viewer/editor/editor-panel.svelte index 28ca71c806..eee76f4db5 100644 --- a/web/src/lib/components/asset-viewer/editor/editor-panel.svelte +++ b/web/src/lib/components/asset-viewer/editor/editor-panel.svelte @@ -40,7 +40,7 @@ const onConfirm = () => (typeof $showCancelConfirmDialog === 'boolean' ? null : $showCancelConfirmDialog()); - +
diff --git a/web/src/lib/components/asset-viewer/photo-viewer.svelte b/web/src/lib/components/asset-viewer/photo-viewer.svelte index bb817494de..cf846e7af6 100644 --- a/web/src/lib/components/asset-viewer/photo-viewer.svelte +++ b/web/src/lib/components/asset-viewer/photo-viewer.svelte @@ -202,7 +202,7 @@ let containerHeight = $state(0); - - - +
- - + {#if menuItem} diff --git a/web/src/lib/components/photos-page/asset-grid.svelte b/web/src/lib/components/photos-page/asset-grid.svelte index 84bded9f4b..7b45019ec3 100644 --- a/web/src/lib/components/photos-page/asset-grid.svelte +++ b/web/src/lib/components/photos-page/asset-grid.svelte @@ -712,7 +712,7 @@ }); - + {#if isShowDeleteConfirmation} - import Icon from '$lib/components/elements/icon.svelte'; - import { generateId } from '$lib/utils/generate-id'; - import { optionClickCallbackStore, selectedIdStore } from '$lib/stores/context-menu.store'; import type { Shortcut } from '$lib/actions/shortcut'; - import { shortcutLabel as computeShortcutLabel, shortcut as bindShortcut } from '$lib/actions/shortcut'; + import { shortcut as bindShortcut, shortcutLabel as computeShortcutLabel } from '$lib/actions/shortcut'; + import Icon from '$lib/components/elements/icon.svelte'; + import { optionClickCallbackStore, selectedIdStore } from '$lib/stores/context-menu.store'; + import { generateId } from '$lib/utils/generate-id'; interface Props { text: string; @@ -44,7 +44,7 @@ : () => {}; - + diff --git a/web/src/lib/components/shared-components/gallery-viewer/gallery-viewer.svelte b/web/src/lib/components/shared-components/gallery-viewer/gallery-viewer.svelte index 16bb364fb8..6b0a25a84b 100644 --- a/web/src/lib/components/shared-components/gallery-viewer/gallery-viewer.svelte +++ b/web/src/lib/components/shared-components/gallery-viewer/gallery-viewer.svelte @@ -446,7 +446,7 @@ }); - - input?.select() }, diff --git a/web/src/lib/components/utilities-page/duplicates/duplicates-compare-control.svelte b/web/src/lib/components/utilities-page/duplicates/duplicates-compare-control.svelte index dbfc2bdac7..c08abe106d 100644 --- a/web/src/lib/components/utilities-page/duplicates/duplicates-compare-control.svelte +++ b/web/src/lib/components/utilities-page/duplicates/duplicates-compare-control.svelte @@ -98,7 +98,7 @@ }; - - + +
{#if assetInteraction.selectionActive} diff --git a/web/src/routes/+layout.svelte b/web/src/routes/+layout.svelte index 3a6320a265..c9e91f3beb 100644 --- a/web/src/routes/+layout.svelte +++ b/web/src/routes/+layout.svelte @@ -101,7 +101,7 @@ {/if} - copyToClipboard(getMyImmichLink().toString()), From 493b9b7a54deffbfeea6107d897e7d20765866f7 Mon Sep 17 00:00:00 2001 From: Mert <101130780+mertalev@users.noreply.github.com> Date: Wed, 21 May 2025 12:15:30 -0400 Subject: [PATCH 23/35] fix(server): use preview path for person thumbnails from videos (#18419) use preview path for person thumbnails from videos --- server/src/services/media.service.spec.ts | 42 +++++++++++++++++++++++ server/src/services/media.service.ts | 6 ++-- server/test/fixtures/person.stub.ts | 13 +++++++ 3 files changed, 57 insertions(+), 4 deletions(-) diff --git a/server/src/services/media.service.spec.ts b/server/src/services/media.service.spec.ts index 3e77127aa9..3b9eafde8f 100644 --- a/server/src/services/media.service.spec.ts +++ b/server/src/services/media.service.spec.ts @@ -941,6 +941,48 @@ describe(MediaService.name, () => { }); }); + it('should use preview path if video', async () => { + mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.videoThumbnail); + mocks.media.generateThumbnail.mockResolvedValue(); + const data = Buffer.from(''); + const info = { width: 1000, height: 1000 } as OutputInfo; + mocks.media.decodeImage.mockResolvedValue({ data, info }); + + await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( + JobStatus.SUCCESS, + ); + + expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id); + expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/admin_id/pe/rs'); + expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailMiddle.previewPath, { + colorspace: Colorspace.P3, + orientation: undefined, + processInvalidImages: false, + }); + expect(mocks.media.generateThumbnail).toHaveBeenCalledWith( + data, + { + colorspace: Colorspace.P3, + format: ImageFormat.JPEG, + quality: 80, + crop: { + left: 238, + top: 163, + width: 274, + height: 274, + }, + raw: info, + processInvalidImages: false, + size: 250, + }, + 'upload/thumbs/admin_id/pe/rs/person-1.jpeg', + ); + expect(mocks.person.update).toHaveBeenCalledWith({ + id: 'person-1', + thumbnailPath: 'upload/thumbs/admin_id/pe/rs/person-1.jpeg', + }); + }); + it('should generate a thumbnail without going negative', async () => { mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailStart); mocks.media.generateThumbnail.mockResolvedValue(); diff --git a/server/src/services/media.service.ts b/server/src/services/media.service.ts index 5cb9d928c3..bd419f0b34 100644 --- a/server/src/services/media.service.ts +++ b/server/src/services/media.service.ts @@ -328,15 +328,13 @@ export class MediaService extends BaseService { const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight, exifOrientation, previewPath, originalPath } = data; let inputImage: string | Buffer; - if (mimeTypes.isVideo(originalPath)) { + if (data.type === AssetType.VIDEO) { if (!previewPath) { this.logger.error(`Could not generate person thumbnail for video ${id}: missing preview path`); return JobStatus.FAILED; } inputImage = previewPath; - } - - if (image.extractEmbedded && mimeTypes.isRaw(originalPath)) { + } else if (image.extractEmbedded && mimeTypes.isRaw(originalPath)) { const extracted = 
await this.extractImage(originalPath, image.preview.size); inputImage = extracted ? extracted.buffer : originalPath; } else { diff --git a/server/test/fixtures/person.stub.ts b/server/test/fixtures/person.stub.ts index 21a184035a..86f3bcde21 100644 --- a/server/test/fixtures/person.stub.ts +++ b/server/test/fixtures/person.stub.ts @@ -246,4 +246,17 @@ export const personThumbnailStub = { exifOrientation: '1', previewPath: previewFile.path, }), + videoThumbnail: Object.freeze({ + ownerId: userStub.admin.id, + x1: 100, + y1: 100, + x2: 200, + y2: 200, + oldHeight: 500, + oldWidth: 400, + type: AssetType.VIDEO, + originalPath: '/original/path.mp4', + exifOrientation: '1', + previewPath: previewFile.path, + }), }; From 6b31e333bbc1d480abbfb02ebc1608e218566fa0 Mon Sep 17 00:00:00 2001 From: Matthew Momjian <50788000+mmomjian@users.noreply.github.com> Date: Wed, 21 May 2025 12:48:11 -0400 Subject: [PATCH 24/35] fix(docs): vchord migration (#18418) * vector * add up top * fix vector * pg version --- .../administration/postgres-standalone.md | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/docs/docs/administration/postgres-standalone.md b/docs/docs/administration/postgres-standalone.md index 41743899a7..06a29a12fe 100644 --- a/docs/docs/administration/postgres-standalone.md +++ b/docs/docs/administration/postgres-standalone.md @@ -10,10 +10,14 @@ Running with a pre-existing Postgres server can unlock powerful administrative f ## Prerequisites +You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`. +The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then +running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`). + You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`. :::note -Immich is known to work with Postgres versions 14, 15, 16 and 17. Earlier versions are unsupported. +Immich is known to work with Postgres versions `>= 14, < 18`. Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 1.0.0`. ::: @@ -73,13 +77,14 @@ Support for pgvecto.rs will be dropped in a later release, hence we recommend al The easiest option is to have both extensions installed during the migration: 1. Ensure you still have pgvecto.rs installed -2. [Install VectorChord][vchord-install] -3. Add `shared_preload_libraries= 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed -4. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client -5. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output -6. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;` -7. Remove the `vectors.so` entry from the `shared_preload_libraries` setting -8. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate) +2. 
Install `pgvector` (`>= 0.7.0, < 1.0.0`). The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`) +3. [Install VectorChord][vchord-install] +4. Add `shared_preload_libraries= 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed +5. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client +6. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output +7. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;` +8. Remove the `vectors.so` entry from the `shared_preload_libraries` setting +9. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain installed as it provides the data types used by `vchord` If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps: @@ -131,3 +136,4 @@ Note that VectorChord itself uses pgvector types, so you should not uninstall pg If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO ;`. [vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html +[pg-apt]: https://www.postgresql.org/download/linux/#generic From 760b08506a3366aa6f69306fdca905f7415bce04 Mon Sep 17 00:00:00 2001 From: Mert <101130780+mertalev@users.noreply.github.com> Date: Wed, 21 May 2025 13:03:53 -0400 Subject: [PATCH 25/35] chore: tighten vchord version range (#18420) guard minor version --- docs/docs/administration/postgres-standalone.md | 2 +- server/src/constants.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/administration/postgres-standalone.md b/docs/docs/administration/postgres-standalone.md index 06a29a12fe..bbdd06847b 100644 --- a/docs/docs/administration/postgres-standalone.md +++ b/docs/docs/administration/postgres-standalone.md @@ -19,7 +19,7 @@ You must install VectorChord into your instance of Postgres using their [instruc :::note Immich is known to work with Postgres versions `>= 14, < 18`. -Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 1.0.0`. +Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.4.0`. 
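Not part of this patch, but a handy sanity check: the installed extension versions can be confirmed with a quick catalog query against the Immich database, for example:

```sql
-- Sanity check: list the vector-related extensions and their installed versions.
-- After the migration you would expect vchord within the supported range,
-- vector (pgvector) still present, and vectors (pgvecto.rs) removed.
SELECT extname, extversion
FROM pg_extension
WHERE extname IN ('vchord', 'vector', 'vectors');
```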
::: ## Specifying the connection URL diff --git a/server/src/constants.ts b/server/src/constants.ts index 8268360d9f..5a5984ab6e 100644 --- a/server/src/constants.ts +++ b/server/src/constants.ts @@ -4,7 +4,7 @@ import { SemVer } from 'semver'; import { DatabaseExtension, ExifOrientation, VectorIndex } from 'src/enum'; export const POSTGRES_VERSION_RANGE = '>=14.0.0'; -export const VECTORCHORD_VERSION_RANGE = '>=0.3 <1'; +export const VECTORCHORD_VERSION_RANGE = '>=0.3 <0.4'; export const VECTORS_VERSION_RANGE = '>=0.2 <0.4'; export const VECTOR_VERSION_RANGE = '>=0.5 <1'; From bc906f7343b60500dad95b97000a292798a8902c Mon Sep 17 00:00:00 2001 From: Mert <101130780+mertalev@users.noreply.github.com> Date: Wed, 21 May 2025 13:41:14 -0400 Subject: [PATCH 26/35] chore: specify vchord version in ci (#18423) --- .github/workflows/test.yml | 2 +- e2e/docker-compose.yml | 2 +- server/test/medium/globalSetup.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e6aecdb403..e19f9db6fc 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -643,7 +643,7 @@ jobs: contents: read services: postgres: - image: ghcr.io/immich-app/postgres:14@sha256:14bec5d02e8704081eafd566029204a4eb6bb75f3056cfb34e81c5ab1657a490 + image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:14bec5d02e8704081eafd566029204a4eb6bb75f3056cfb34e81c5ab1657a490 env: POSTGRES_PASSWORD: postgres POSTGRES_USER: postgres diff --git a/e2e/docker-compose.yml b/e2e/docker-compose.yml index a8cb21aaf7..50be3d9bef 100644 --- a/e2e/docker-compose.yml +++ b/e2e/docker-compose.yml @@ -37,7 +37,7 @@ services: image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa database: - image: ghcr.io/immich-app/postgres:14 + image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0 command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf environment: POSTGRES_PASSWORD: postgres diff --git a/server/test/medium/globalSetup.ts b/server/test/medium/globalSetup.ts index 323d2c4a53..91f47a8ca7 100644 --- a/server/test/medium/globalSetup.ts +++ b/server/test/medium/globalSetup.ts @@ -7,7 +7,7 @@ import { getKyselyConfig } from 'src/utils/database'; import { GenericContainer, Wait } from 'testcontainers'; const globalSetup = async () => { - const postgresContainer = await new GenericContainer('ghcr.io/immich-app/postgres:14') + const postgresContainer = await new GenericContainer('ghcr.io/immich-app/postgres:14-vectorchord0.3.0') .withExposedPorts(5432) .withEnvironment({ POSTGRES_PASSWORD: 'postgres', From 6954b11be153d1f2d885ba615c28ece0a9106d08 Mon Sep 17 00:00:00 2001 From: bo0tzz Date: Wed, 21 May 2025 20:21:01 +0200 Subject: [PATCH 27/35] chore: remove duplicate finder from community projects (#18424) --- docs/src/components/community-projects.tsx | 5 ----- 1 file changed, 5 deletions(-) diff --git a/docs/src/components/community-projects.tsx b/docs/src/components/community-projects.tsx index e70b5af50f..03a384162b 100644 --- a/docs/src/components/community-projects.tsx +++ b/docs/src/components/community-projects.tsx @@ -44,11 +44,6 @@ const projects: CommunityProjectProps[] = [ 'Lightroom plugin to publish, export photos from Lightroom to Immich. 
Import from Immich to Lightroom is also supported.', url: 'https://blog.fokuspunk.de/lrc-immich-plugin/', }, - { - title: 'Immich Duplicate Finder', - description: 'Webapp that uses machine learning to identify near-duplicate images.', - url: 'https://github.com/vale46n1/immich_duplicate_finder', - }, { title: 'Immich-Tiktok-Remover', description: 'Script to search for and remove TikTok videos from your Immich library.', From 58af574241d23814e76f772dc9e8cd8275036d54 Mon Sep 17 00:00:00 2001 From: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> Date: Wed, 21 May 2025 21:00:10 +0200 Subject: [PATCH 28/35] chore: update milestones (#18426) --- docs/src/pages/roadmap.tsx | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/docs/src/pages/roadmap.tsx b/docs/src/pages/roadmap.tsx index 1e0914a651..50f7a47902 100644 --- a/docs/src/pages/roadmap.tsx +++ b/docs/src/pages/roadmap.tsx @@ -78,12 +78,14 @@ import { mdiLinkEdit, mdiTagFaces, mdiMovieOpenPlayOutline, + mdiCast, } from '@mdi/js'; import Layout from '@theme/Layout'; import React from 'react'; import { Item, Timeline } from '../components/timeline'; const releases = { + 'v1.133.0': new Date(2025, 4, 21), 'v1.130.0': new Date(2025, 2, 25), 'v1.127.0': new Date(2025, 1, 26), 'v1.122.0': new Date(2024, 11, 5), @@ -218,14 +220,6 @@ const roadmap: Item[] = [ description: 'Immich goes stable', getDateLabel: () => 'Planned for early 2025', }, - { - done: false, - icon: mdiLockOutline, - iconColor: 'sandybrown', - title: 'Private/locked photos', - description: 'Private assets with extra protections', - getDateLabel: () => 'Planned for 2025', - }, { done: false, icon: mdiCloudUploadOutline, @@ -245,6 +239,20 @@ const roadmap: Item[] = [ ]; const milestones: Item[] = [ + withRelease({ + icon: mdiCast, + iconColor: 'aqua', + title: 'Google Cast (web)', + description: 'Cast assets to Google Cast/Chromecast compatible devices', + release: 'v1.133.0', + }), + withRelease({ + icon: mdiLockOutline, + iconColor: 'sandybrown', + title: 'Private/locked photos', + description: 'Private assets with extra protections', + release: 'v1.133.0', + }), withRelease({ icon: mdiFolderMultiple, iconColor: 'brown', From cd288533a1f9459af8f04dfb34f80b25354cd5c5 Mon Sep 17 00:00:00 2001 From: Jason Rasmussen Date: Wed, 21 May 2025 15:35:32 -0400 Subject: [PATCH 29/35] feat: sync albums and album users (#18377) --- mobile/openapi/README.md | 4 + mobile/openapi/lib/api.dart | 4 + mobile/openapi/lib/api_client.dart | 8 + .../lib/model/sync_album_delete_v1.dart | 99 ++ .../lib/model/sync_album_user_delete_v1.dart | 107 ++ .../openapi/lib/model/sync_album_user_v1.dart | 189 ++++ mobile/openapi/lib/model/sync_album_v1.dart | 167 ++++ .../openapi/lib/model/sync_entity_type.dart | 12 + .../openapi/lib/model/sync_request_type.dart | 6 + open-api/immich-openapi-specs.json | 109 ++- open-api/typescript-sdk/src/fetch-client.ts | 10 +- server/package.json | 1 + server/src/bin/migrations.ts | 1 + server/src/db.d.ts | 18 + server/src/dtos/sync.dto.ts | 39 +- server/src/enum.ts | 7 + server/src/queries/album.user.repository.sql | 4 +- server/src/queries/sync.repository.sql | 95 ++ .../src/repositories/album-user.repository.ts | 10 +- server/src/repositories/sync.repository.ts | 109 ++- server/src/schema/functions.ts | 48 + server/src/schema/index.ts | 13 +- .../1747664684909-AddAlbumAuditTables.ts | 96 ++ server/src/schema/tables/album-audit.table.ts | 17 + .../schema/tables/album-user-audit.table.ts | 17 + 
server/src/schema/tables/album-user.table.ts | 32 +- server/src/schema/tables/album.table.ts | 9 + server/src/services/sync.service.ts | 40 +- .../decorators/after-insert.decorator.ts | 8 + server/src/sql-tools/public_api.ts | 1 + server/test/medium.factory.ts | 87 +- .../specs/services/sync.service.spec.ts | 910 ------------------ .../medium/specs/sync/sync-album-user.spec.ts | 269 ++++++ .../test/medium/specs/sync/sync-album.spec.ts | 220 +++++ .../medium/specs/sync/sync-asset-exif.spec.ts | 100 ++ .../test/medium/specs/sync/sync-asset.spec.ts | 130 +++ .../sync/sync-partner-asset-exif.spec.ts | 129 +++ .../specs/sync/sync-partner-asset.spec.ts | 208 ++++ .../medium/specs/sync/sync-partner.spec.ts | 221 +++++ .../test/medium/specs/sync/sync-types.spec.ts | 12 + .../test/medium/specs/sync/sync-user.spec.ts | 179 ++++ 41 files changed, 2811 insertions(+), 934 deletions(-) create mode 100644 mobile/openapi/lib/model/sync_album_delete_v1.dart create mode 100644 mobile/openapi/lib/model/sync_album_user_delete_v1.dart create mode 100644 mobile/openapi/lib/model/sync_album_user_v1.dart create mode 100644 mobile/openapi/lib/model/sync_album_v1.dart create mode 100644 server/src/schema/migrations/1747664684909-AddAlbumAuditTables.ts create mode 100644 server/src/schema/tables/album-audit.table.ts create mode 100644 server/src/schema/tables/album-user-audit.table.ts create mode 100644 server/src/sql-tools/from-code/decorators/after-insert.decorator.ts delete mode 100644 server/test/medium/specs/services/sync.service.spec.ts create mode 100644 server/test/medium/specs/sync/sync-album-user.spec.ts create mode 100644 server/test/medium/specs/sync/sync-album.spec.ts create mode 100644 server/test/medium/specs/sync/sync-asset-exif.spec.ts create mode 100644 server/test/medium/specs/sync/sync-asset.spec.ts create mode 100644 server/test/medium/specs/sync/sync-partner-asset-exif.spec.ts create mode 100644 server/test/medium/specs/sync/sync-partner-asset.spec.ts create mode 100644 server/test/medium/specs/sync/sync-partner.spec.ts create mode 100644 server/test/medium/specs/sync/sync-types.spec.ts create mode 100644 server/test/medium/specs/sync/sync-user.spec.ts diff --git a/mobile/openapi/README.md b/mobile/openapi/README.md index 2c5dea7f19..d2cae47fb5 100644 --- a/mobile/openapi/README.md +++ b/mobile/openapi/README.md @@ -443,6 +443,10 @@ Class | Method | HTTP request | Description - [SyncAckDeleteDto](doc//SyncAckDeleteDto.md) - [SyncAckDto](doc//SyncAckDto.md) - [SyncAckSetDto](doc//SyncAckSetDto.md) + - [SyncAlbumDeleteV1](doc//SyncAlbumDeleteV1.md) + - [SyncAlbumUserDeleteV1](doc//SyncAlbumUserDeleteV1.md) + - [SyncAlbumUserV1](doc//SyncAlbumUserV1.md) + - [SyncAlbumV1](doc//SyncAlbumV1.md) - [SyncAssetDeleteV1](doc//SyncAssetDeleteV1.md) - [SyncAssetExifV1](doc//SyncAssetExifV1.md) - [SyncAssetV1](doc//SyncAssetV1.md) diff --git a/mobile/openapi/lib/api.dart b/mobile/openapi/lib/api.dart index 541614ca55..aa8ae348aa 100644 --- a/mobile/openapi/lib/api.dart +++ b/mobile/openapi/lib/api.dart @@ -238,6 +238,10 @@ part 'model/stack_update_dto.dart'; part 'model/sync_ack_delete_dto.dart'; part 'model/sync_ack_dto.dart'; part 'model/sync_ack_set_dto.dart'; +part 'model/sync_album_delete_v1.dart'; +part 'model/sync_album_user_delete_v1.dart'; +part 'model/sync_album_user_v1.dart'; +part 'model/sync_album_v1.dart'; part 'model/sync_asset_delete_v1.dart'; part 'model/sync_asset_exif_v1.dart'; part 'model/sync_asset_v1.dart'; diff --git a/mobile/openapi/lib/api_client.dart 
b/mobile/openapi/lib/api_client.dart index 540dc11300..a1240c800c 100644 --- a/mobile/openapi/lib/api_client.dart +++ b/mobile/openapi/lib/api_client.dart @@ -532,6 +532,14 @@ class ApiClient { return SyncAckDto.fromJson(value); case 'SyncAckSetDto': return SyncAckSetDto.fromJson(value); + case 'SyncAlbumDeleteV1': + return SyncAlbumDeleteV1.fromJson(value); + case 'SyncAlbumUserDeleteV1': + return SyncAlbumUserDeleteV1.fromJson(value); + case 'SyncAlbumUserV1': + return SyncAlbumUserV1.fromJson(value); + case 'SyncAlbumV1': + return SyncAlbumV1.fromJson(value); case 'SyncAssetDeleteV1': return SyncAssetDeleteV1.fromJson(value); case 'SyncAssetExifV1': diff --git a/mobile/openapi/lib/model/sync_album_delete_v1.dart b/mobile/openapi/lib/model/sync_album_delete_v1.dart new file mode 100644 index 0000000000..ae5ba3da5d --- /dev/null +++ b/mobile/openapi/lib/model/sync_album_delete_v1.dart @@ -0,0 +1,99 @@ +// +// AUTO-GENERATED FILE, DO NOT MODIFY! +// +// @dart=2.18 + +// ignore_for_file: unused_element, unused_import +// ignore_for_file: always_put_required_named_parameters_first +// ignore_for_file: constant_identifier_names +// ignore_for_file: lines_longer_than_80_chars + +part of openapi.api; + +class SyncAlbumDeleteV1 { + /// Returns a new [SyncAlbumDeleteV1] instance. + SyncAlbumDeleteV1({ + required this.albumId, + }); + + String albumId; + + @override + bool operator ==(Object other) => identical(this, other) || other is SyncAlbumDeleteV1 && + other.albumId == albumId; + + @override + int get hashCode => + // ignore: unnecessary_parenthesis + (albumId.hashCode); + + @override + String toString() => 'SyncAlbumDeleteV1[albumId=$albumId]'; + + Map toJson() { + final json = {}; + json[r'albumId'] = this.albumId; + return json; + } + + /// Returns a new [SyncAlbumDeleteV1] instance and imports its values from + /// [value] if it's a [Map], null otherwise. + // ignore: prefer_constructors_over_static_methods + static SyncAlbumDeleteV1? fromJson(dynamic value) { + upgradeDto(value, "SyncAlbumDeleteV1"); + if (value is Map) { + final json = value.cast(); + + return SyncAlbumDeleteV1( + albumId: mapValueOfType(json, r'albumId')!, + ); + } + return null; + } + + static List listFromJson(dynamic json, {bool growable = false,}) { + final result = []; + if (json is List && json.isNotEmpty) { + for (final row in json) { + final value = SyncAlbumDeleteV1.fromJson(row); + if (value != null) { + result.add(value); + } + } + } + return result.toList(growable: growable); + } + + static Map mapFromJson(dynamic json) { + final map = {}; + if (json is Map && json.isNotEmpty) { + json = json.cast(); // ignore: parameter_assignments + for (final entry in json.entries) { + final value = SyncAlbumDeleteV1.fromJson(entry.value); + if (value != null) { + map[entry.key] = value; + } + } + } + return map; + } + + // maps a json object with a list of SyncAlbumDeleteV1-objects as value to a dart map + static Map> mapListFromJson(dynamic json, {bool growable = false,}) { + final map = >{}; + if (json is Map && json.isNotEmpty) { + // ignore: parameter_assignments + json = json.cast(); + for (final entry in json.entries) { + map[entry.key] = SyncAlbumDeleteV1.listFromJson(entry.value, growable: growable,); + } + } + return map; + } + + /// The list of required keys that must be present in a JSON. 
+ static const requiredKeys = { + 'albumId', + }; +} + diff --git a/mobile/openapi/lib/model/sync_album_user_delete_v1.dart b/mobile/openapi/lib/model/sync_album_user_delete_v1.dart new file mode 100644 index 0000000000..f2b0fbee26 --- /dev/null +++ b/mobile/openapi/lib/model/sync_album_user_delete_v1.dart @@ -0,0 +1,107 @@ +// +// AUTO-GENERATED FILE, DO NOT MODIFY! +// +// @dart=2.18 + +// ignore_for_file: unused_element, unused_import +// ignore_for_file: always_put_required_named_parameters_first +// ignore_for_file: constant_identifier_names +// ignore_for_file: lines_longer_than_80_chars + +part of openapi.api; + +class SyncAlbumUserDeleteV1 { + /// Returns a new [SyncAlbumUserDeleteV1] instance. + SyncAlbumUserDeleteV1({ + required this.albumId, + required this.userId, + }); + + String albumId; + + String userId; + + @override + bool operator ==(Object other) => identical(this, other) || other is SyncAlbumUserDeleteV1 && + other.albumId == albumId && + other.userId == userId; + + @override + int get hashCode => + // ignore: unnecessary_parenthesis + (albumId.hashCode) + + (userId.hashCode); + + @override + String toString() => 'SyncAlbumUserDeleteV1[albumId=$albumId, userId=$userId]'; + + Map toJson() { + final json = {}; + json[r'albumId'] = this.albumId; + json[r'userId'] = this.userId; + return json; + } + + /// Returns a new [SyncAlbumUserDeleteV1] instance and imports its values from + /// [value] if it's a [Map], null otherwise. + // ignore: prefer_constructors_over_static_methods + static SyncAlbumUserDeleteV1? fromJson(dynamic value) { + upgradeDto(value, "SyncAlbumUserDeleteV1"); + if (value is Map) { + final json = value.cast(); + + return SyncAlbumUserDeleteV1( + albumId: mapValueOfType(json, r'albumId')!, + userId: mapValueOfType(json, r'userId')!, + ); + } + return null; + } + + static List listFromJson(dynamic json, {bool growable = false,}) { + final result = []; + if (json is List && json.isNotEmpty) { + for (final row in json) { + final value = SyncAlbumUserDeleteV1.fromJson(row); + if (value != null) { + result.add(value); + } + } + } + return result.toList(growable: growable); + } + + static Map mapFromJson(dynamic json) { + final map = {}; + if (json is Map && json.isNotEmpty) { + json = json.cast(); // ignore: parameter_assignments + for (final entry in json.entries) { + final value = SyncAlbumUserDeleteV1.fromJson(entry.value); + if (value != null) { + map[entry.key] = value; + } + } + } + return map; + } + + // maps a json object with a list of SyncAlbumUserDeleteV1-objects as value to a dart map + static Map> mapListFromJson(dynamic json, {bool growable = false,}) { + final map = >{}; + if (json is Map && json.isNotEmpty) { + // ignore: parameter_assignments + json = json.cast(); + for (final entry in json.entries) { + map[entry.key] = SyncAlbumUserDeleteV1.listFromJson(entry.value, growable: growable,); + } + } + return map; + } + + /// The list of required keys that must be present in a JSON. + static const requiredKeys = { + 'albumId', + 'userId', + }; +} + diff --git a/mobile/openapi/lib/model/sync_album_user_v1.dart b/mobile/openapi/lib/model/sync_album_user_v1.dart new file mode 100644 index 0000000000..c2b8ed7f48 --- /dev/null +++ b/mobile/openapi/lib/model/sync_album_user_v1.dart @@ -0,0 +1,189 @@ +// +// AUTO-GENERATED FILE, DO NOT MODIFY! 
+// +// @dart=2.18 + +// ignore_for_file: unused_element, unused_import +// ignore_for_file: always_put_required_named_parameters_first +// ignore_for_file: constant_identifier_names +// ignore_for_file: lines_longer_than_80_chars + +part of openapi.api; + +class SyncAlbumUserV1 { + /// Returns a new [SyncAlbumUserV1] instance. + SyncAlbumUserV1({ + required this.albumId, + required this.role, + required this.userId, + }); + + String albumId; + + SyncAlbumUserV1RoleEnum role; + + String userId; + + @override + bool operator ==(Object other) => identical(this, other) || other is SyncAlbumUserV1 && + other.albumId == albumId && + other.role == role && + other.userId == userId; + + @override + int get hashCode => + // ignore: unnecessary_parenthesis + (albumId.hashCode) + + (role.hashCode) + + (userId.hashCode); + + @override + String toString() => 'SyncAlbumUserV1[albumId=$albumId, role=$role, userId=$userId]'; + + Map toJson() { + final json = {}; + json[r'albumId'] = this.albumId; + json[r'role'] = this.role; + json[r'userId'] = this.userId; + return json; + } + + /// Returns a new [SyncAlbumUserV1] instance and imports its values from + /// [value] if it's a [Map], null otherwise. + // ignore: prefer_constructors_over_static_methods + static SyncAlbumUserV1? fromJson(dynamic value) { + upgradeDto(value, "SyncAlbumUserV1"); + if (value is Map) { + final json = value.cast(); + + return SyncAlbumUserV1( + albumId: mapValueOfType(json, r'albumId')!, + role: SyncAlbumUserV1RoleEnum.fromJson(json[r'role'])!, + userId: mapValueOfType(json, r'userId')!, + ); + } + return null; + } + + static List listFromJson(dynamic json, {bool growable = false,}) { + final result = []; + if (json is List && json.isNotEmpty) { + for (final row in json) { + final value = SyncAlbumUserV1.fromJson(row); + if (value != null) { + result.add(value); + } + } + } + return result.toList(growable: growable); + } + + static Map mapFromJson(dynamic json) { + final map = {}; + if (json is Map && json.isNotEmpty) { + json = json.cast(); // ignore: parameter_assignments + for (final entry in json.entries) { + final value = SyncAlbumUserV1.fromJson(entry.value); + if (value != null) { + map[entry.key] = value; + } + } + } + return map; + } + + // maps a json object with a list of SyncAlbumUserV1-objects as value to a dart map + static Map> mapListFromJson(dynamic json, {bool growable = false,}) { + final map = >{}; + if (json is Map && json.isNotEmpty) { + // ignore: parameter_assignments + json = json.cast(); + for (final entry in json.entries) { + map[entry.key] = SyncAlbumUserV1.listFromJson(entry.value, growable: growable,); + } + } + return map; + } + + /// The list of required keys that must be present in a JSON. + static const requiredKeys = { + 'albumId', + 'role', + 'userId', + }; +} + + +class SyncAlbumUserV1RoleEnum { + /// Instantiate a new enum with the provided [value]. + const SyncAlbumUserV1RoleEnum._(this.value); + + /// The underlying value of this enum member. + final String value; + + @override + String toString() => value; + + String toJson() => value; + + static const editor = SyncAlbumUserV1RoleEnum._(r'editor'); + static const viewer = SyncAlbumUserV1RoleEnum._(r'viewer'); + + /// List of all possible values in this [enum][SyncAlbumUserV1RoleEnum]. + static const values = [ + editor, + viewer, + ]; + + static SyncAlbumUserV1RoleEnum? 
fromJson(dynamic value) => SyncAlbumUserV1RoleEnumTypeTransformer().decode(value); + + static List listFromJson(dynamic json, {bool growable = false,}) { + final result = []; + if (json is List && json.isNotEmpty) { + for (final row in json) { + final value = SyncAlbumUserV1RoleEnum.fromJson(row); + if (value != null) { + result.add(value); + } + } + } + return result.toList(growable: growable); + } +} + +/// Transformation class that can [encode] an instance of [SyncAlbumUserV1RoleEnum] to String, +/// and [decode] dynamic data back to [SyncAlbumUserV1RoleEnum]. +class SyncAlbumUserV1RoleEnumTypeTransformer { + factory SyncAlbumUserV1RoleEnumTypeTransformer() => _instance ??= const SyncAlbumUserV1RoleEnumTypeTransformer._(); + + const SyncAlbumUserV1RoleEnumTypeTransformer._(); + + String encode(SyncAlbumUserV1RoleEnum data) => data.value; + + /// Decodes a [dynamic value][data] to a SyncAlbumUserV1RoleEnum. + /// + /// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully, + /// then null is returned. However, if [allowNull] is false and the [dynamic value][data] + /// cannot be decoded successfully, then an [UnimplementedError] is thrown. + /// + /// The [allowNull] is very handy when an API changes and a new enum value is added or removed, + /// and users are still using an old app with the old code. + SyncAlbumUserV1RoleEnum? decode(dynamic data, {bool allowNull = true}) { + if (data != null) { + switch (data) { + case r'editor': return SyncAlbumUserV1RoleEnum.editor; + case r'viewer': return SyncAlbumUserV1RoleEnum.viewer; + default: + if (!allowNull) { + throw ArgumentError('Unknown enum value to decode: $data'); + } + } + } + return null; + } + + /// Singleton [SyncAlbumUserV1RoleEnumTypeTransformer] instance. + static SyncAlbumUserV1RoleEnumTypeTransformer? _instance; +} + + diff --git a/mobile/openapi/lib/model/sync_album_v1.dart b/mobile/openapi/lib/model/sync_album_v1.dart new file mode 100644 index 0000000000..8ac8246d46 --- /dev/null +++ b/mobile/openapi/lib/model/sync_album_v1.dart @@ -0,0 +1,167 @@ +// +// AUTO-GENERATED FILE, DO NOT MODIFY! +// +// @dart=2.18 + +// ignore_for_file: unused_element, unused_import +// ignore_for_file: always_put_required_named_parameters_first +// ignore_for_file: constant_identifier_names +// ignore_for_file: lines_longer_than_80_chars + +part of openapi.api; + +class SyncAlbumV1 { + /// Returns a new [SyncAlbumV1] instance. + SyncAlbumV1({ + required this.createdAt, + required this.description, + required this.id, + required this.isActivityEnabled, + required this.name, + required this.order, + required this.ownerId, + required this.thumbnailAssetId, + required this.updatedAt, + }); + + DateTime createdAt; + + String description; + + String id; + + bool isActivityEnabled; + + String name; + + AssetOrder order; + + String ownerId; + + String? 
thumbnailAssetId; + + DateTime updatedAt; + + @override + bool operator ==(Object other) => identical(this, other) || other is SyncAlbumV1 && + other.createdAt == createdAt && + other.description == description && + other.id == id && + other.isActivityEnabled == isActivityEnabled && + other.name == name && + other.order == order && + other.ownerId == ownerId && + other.thumbnailAssetId == thumbnailAssetId && + other.updatedAt == updatedAt; + + @override + int get hashCode => + // ignore: unnecessary_parenthesis + (createdAt.hashCode) + + (description.hashCode) + + (id.hashCode) + + (isActivityEnabled.hashCode) + + (name.hashCode) + + (order.hashCode) + + (ownerId.hashCode) + + (thumbnailAssetId == null ? 0 : thumbnailAssetId!.hashCode) + + (updatedAt.hashCode); + + @override + String toString() => 'SyncAlbumV1[createdAt=$createdAt, description=$description, id=$id, isActivityEnabled=$isActivityEnabled, name=$name, order=$order, ownerId=$ownerId, thumbnailAssetId=$thumbnailAssetId, updatedAt=$updatedAt]'; + + Map toJson() { + final json = {}; + json[r'createdAt'] = this.createdAt.toUtc().toIso8601String(); + json[r'description'] = this.description; + json[r'id'] = this.id; + json[r'isActivityEnabled'] = this.isActivityEnabled; + json[r'name'] = this.name; + json[r'order'] = this.order; + json[r'ownerId'] = this.ownerId; + if (this.thumbnailAssetId != null) { + json[r'thumbnailAssetId'] = this.thumbnailAssetId; + } else { + // json[r'thumbnailAssetId'] = null; + } + json[r'updatedAt'] = this.updatedAt.toUtc().toIso8601String(); + return json; + } + + /// Returns a new [SyncAlbumV1] instance and imports its values from + /// [value] if it's a [Map], null otherwise. + // ignore: prefer_constructors_over_static_methods + static SyncAlbumV1? fromJson(dynamic value) { + upgradeDto(value, "SyncAlbumV1"); + if (value is Map) { + final json = value.cast(); + + return SyncAlbumV1( + createdAt: mapDateTime(json, r'createdAt', r'')!, + description: mapValueOfType(json, r'description')!, + id: mapValueOfType(json, r'id')!, + isActivityEnabled: mapValueOfType(json, r'isActivityEnabled')!, + name: mapValueOfType(json, r'name')!, + order: AssetOrder.fromJson(json[r'order'])!, + ownerId: mapValueOfType(json, r'ownerId')!, + thumbnailAssetId: mapValueOfType(json, r'thumbnailAssetId'), + updatedAt: mapDateTime(json, r'updatedAt', r'')!, + ); + } + return null; + } + + static List listFromJson(dynamic json, {bool growable = false,}) { + final result = []; + if (json is List && json.isNotEmpty) { + for (final row in json) { + final value = SyncAlbumV1.fromJson(row); + if (value != null) { + result.add(value); + } + } + } + return result.toList(growable: growable); + } + + static Map mapFromJson(dynamic json) { + final map = {}; + if (json is Map && json.isNotEmpty) { + json = json.cast(); // ignore: parameter_assignments + for (final entry in json.entries) { + final value = SyncAlbumV1.fromJson(entry.value); + if (value != null) { + map[entry.key] = value; + } + } + } + return map; + } + + // maps a json object with a list of SyncAlbumV1-objects as value to a dart map + static Map> mapListFromJson(dynamic json, {bool growable = false,}) { + final map = >{}; + if (json is Map && json.isNotEmpty) { + // ignore: parameter_assignments + json = json.cast(); + for (final entry in json.entries) { + map[entry.key] = SyncAlbumV1.listFromJson(entry.value, growable: growable,); + } + } + return map; + } + + /// The list of required keys that must be present in a JSON. 
+ static const requiredKeys = { + 'createdAt', + 'description', + 'id', + 'isActivityEnabled', + 'name', + 'order', + 'ownerId', + 'thumbnailAssetId', + 'updatedAt', + }; +} + diff --git a/mobile/openapi/lib/model/sync_entity_type.dart b/mobile/openapi/lib/model/sync_entity_type.dart index 5e52a10e7a..600371545a 100644 --- a/mobile/openapi/lib/model/sync_entity_type.dart +++ b/mobile/openapi/lib/model/sync_entity_type.dart @@ -33,6 +33,10 @@ class SyncEntityType { static const partnerAssetV1 = SyncEntityType._(r'PartnerAssetV1'); static const partnerAssetDeleteV1 = SyncEntityType._(r'PartnerAssetDeleteV1'); static const partnerAssetExifV1 = SyncEntityType._(r'PartnerAssetExifV1'); + static const albumV1 = SyncEntityType._(r'AlbumV1'); + static const albumDeleteV1 = SyncEntityType._(r'AlbumDeleteV1'); + static const albumUserV1 = SyncEntityType._(r'AlbumUserV1'); + static const albumUserDeleteV1 = SyncEntityType._(r'AlbumUserDeleteV1'); /// List of all possible values in this [enum][SyncEntityType]. static const values = [ @@ -46,6 +50,10 @@ class SyncEntityType { partnerAssetV1, partnerAssetDeleteV1, partnerAssetExifV1, + albumV1, + albumDeleteV1, + albumUserV1, + albumUserDeleteV1, ]; static SyncEntityType? fromJson(dynamic value) => SyncEntityTypeTypeTransformer().decode(value); @@ -94,6 +102,10 @@ class SyncEntityTypeTypeTransformer { case r'PartnerAssetV1': return SyncEntityType.partnerAssetV1; case r'PartnerAssetDeleteV1': return SyncEntityType.partnerAssetDeleteV1; case r'PartnerAssetExifV1': return SyncEntityType.partnerAssetExifV1; + case r'AlbumV1': return SyncEntityType.albumV1; + case r'AlbumDeleteV1': return SyncEntityType.albumDeleteV1; + case r'AlbumUserV1': return SyncEntityType.albumUserV1; + case r'AlbumUserDeleteV1': return SyncEntityType.albumUserDeleteV1; default: if (!allowNull) { throw ArgumentError('Unknown enum value to decode: $data'); diff --git a/mobile/openapi/lib/model/sync_request_type.dart b/mobile/openapi/lib/model/sync_request_type.dart index 08f977ad57..c149c329de 100644 --- a/mobile/openapi/lib/model/sync_request_type.dart +++ b/mobile/openapi/lib/model/sync_request_type.dart @@ -29,6 +29,8 @@ class SyncRequestType { static const assetExifsV1 = SyncRequestType._(r'AssetExifsV1'); static const partnerAssetsV1 = SyncRequestType._(r'PartnerAssetsV1'); static const partnerAssetExifsV1 = SyncRequestType._(r'PartnerAssetExifsV1'); + static const albumsV1 = SyncRequestType._(r'AlbumsV1'); + static const albumUsersV1 = SyncRequestType._(r'AlbumUsersV1'); /// List of all possible values in this [enum][SyncRequestType]. static const values = [ @@ -38,6 +40,8 @@ class SyncRequestType { assetExifsV1, partnerAssetsV1, partnerAssetExifsV1, + albumsV1, + albumUsersV1, ]; static SyncRequestType? 
fromJson(dynamic value) => SyncRequestTypeTypeTransformer().decode(value); @@ -82,6 +86,8 @@ class SyncRequestTypeTypeTransformer { case r'AssetExifsV1': return SyncRequestType.assetExifsV1; case r'PartnerAssetsV1': return SyncRequestType.partnerAssetsV1; case r'PartnerAssetExifsV1': return SyncRequestType.partnerAssetExifsV1; + case r'AlbumsV1': return SyncRequestType.albumsV1; + case r'AlbumUsersV1': return SyncRequestType.albumUsersV1; default: if (!allowNull) { throw ArgumentError('Unknown enum value to decode: $data'); diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index 2a8555f82c..cdd1f00763 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -12710,6 +12710,105 @@ ], "type": "object" }, + "SyncAlbumDeleteV1": { + "properties": { + "albumId": { + "type": "string" + } + }, + "required": [ + "albumId" + ], + "type": "object" + }, + "SyncAlbumUserDeleteV1": { + "properties": { + "albumId": { + "type": "string" + }, + "userId": { + "type": "string" + } + }, + "required": [ + "albumId", + "userId" + ], + "type": "object" + }, + "SyncAlbumUserV1": { + "properties": { + "albumId": { + "type": "string" + }, + "role": { + "enum": [ + "editor", + "viewer" + ], + "type": "string" + }, + "userId": { + "type": "string" + } + }, + "required": [ + "albumId", + "role", + "userId" + ], + "type": "object" + }, + "SyncAlbumV1": { + "properties": { + "createdAt": { + "format": "date-time", + "type": "string" + }, + "description": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isActivityEnabled": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "order": { + "allOf": [ + { + "$ref": "#/components/schemas/AssetOrder" + } + ] + }, + "ownerId": { + "type": "string" + }, + "thumbnailAssetId": { + "nullable": true, + "type": "string" + }, + "updatedAt": { + "format": "date-time", + "type": "string" + } + }, + "required": [ + "createdAt", + "description", + "id", + "isActivityEnabled", + "name", + "order", + "ownerId", + "thumbnailAssetId", + "updatedAt" + ], + "type": "object" + }, "SyncAssetDeleteV1": { "properties": { "assetId": { @@ -12937,7 +13036,11 @@ "AssetExifV1", "PartnerAssetV1", "PartnerAssetDeleteV1", - "PartnerAssetExifV1" + "PartnerAssetExifV1", + "AlbumV1", + "AlbumDeleteV1", + "AlbumUserV1", + "AlbumUserDeleteV1" ], "type": "string" }, @@ -12982,7 +13085,9 @@ "AssetsV1", "AssetExifsV1", "PartnerAssetsV1", - "PartnerAssetExifsV1" + "PartnerAssetExifsV1", + "AlbumsV1", + "AlbumUsersV1" ], "type": "string" }, diff --git a/open-api/typescript-sdk/src/fetch-client.ts b/open-api/typescript-sdk/src/fetch-client.ts index c27c9bc194..bb1ba605a5 100644 --- a/open-api/typescript-sdk/src/fetch-client.ts +++ b/open-api/typescript-sdk/src/fetch-client.ts @@ -3860,7 +3860,11 @@ export enum SyncEntityType { AssetExifV1 = "AssetExifV1", PartnerAssetV1 = "PartnerAssetV1", PartnerAssetDeleteV1 = "PartnerAssetDeleteV1", - PartnerAssetExifV1 = "PartnerAssetExifV1" + PartnerAssetExifV1 = "PartnerAssetExifV1", + AlbumV1 = "AlbumV1", + AlbumDeleteV1 = "AlbumDeleteV1", + AlbumUserV1 = "AlbumUserV1", + AlbumUserDeleteV1 = "AlbumUserDeleteV1" } export enum SyncRequestType { UsersV1 = "UsersV1", @@ -3868,7 +3872,9 @@ export enum SyncRequestType { AssetsV1 = "AssetsV1", AssetExifsV1 = "AssetExifsV1", PartnerAssetsV1 = "PartnerAssetsV1", - PartnerAssetExifsV1 = "PartnerAssetExifsV1" + PartnerAssetExifsV1 = "PartnerAssetExifsV1", + AlbumsV1 = "AlbumsV1", + AlbumUsersV1 = "AlbumUsersV1" } export enum TranscodeHWAccel 
{ Nvenc = "nvenc", diff --git a/server/package.json b/server/package.json index 681fd687d0..f95817342e 100644 --- a/server/package.json +++ b/server/package.json @@ -23,6 +23,7 @@ "test:medium": "vitest --config test/vitest.config.medium.mjs", "typeorm": "typeorm", "lifecycle": "node ./dist/utils/lifecycle.js", + "migrations:debug": "node ./dist/bin/migrations.js debug", "migrations:generate": "node ./dist/bin/migrations.js generate", "migrations:create": "node ./dist/bin/migrations.js create", "migrations:run": "node ./dist/bin/migrations.js run", diff --git a/server/src/bin/migrations.ts b/server/src/bin/migrations.ts index 69070dc0cf..b3329e6331 100644 --- a/server/src/bin/migrations.ts +++ b/server/src/bin/migrations.ts @@ -125,6 +125,7 @@ const compare = async () => { const down = schemaDiff(target, source, { tables: { ignoreExtra: false }, functions: { ignoreExtra: false }, + extension: { ignoreMissing: true }, }); return { up, down }; diff --git a/server/src/db.d.ts b/server/src/db.d.ts index 943c9ddfa0..af1dd964fd 100644 --- a/server/src/db.d.ts +++ b/server/src/db.d.ts @@ -74,6 +74,20 @@ export interface Albums { updateId: Generated; } +export interface AlbumsAudit { + deletedAt: Generated; + id: Generated; + albumId: string; + userId: string; +} + +export interface AlbumUsersAudit { + deletedAt: Generated; + id: Generated; + albumId: string; + userId: string; +} + export interface AlbumsAssetsAssets { albumsId: string; assetsId: string; @@ -84,6 +98,8 @@ export interface AlbumsSharedUsersUsers { albumsId: string; role: Generated; usersId: string; + updatedAt: Generated; + updateId: Generated; } export interface ApiKeys { @@ -466,8 +482,10 @@ export interface VersionHistory { export interface DB { activity: Activity; albums: Albums; + albums_audit: AlbumsAudit; albums_assets_assets: AlbumsAssetsAssets; albums_shared_users_users: AlbumsSharedUsersUsers; + album_users_audit: AlbumUsersAudit; api_keys: ApiKeys; asset_faces: AssetFaces; asset_files: AssetFiles; diff --git a/server/src/dtos/sync.dto.ts b/server/src/dtos/sync.dto.ts index cc11c3410b..0043cfb40b 100644 --- a/server/src/dtos/sync.dto.ts +++ b/server/src/dtos/sync.dto.ts @@ -1,7 +1,7 @@ import { ApiProperty } from '@nestjs/swagger'; import { IsEnum, IsInt, IsPositive, IsString } from 'class-validator'; import { AssetResponseDto } from 'src/dtos/asset-response.dto'; -import { AssetType, AssetVisibility, SyncEntityType, SyncRequestType } from 'src/enum'; +import { AlbumUserRole, AssetOrder, AssetType, AssetVisibility, SyncEntityType, SyncRequestType } from 'src/enum'; import { Optional, ValidateDate, ValidateUUID } from 'src/validation'; export class AssetFullSyncDto { @@ -112,6 +112,34 @@ export class SyncAssetExifV1 { fps!: number | null; } +export class SyncAlbumDeleteV1 { + albumId!: string; +} + +export class SyncAlbumUserDeleteV1 { + albumId!: string; + userId!: string; +} + +export class SyncAlbumUserV1 { + albumId!: string; + userId!: string; + role!: AlbumUserRole; +} + +export class SyncAlbumV1 { + id!: string; + ownerId!: string; + name!: string; + description!: string; + createdAt!: Date; + updatedAt!: Date; + thumbnailAssetId!: string | null; + isActivityEnabled!: boolean; + @ApiProperty({ enumName: 'AssetOrder', enum: AssetOrder }) + order!: AssetOrder; +} + export type SyncItem = { [SyncEntityType.UserV1]: SyncUserV1; [SyncEntityType.UserDeleteV1]: SyncUserDeleteV1; @@ -123,10 +151,13 @@ export type SyncItem = { [SyncEntityType.PartnerAssetV1]: SyncAssetV1; [SyncEntityType.PartnerAssetDeleteV1]: 
SyncAssetDeleteV1; [SyncEntityType.PartnerAssetExifV1]: SyncAssetExifV1; + [SyncEntityType.AlbumV1]: SyncAlbumV1; + [SyncEntityType.AlbumDeleteV1]: SyncAlbumDeleteV1; + [SyncEntityType.AlbumUserV1]: SyncAlbumUserV1; + [SyncEntityType.AlbumUserDeleteV1]: SyncAlbumUserDeleteV1; }; const responseDtos = [ - // SyncUserV1, SyncUserDeleteV1, SyncPartnerV1, @@ -134,6 +165,10 @@ const responseDtos = [ SyncAssetV1, SyncAssetDeleteV1, SyncAssetExifV1, + SyncAlbumV1, + SyncAlbumDeleteV1, + SyncAlbumUserV1, + SyncAlbumUserDeleteV1, ]; export const extraSyncModels = responseDtos; diff --git a/server/src/enum.ts b/server/src/enum.ts index c9cf34383e..b00b013393 100644 --- a/server/src/enum.ts +++ b/server/src/enum.ts @@ -578,6 +578,8 @@ export enum SyncRequestType { AssetExifsV1 = 'AssetExifsV1', PartnerAssetsV1 = 'PartnerAssetsV1', PartnerAssetExifsV1 = 'PartnerAssetExifsV1', + AlbumsV1 = 'AlbumsV1', + AlbumUsersV1 = 'AlbumUsersV1', } export enum SyncEntityType { @@ -594,6 +596,11 @@ export enum SyncEntityType { PartnerAssetV1 = 'PartnerAssetV1', PartnerAssetDeleteV1 = 'PartnerAssetDeleteV1', PartnerAssetExifV1 = 'PartnerAssetExifV1', + + AlbumV1 = 'AlbumV1', + AlbumDeleteV1 = 'AlbumDeleteV1', + AlbumUserV1 = 'AlbumUserV1', + AlbumUserDeleteV1 = 'AlbumUserDeleteV1', } export enum NotificationLevel { diff --git a/server/src/queries/album.user.repository.sql b/server/src/queries/album.user.repository.sql index d628e4980a..08f337c150 100644 --- a/server/src/queries/album.user.repository.sql +++ b/server/src/queries/album.user.repository.sql @@ -6,7 +6,9 @@ insert into values ($1, $2) returning - * + "usersId", + "albumsId", + "role" -- AlbumUserRepository.update update "albums_shared_users_users" diff --git a/server/src/queries/sync.repository.sql b/server/src/queries/sync.repository.sql index 54c1292d80..f797f5c0b5 100644 --- a/server/src/queries/sync.repository.sql +++ b/server/src/queries/sync.repository.sql @@ -246,3 +246,98 @@ where and "updatedAt" < now() - interval '1 millisecond' order by "updateId" asc + +-- SyncRepository.getAlbumDeletes +select + "id", + "albumId" +from + "albums_audit" +where + "userId" = $1 + and "deletedAt" < now() - interval '1 millisecond' +order by + "id" asc + +-- SyncRepository.getAlbumUpserts +select distinct + on ("albums"."id", "albums"."updateId") "albums"."id", + "albums"."ownerId", + "albums"."albumName" as "name", + "albums"."description", + "albums"."createdAt", + "albums"."updatedAt", + "albums"."albumThumbnailAssetId" as "thumbnailAssetId", + "albums"."isActivityEnabled", + "albums"."order", + "albums"."updateId" +from + "albums" + left join "albums_shared_users_users" as "album_users" on "albums"."id" = "album_users"."albumsId" +where + "albums"."updatedAt" < now() - interval '1 millisecond' + and ( + "albums"."ownerId" = $1 + or "album_users"."usersId" = $2 + ) +order by + "albums"."updateId" asc + +-- SyncRepository.getAlbumUserDeletes +select + "id", + "userId", + "albumId" +from + "album_users_audit" +where + "albumId" in ( + select + "id" + from + "albums" + where + "ownerId" = $1 + union + ( + select + "albumUsers"."albumsId" as "id" + from + "albums_shared_users_users" as "albumUsers" + where + "albumUsers"."usersId" = $2 + ) + ) + and "deletedAt" < now() - interval '1 millisecond' +order by + "id" asc + +-- SyncRepository.getAlbumUserUpserts +select + "albums_shared_users_users"."albumsId" as "albumId", + "albums_shared_users_users"."usersId" as "userId", + "albums_shared_users_users"."role", + "albums_shared_users_users"."updateId" +from + 
"albums_shared_users_users" +where + "albums_shared_users_users"."updatedAt" < now() - interval '1 millisecond' + and "albums_shared_users_users"."albumsId" in ( + select + "id" + from + "albums" + where + "ownerId" = $1 + union + ( + select + "albumUsers"."albumsId" as "id" + from + "albums_shared_users_users" as "albumUsers" + where + "albumUsers"."usersId" = $2 + ) + ) +order by + "albums_shared_users_users"."updateId" asc diff --git a/server/src/repositories/album-user.repository.ts b/server/src/repositories/album-user.repository.ts index f363f2e91a..ad7ba8d6cd 100644 --- a/server/src/repositories/album-user.repository.ts +++ b/server/src/repositories/album-user.repository.ts @@ -1,5 +1,5 @@ import { Injectable } from '@nestjs/common'; -import { Insertable, Kysely, Selectable, Updateable } from 'kysely'; +import { Insertable, Kysely, Updateable } from 'kysely'; import { InjectKysely } from 'nestjs-kysely'; import { AlbumsSharedUsersUsers, DB } from 'src/db'; import { DummyValue, GenerateSql } from 'src/decorators'; @@ -15,8 +15,12 @@ export class AlbumUserRepository { constructor(@InjectKysely() private db: Kysely) {} @GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }] }) - create(albumUser: Insertable): Promise> { - return this.db.insertInto('albums_shared_users_users').values(albumUser).returningAll().executeTakeFirstOrThrow(); + create(albumUser: Insertable) { + return this.db + .insertInto('albums_shared_users_users') + .values(albumUser) + .returning(['usersId', 'albumsId', 'role']) + .executeTakeFirstOrThrow(); } @GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }, { role: AlbumUserRole.VIEWER }] }) diff --git a/server/src/repositories/sync.repository.ts b/server/src/repositories/sync.repository.ts index f0c535ecf2..43fd732747 100644 --- a/server/src/repositories/sync.repository.ts +++ b/server/src/repositories/sync.repository.ts @@ -7,8 +7,8 @@ import { DummyValue, GenerateSql } from 'src/decorators'; import { SyncEntityType } from 'src/enum'; import { SyncAck } from 'src/types'; -type auditTables = 'users_audit' | 'partners_audit' | 'assets_audit'; -type upsertTables = 'users' | 'partners' | 'assets' | 'exif'; +type AuditTables = 'users_audit' | 'partners_audit' | 'assets_audit' | 'albums_audit' | 'album_users_audit'; +type UpsertTables = 'users' | 'partners' | 'assets' | 'exif' | 'albums' | 'albums_shared_users_users'; @Injectable() export class SyncRepository { @@ -110,7 +110,6 @@ export class SyncRepository { .selectFrom('assets_audit') .select(['id', 'assetId']) .where('ownerId', '=', userId) - .$if(!!ack, (qb) => qb.where('id', '>', ack!.updateId)) .$call((qb) => this.auditTableFilters(qb, ack)) .stream(); } @@ -154,19 +153,115 @@ export class SyncRepository { .stream(); } - private auditTableFilters, D>(qb: SelectQueryBuilder, ack?: SyncAck) { - const builder = qb as SelectQueryBuilder; + @GenerateSql({ params: [DummyValue.UUID], stream: true }) + getAlbumDeletes(userId: string, ack?: SyncAck) { + return this.db + .selectFrom('albums_audit') + .select(['id', 'albumId']) + .where('userId', '=', userId) + .$call((qb) => this.auditTableFilters(qb, ack)) + .stream(); + } + + @GenerateSql({ params: [DummyValue.UUID], stream: true }) + getAlbumUpserts(userId: string, ack?: SyncAck) { + return this.db + .selectFrom('albums') + .distinctOn(['albums.id', 'albums.updateId']) + .where('albums.updatedAt', '<', sql.raw("now() - interval '1 millisecond'")) + .$if(!!ack, (qb) => qb.where('albums.updateId', '>', ack!.updateId)) + 
.orderBy('albums.updateId', 'asc') + .leftJoin('albums_shared_users_users as album_users', 'albums.id', 'album_users.albumsId') + .where((eb) => eb.or([eb('albums.ownerId', '=', userId), eb('album_users.usersId', '=', userId)])) + .select([ + 'albums.id', + 'albums.ownerId', + 'albums.albumName as name', + 'albums.description', + 'albums.createdAt', + 'albums.updatedAt', + 'albums.albumThumbnailAssetId as thumbnailAssetId', + 'albums.isActivityEnabled', + 'albums.order', + 'albums.updateId', + ]) + .stream(); + } + + @GenerateSql({ params: [DummyValue.UUID], stream: true }) + getAlbumUserDeletes(userId: string, ack?: SyncAck) { + return this.db + .selectFrom('album_users_audit') + .select(['id', 'userId', 'albumId']) + .where((eb) => + eb( + 'albumId', + 'in', + eb + .selectFrom('albums') + .select(['id']) + .where('ownerId', '=', userId) + .union((eb) => + eb.parens( + eb + .selectFrom('albums_shared_users_users as albumUsers') + .select(['albumUsers.albumsId as id']) + .where('albumUsers.usersId', '=', userId), + ), + ), + ), + ) + .$call((qb) => this.auditTableFilters(qb, ack)) + .stream(); + } + + @GenerateSql({ params: [DummyValue.UUID], stream: true }) + getAlbumUserUpserts(userId: string, ack?: SyncAck) { + return this.db + .selectFrom('albums_shared_users_users') + .select([ + 'albums_shared_users_users.albumsId as albumId', + 'albums_shared_users_users.usersId as userId', + 'albums_shared_users_users.role', + 'albums_shared_users_users.updateId', + ]) + .where('albums_shared_users_users.updatedAt', '<', sql.raw("now() - interval '1 millisecond'")) + .$if(!!ack, (qb) => qb.where('albums_shared_users_users.updateId', '>', ack!.updateId)) + .orderBy('albums_shared_users_users.updateId', 'asc') + .where((eb) => + eb( + 'albums_shared_users_users.albumsId', + 'in', + eb + .selectFrom('albums') + .select(['id']) + .where('ownerId', '=', userId) + .union((eb) => + eb.parens( + eb + .selectFrom('albums_shared_users_users as albumUsers') + .select(['albumUsers.albumsId as id']) + .where('albumUsers.usersId', '=', userId), + ), + ), + ), + ) + .stream(); + } + + private auditTableFilters, D>(qb: SelectQueryBuilder, ack?: SyncAck) { + const builder = qb as SelectQueryBuilder; return builder .where('deletedAt', '<', sql.raw("now() - interval '1 millisecond'")) .$if(!!ack, (qb) => qb.where('id', '>', ack!.updateId)) .orderBy('id', 'asc') as SelectQueryBuilder; } - private upsertTableFilters, D>( + private upsertTableFilters, D>( qb: SelectQueryBuilder, ack?: SyncAck, ) { - const builder = qb as SelectQueryBuilder; + const builder = qb as SelectQueryBuilder; return builder .where('updatedAt', '<', sql.raw("now() - interval '1 millisecond'")) .$if(!!ack, (qb) => qb.where('updateId', '>', ack!.updateId)) diff --git a/server/src/schema/functions.ts b/server/src/schema/functions.ts index 65ad2b72dc..a03f715bff 100644 --- a/server/src/schema/functions.ts +++ b/server/src/schema/functions.ts @@ -23,6 +23,19 @@ export const immich_uuid_v7 = registerFunction({ synchronize: false, }); +export const album_user_after_insert = registerFunction({ + name: 'album_user_after_insert', + returnType: 'TRIGGER', + language: 'PLPGSQL', + body: ` + BEGIN + UPDATE albums SET "updatedAt" = clock_timestamp(), "updateId" = immich_uuid_v7(clock_timestamp()) + WHERE "id" IN (SELECT DISTINCT "albumsId" FROM inserted_rows); + RETURN NULL; + END`, + synchronize: false, +}); + export const updated_at = registerFunction({ name: 'updated_at', returnType: 'TRIGGER', @@ -114,3 +127,38 @@ export const assets_delete_audit = 
registerFunction({ END`, synchronize: false, }); + +export const albums_delete_audit = registerFunction({ + name: 'albums_delete_audit', + returnType: 'TRIGGER', + language: 'PLPGSQL', + body: ` + BEGIN + INSERT INTO albums_audit ("albumId", "userId") + SELECT "id", "ownerId" + FROM OLD; + RETURN NULL; + END`, + synchronize: false, +}); + +export const album_users_delete_audit = registerFunction({ + name: 'album_users_delete_audit', + returnType: 'TRIGGER', + language: 'PLPGSQL', + body: ` + BEGIN + INSERT INTO albums_audit ("albumId", "userId") + SELECT "albumsId", "usersId" + FROM OLD; + + IF pg_trigger_depth() = 1 THEN + INSERT INTO album_users_audit ("albumId", "userId") + SELECT "albumsId", "usersId" + FROM OLD; + END IF; + + RETURN NULL; + END`, + synchronize: false, +}); diff --git a/server/src/schema/index.ts b/server/src/schema/index.ts index 735dfd3ae9..d2f8d80afc 100644 --- a/server/src/schema/index.ts +++ b/server/src/schema/index.ts @@ -1,5 +1,8 @@ import { asset_face_source_type, asset_visibility_enum, assets_status_enum } from 'src/schema/enums'; import { + album_user_after_insert, + album_users_delete_audit, + albums_delete_audit, assets_delete_audit, f_concat_ws, f_unaccent, @@ -11,6 +14,8 @@ import { } from 'src/schema/functions'; import { ActivityTable } from 'src/schema/tables/activity.table'; import { AlbumAssetTable } from 'src/schema/tables/album-asset.table'; +import { AlbumAuditTable } from 'src/schema/tables/album-audit.table'; +import { AlbumUserAuditTable } from 'src/schema/tables/album-user-audit.table'; import { AlbumUserTable } from 'src/schema/tables/album-user.table'; import { AlbumTable } from 'src/schema/tables/album.table'; import { APIKeyTable } from 'src/schema/tables/api-key.table'; @@ -45,15 +50,16 @@ import { UserAuditTable } from 'src/schema/tables/user-audit.table'; import { UserMetadataTable } from 'src/schema/tables/user-metadata.table'; import { UserTable } from 'src/schema/tables/user.table'; import { VersionHistoryTable } from 'src/schema/tables/version-history.table'; -import { ConfigurationParameter, Database, Extensions } from 'src/sql-tools'; +import { Database, Extensions } from 'src/sql-tools'; @Extensions(['uuid-ossp', 'unaccent', 'cube', 'earthdistance', 'pg_trgm', 'plpgsql']) -@ConfigurationParameter({ name: 'search_path', value: () => '"$user", public, vectors', scope: 'database' }) @Database({ name: 'immich' }) export class ImmichDatabase { tables = [ ActivityTable, AlbumAssetTable, + AlbumAuditTable, + AlbumUserAuditTable, AlbumUserTable, AlbumTable, APIKeyTable, @@ -99,6 +105,9 @@ export class ImmichDatabase { users_delete_audit, partners_delete_audit, assets_delete_audit, + albums_delete_audit, + album_user_after_insert, + album_users_delete_audit, ]; enum = [assets_status_enum, asset_face_source_type, asset_visibility_enum]; diff --git a/server/src/schema/migrations/1747664684909-AddAlbumAuditTables.ts b/server/src/schema/migrations/1747664684909-AddAlbumAuditTables.ts new file mode 100644 index 0000000000..25ccfee710 --- /dev/null +++ b/server/src/schema/migrations/1747664684909-AddAlbumAuditTables.ts @@ -0,0 +1,96 @@ +import { Kysely, sql } from 'kysely'; + +export async function up(db: Kysely): Promise { + await sql`CREATE OR REPLACE FUNCTION album_user_after_insert() + RETURNS TRIGGER + LANGUAGE PLPGSQL + AS $$ + BEGIN + UPDATE albums SET "updatedAt" = clock_timestamp(), "updateId" = immich_uuid_v7(clock_timestamp()) + WHERE "id" IN (SELECT DISTINCT "albumsId" FROM inserted_rows); + RETURN NULL; + END + $$;`.execute(db); + 
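+  // Audit design: deletions are captured by statement-level AFTER DELETE triggers that read the
+  // OLD transition table and write one audit row per deleted record. albums_delete_audit only
+  // fires for direct album deletes (pg_trigger_depth() = 0), while album_users_delete_audit also
+  // fires for the cascade caused by deleting an album; its inner pg_trigger_depth() = 1 check
+  // limits album_users_audit to direct un-shares, so a cascaded removal surfaces to the affected
+  // users as an album delete (via albums_audit) rather than as individual album-user deletes.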
await sql`CREATE OR REPLACE FUNCTION albums_delete_audit() + RETURNS TRIGGER + LANGUAGE PLPGSQL + AS $$ + BEGIN + INSERT INTO albums_audit ("albumId", "userId") + SELECT "id", "ownerId" + FROM OLD; + RETURN NULL; + END + $$;`.execute(db); + await sql`CREATE OR REPLACE FUNCTION album_users_delete_audit() + RETURNS TRIGGER + LANGUAGE PLPGSQL + AS $$ + BEGIN + INSERT INTO albums_audit ("albumId", "userId") + SELECT "albumsId", "usersId" + FROM OLD; + + IF pg_trigger_depth() = 1 THEN + INSERT INTO album_users_audit ("albumId", "userId") + SELECT "albumsId", "usersId" + FROM OLD; + END IF; + + RETURN NULL; + END + $$;`.execute(db); + await sql`CREATE TABLE "albums_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "albumId" uuid NOT NULL, "userId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db); + await sql`CREATE TABLE "album_users_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "albumId" uuid NOT NULL, "userId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db); + await sql`ALTER TABLE "albums_audit" ADD CONSTRAINT "PK_c75efea8d4dce316ad29b851a8b" PRIMARY KEY ("id");`.execute(db); + await sql`ALTER TABLE "album_users_audit" ADD CONSTRAINT "PK_f479a2e575b7ebc9698362c1688" PRIMARY KEY ("id");`.execute(db); + await sql`ALTER TABLE "albums_shared_users_users" ADD "updateId" uuid NOT NULL DEFAULT immich_uuid_v7();`.execute(db); + await sql`ALTER TABLE "albums_shared_users_users" ADD "updatedAt" timestamp with time zone NOT NULL DEFAULT now();`.execute(db); + await sql`CREATE INDEX "IDX_album_users_update_id" ON "albums_shared_users_users" ("updateId")`.execute(db); + await sql`CREATE INDEX "IDX_albums_audit_album_id" ON "albums_audit" ("albumId")`.execute(db); + await sql`CREATE INDEX "IDX_albums_audit_user_id" ON "albums_audit" ("userId")`.execute(db); + await sql`CREATE INDEX "IDX_albums_audit_deleted_at" ON "albums_audit" ("deletedAt")`.execute(db); + await sql`CREATE INDEX "IDX_album_users_audit_album_id" ON "album_users_audit" ("albumId")`.execute(db); + await sql`CREATE INDEX "IDX_album_users_audit_user_id" ON "album_users_audit" ("userId")`.execute(db); + await sql`CREATE INDEX "IDX_album_users_audit_deleted_at" ON "album_users_audit" ("deletedAt")`.execute(db); + await sql`CREATE OR REPLACE TRIGGER "albums_delete_audit" + AFTER DELETE ON "albums" + REFERENCING OLD TABLE AS "old" + FOR EACH STATEMENT + WHEN (pg_trigger_depth() = 0) + EXECUTE FUNCTION albums_delete_audit();`.execute(db); + await sql`CREATE OR REPLACE TRIGGER "album_users_delete_audit" + AFTER DELETE ON "albums_shared_users_users" + REFERENCING OLD TABLE AS "old" + FOR EACH STATEMENT + WHEN (pg_trigger_depth() <= 1) + EXECUTE FUNCTION album_users_delete_audit();`.execute(db); + await sql`CREATE OR REPLACE TRIGGER "album_user_after_insert" + AFTER INSERT ON "albums_shared_users_users" + REFERENCING NEW TABLE AS "inserted_rows" + FOR EACH STATEMENT + EXECUTE FUNCTION album_user_after_insert();`.execute(db); + await sql`CREATE OR REPLACE TRIGGER "album_users_updated_at" + BEFORE UPDATE ON "albums_shared_users_users" + FOR EACH ROW + EXECUTE FUNCTION updated_at();`.execute(db); +} + +export async function down(db: Kysely): Promise { + await sql`DROP TRIGGER "albums_delete_audit" ON "albums";`.execute(db); + await sql`DROP TRIGGER "album_users_delete_audit" ON "albums_shared_users_users";`.execute(db); + await sql`DROP TRIGGER "album_user_after_insert" ON "albums_shared_users_users";`.execute(db); + await sql`DROP INDEX 
"IDX_albums_audit_album_id";`.execute(db); + await sql`DROP INDEX "IDX_albums_audit_user_id";`.execute(db); + await sql`DROP INDEX "IDX_albums_audit_deleted_at";`.execute(db); + await sql`DROP INDEX "IDX_album_users_audit_album_id";`.execute(db); + await sql`DROP INDEX "IDX_album_users_audit_user_id";`.execute(db); + await sql`DROP INDEX "IDX_album_users_audit_deleted_at";`.execute(db); + await sql`ALTER TABLE "albums_audit" DROP CONSTRAINT "PK_c75efea8d4dce316ad29b851a8b";`.execute(db); + await sql`ALTER TABLE "album_users_audit" DROP CONSTRAINT "PK_f479a2e575b7ebc9698362c1688";`.execute(db); + await sql`DROP TABLE "albums_audit";`.execute(db); + await sql`DROP TABLE "album_users_audit";`.execute(db); + await sql`DROP FUNCTION album_user_after_insert;`.execute(db); + await sql`DROP FUNCTION albums_delete_audit;`.execute(db); + await sql`DROP FUNCTION album_users_delete_audit;`.execute(db); +} diff --git a/server/src/schema/tables/album-audit.table.ts b/server/src/schema/tables/album-audit.table.ts new file mode 100644 index 0000000000..66b70654e9 --- /dev/null +++ b/server/src/schema/tables/album-audit.table.ts @@ -0,0 +1,17 @@ +import { PrimaryGeneratedUuidV7Column } from 'src/decorators'; +import { Column, CreateDateColumn, Table } from 'src/sql-tools'; + +@Table('albums_audit') +export class AlbumAuditTable { + @PrimaryGeneratedUuidV7Column() + id!: string; + + @Column({ type: 'uuid', indexName: 'IDX_albums_audit_album_id' }) + albumId!: string; + + @Column({ type: 'uuid', indexName: 'IDX_albums_audit_user_id' }) + userId!: string; + + @CreateDateColumn({ default: () => 'clock_timestamp()', indexName: 'IDX_albums_audit_deleted_at' }) + deletedAt!: Date; +} diff --git a/server/src/schema/tables/album-user-audit.table.ts b/server/src/schema/tables/album-user-audit.table.ts new file mode 100644 index 0000000000..46ad6b682b --- /dev/null +++ b/server/src/schema/tables/album-user-audit.table.ts @@ -0,0 +1,17 @@ +import { PrimaryGeneratedUuidV7Column } from 'src/decorators'; +import { Column, CreateDateColumn, Table } from 'src/sql-tools'; + +@Table('album_users_audit') +export class AlbumUserAuditTable { + @PrimaryGeneratedUuidV7Column() + id!: string; + + @Column({ type: 'uuid', indexName: 'IDX_album_users_audit_album_id' }) + albumId!: string; + + @Column({ type: 'uuid', indexName: 'IDX_album_users_audit_user_id' }) + userId!: string; + + @CreateDateColumn({ default: () => 'clock_timestamp()', indexName: 'IDX_album_users_audit_deleted_at' }) + deletedAt!: Date; +} diff --git a/server/src/schema/tables/album-user.table.ts b/server/src/schema/tables/album-user.table.ts index 8bd05df2ee..276efd126a 100644 --- a/server/src/schema/tables/album-user.table.ts +++ b/server/src/schema/tables/album-user.table.ts @@ -1,12 +1,36 @@ +import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators'; import { AlbumUserRole } from 'src/enum'; +import { album_user_after_insert, album_users_delete_audit } from 'src/schema/functions'; import { AlbumTable } from 'src/schema/tables/album.table'; import { UserTable } from 'src/schema/tables/user.table'; -import { Column, ForeignKeyColumn, Index, Table } from 'src/sql-tools'; +import { + AfterDeleteTrigger, + AfterInsertTrigger, + Column, + ForeignKeyColumn, + Index, + Table, + UpdateDateColumn, +} from 'src/sql-tools'; @Table({ name: 'albums_shared_users_users', primaryConstraintName: 'PK_7df55657e0b2e8b626330a0ebc8' }) // Pre-existing indices from original album <--> user ManyToMany mapping @Index({ name: 'IDX_427c350ad49bd3935a50baab73', columns: 
['albumsId'] }) @Index({ name: 'IDX_f48513bf9bccefd6ff3ad30bd0', columns: ['usersId'] }) +@UpdatedAtTrigger('album_users_updated_at') +@AfterInsertTrigger({ + name: 'album_user_after_insert', + scope: 'statement', + referencingNewTableAs: 'inserted_rows', + function: album_user_after_insert, +}) +@AfterDeleteTrigger({ + name: 'album_users_delete_audit', + scope: 'statement', + function: album_users_delete_audit, + referencingOldTableAs: 'old', + when: 'pg_trigger_depth() <= 1', +}) export class AlbumUserTable { @ForeignKeyColumn(() => AlbumTable, { onDelete: 'CASCADE', @@ -26,4 +50,10 @@ export class AlbumUserTable { @Column({ type: 'character varying', default: AlbumUserRole.EDITOR }) role!: AlbumUserRole; + + @UpdateIdColumn({ indexName: 'IDX_album_users_update_id' }) + updateId?: string; + + @UpdateDateColumn() + updatedAt!: Date; } diff --git a/server/src/schema/tables/album.table.ts b/server/src/schema/tables/album.table.ts index 428947fa51..5d02cc9f25 100644 --- a/server/src/schema/tables/album.table.ts +++ b/server/src/schema/tables/album.table.ts @@ -1,8 +1,10 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators'; import { AssetOrder } from 'src/enum'; +import { albums_delete_audit } from 'src/schema/functions'; import { AssetTable } from 'src/schema/tables/asset.table'; import { UserTable } from 'src/schema/tables/user.table'; import { + AfterDeleteTrigger, Column, CreateDateColumn, DeleteDateColumn, @@ -14,6 +16,13 @@ import { @Table({ name: 'albums', primaryConstraintName: 'PK_7f71c7b5bc7c87b8f94c9a93a00' }) @UpdatedAtTrigger('albums_updated_at') +@AfterDeleteTrigger({ + name: 'albums_delete_audit', + scope: 'statement', + function: albums_delete_audit, + referencingOldTableAs: 'old', + when: 'pg_trigger_depth() = 0', +}) export class AlbumTable { @PrimaryGeneratedColumn() id!: string; diff --git a/server/src/services/sync.service.ts b/server/src/services/sync.service.ts index bd3c09098f..d6cbc17a29 100644 --- a/server/src/services/sync.service.ts +++ b/server/src/services/sync.service.ts @@ -24,13 +24,14 @@ import { fromAck, serialize } from 'src/utils/sync'; const FULL_SYNC = { needsFullSync: true, deleted: [], upserted: [] }; export const SYNC_TYPES_ORDER = [ - // SyncRequestType.UsersV1, SyncRequestType.PartnersV1, SyncRequestType.AssetsV1, SyncRequestType.AssetExifsV1, SyncRequestType.PartnerAssetsV1, SyncRequestType.PartnerAssetExifsV1, + SyncRequestType.AlbumsV1, + SyncRequestType.AlbumUsersV1, ]; const throwSessionRequired = () => { @@ -206,6 +207,43 @@ export class SyncService extends BaseService { break; } + case SyncRequestType.AlbumsV1: { + const deletes = this.syncRepository.getAlbumDeletes( + auth.user.id, + checkpointMap[SyncEntityType.AlbumDeleteV1], + ); + for await (const { id, ...data } of deletes) { + response.write(serialize({ type: SyncEntityType.AlbumDeleteV1, updateId: id, data })); + } + + const upserts = this.syncRepository.getAlbumUpserts(auth.user.id, checkpointMap[SyncEntityType.AlbumV1]); + for await (const { updateId, ...data } of upserts) { + response.write(serialize({ type: SyncEntityType.AlbumV1, updateId, data })); + } + + break; + } + + case SyncRequestType.AlbumUsersV1: { + const deletes = this.syncRepository.getAlbumUserDeletes( + auth.user.id, + checkpointMap[SyncEntityType.AlbumUserDeleteV1], + ); + for await (const { id, ...data } of deletes) { + response.write(serialize({ type: SyncEntityType.AlbumUserDeleteV1, updateId: id, data })); + } + + const upserts = this.syncRepository.getAlbumUserUpserts( + auth.user.id, + 
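+        // Separate checkpoint per entity type: the upserts here resume from the AlbumUserV1 ack,
+        // while the deletes above resume from AlbumUserDeleteV1.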
checkpointMap[SyncEntityType.AlbumUserV1], + ); + for await (const { updateId, ...data } of upserts) { + response.write(serialize({ type: SyncEntityType.AlbumUserV1, updateId, data })); + } + + break; + } + default: { this.logger.warn(`Unsupported sync type: ${type}`); break; diff --git a/server/src/sql-tools/from-code/decorators/after-insert.decorator.ts b/server/src/sql-tools/from-code/decorators/after-insert.decorator.ts new file mode 100644 index 0000000000..103d59b4fc --- /dev/null +++ b/server/src/sql-tools/from-code/decorators/after-insert.decorator.ts @@ -0,0 +1,8 @@ +import { TriggerFunction, TriggerFunctionOptions } from 'src/sql-tools/from-code/decorators/trigger-function.decorator'; + +export const AfterInsertTrigger = (options: Omit) => + TriggerFunction({ + timing: 'after', + actions: ['insert'], + ...options, + }); diff --git a/server/src/sql-tools/public_api.ts b/server/src/sql-tools/public_api.ts index b41cce4ab5..c7a3023a4d 100644 --- a/server/src/sql-tools/public_api.ts +++ b/server/src/sql-tools/public_api.ts @@ -1,6 +1,7 @@ export { schemaDiff } from 'src/sql-tools/diff'; export { schemaFromCode } from 'src/sql-tools/from-code'; export * from 'src/sql-tools/from-code/decorators/after-delete.decorator'; +export * from 'src/sql-tools/from-code/decorators/after-insert.decorator'; export * from 'src/sql-tools/from-code/decorators/before-update.decorator'; export * from 'src/sql-tools/from-code/decorators/check.decorator'; export * from 'src/sql-tools/from-code/decorators/column.decorator'; diff --git a/server/test/medium.factory.ts b/server/test/medium.factory.ts index 8b730c2b41..cab74f70fb 100644 --- a/server/test/medium.factory.ts +++ b/server/test/medium.factory.ts @@ -4,9 +4,11 @@ import { DateTime } from 'luxon'; import { createHash, randomBytes } from 'node:crypto'; import { Writable } from 'node:stream'; import { AssetFace } from 'src/database'; -import { AssetJobStatus, Assets, DB, FaceSearch, Person, Sessions } from 'src/db'; -import { AssetType, AssetVisibility, SourceType } from 'src/enum'; +import { Albums, AssetJobStatus, Assets, DB, FaceSearch, Person, Sessions } from 'src/db'; +import { AuthDto } from 'src/dtos/auth.dto'; +import { AssetType, AssetVisibility, SourceType, SyncRequestType } from 'src/enum'; import { ActivityRepository } from 'src/repositories/activity.repository'; +import { AlbumUserRepository } from 'src/repositories/album-user.repository'; import { AlbumRepository } from 'src/repositories/album.repository'; import { AssetJobRepository } from 'src/repositories/asset-job.repository'; import { AssetRepository } from 'src/repositories/asset.repository'; @@ -28,8 +30,9 @@ import { UserRepository } from 'src/repositories/user.repository'; import { VersionHistoryRepository } from 'src/repositories/version-history.repository'; import { UserTable } from 'src/schema/tables/user.table'; import { BaseService } from 'src/services/base.service'; +import { SyncService } from 'src/services/sync.service'; import { RepositoryInterface } from 'src/types'; -import { newDate, newEmbedding, newUuid } from 'test/small.factory'; +import { factory, newDate, newEmbedding, newUuid } from 'test/small.factory'; import { automock, ServiceOverrides } from 'test/utils'; import { Mocked } from 'vitest'; @@ -39,6 +42,7 @@ const sha256 = (value: string) => createHash('sha256').update(value).digest('bas type RepositoriesTypes = { activity: ActivityRepository; album: AlbumRepository; + albumUser: AlbumUserRepository; asset: AssetRepository; assetJob: AssetJobRepository; 
config: ConfigRepository; @@ -76,6 +80,61 @@ export type Context = { getRepository(key: T): RepositoriesTypes[T]; }; +export type SyncTestOptions = { + db: Kysely; +}; + +export const newSyncAuthUser = () => { + const user = mediumFactory.userInsert(); + const session = mediumFactory.sessionInsert({ userId: user.id }); + + const auth = factory.auth({ + session, + user: { + id: user.id, + name: user.name, + email: user.email, + }, + }); + + return { + auth, + session, + user, + create: async (db: Kysely) => { + await new UserRepository(db).create(user); + await new SessionRepository(db).create(session); + }, + }; +}; + +export const newSyncTest = (options: SyncTestOptions) => { + const { sut, mocks, repos, getRepository } = newMediumService(SyncService, { + database: options.db, + repos: { + sync: 'real', + session: 'real', + }, + }); + + const testSync = async (auth: AuthDto, types: SyncRequestType[]) => { + const stream = mediumFactory.syncStream(); + // Wait for 2ms to ensure all updates are available and account for setTimeout inaccuracy + await new Promise((resolve) => setTimeout(resolve, 2)); + await sut.stream(auth, stream, { types }); + + return stream.getResponse(); + }; + + return { + sut, + mocks, + repos, + getRepository, + testSync, + }; +}; + export const newMediumService = ( Service: ClassConstructor, options: { @@ -125,6 +184,14 @@ export const getRepository = (key: K, db: Kys return new ActivityRepository(db); } + case 'album': { + return new AlbumRepository(db); + } + + case 'albumUser': { + return new AlbumUserRepository(db); + } + case 'asset': { return new AssetRepository(db); } @@ -380,6 +447,19 @@ const assetInsert = (asset: Partial> = {}) => { }; }; +const albumInsert = (album: Partial> & { ownerId: string }) => { + const id = album.id || newUuid(); + const defaults: Omit, 'ownerId'> = { + albumName: 'Album', + }; + + return { + ...defaults, + ...album, + id, + }; +}; + const faceInsert = (face: Partial> & { faceId: string }) => { const defaults = { faceId: face.faceId, @@ -502,6 +582,7 @@ export const mediumFactory = { assetInsert, assetFaceInsert, assetJobStatusInsert, + albumInsert, faceInsert, personInsert, sessionInsert, diff --git a/server/test/medium/specs/services/sync.service.spec.ts b/server/test/medium/specs/services/sync.service.spec.ts deleted file mode 100644 index 67cfeafdbf..0000000000 --- a/server/test/medium/specs/services/sync.service.spec.ts +++ /dev/null @@ -1,910 +0,0 @@ -import { AuthDto } from 'src/dtos/auth.dto'; -import { SyncEntityType, SyncRequestType } from 'src/enum'; -import { SYNC_TYPES_ORDER, SyncService } from 'src/services/sync.service'; -import { mediumFactory, newMediumService } from 'test/medium.factory'; -import { factory } from 'test/small.factory'; -import { getKyselyDB } from 'test/utils'; - -const setup = async () => { - const db = await getKyselyDB(); - - const { sut, mocks, repos, getRepository } = newMediumService(SyncService, { - database: db, - repos: { - sync: 'real', - session: 'real', - }, - }); - - const user = mediumFactory.userInsert(); - const session = mediumFactory.sessionInsert({ userId: user.id }); - const auth = factory.auth({ - session, - user: { - id: user.id, - name: user.name, - email: user.email, - }, - }); - - await getRepository('user').create(user); - await getRepository('session').create(session); - - const testSync = async (auth: AuthDto, types: SyncRequestType[]) => { - const stream = mediumFactory.syncStream(); - // Wait for 1ms to ensure all updates are available - await new Promise((resolve) => 
setTimeout(resolve, 1)); - await sut.stream(auth, stream, { types }); - - return stream.getResponse(); - }; - - return { - sut, - auth, - mocks, - repos, - getRepository, - testSync, - }; -}; - -describe(SyncService.name, () => { - it('should have all the types in the ordering variable', () => { - for (const key in SyncRequestType) { - expect(SYNC_TYPES_ORDER).includes(key); - } - - expect(SYNC_TYPES_ORDER.length).toBe(Object.keys(SyncRequestType).length); - }); - - describe.concurrent(SyncEntityType.UserV1, () => { - it('should detect and sync the first user', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user = await userRepo.get(auth.user.id, { withDeleted: false }); - if (!user) { - expect.fail('First user should exist'); - } - - const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); - expect(initialSyncResponse).toHaveLength(1); - expect(initialSyncResponse).toEqual([ - { - ack: expect.any(String), - data: { - deletedAt: user.deletedAt, - email: user.email, - id: user.id, - name: user.name, - }, - type: 'UserV1', - }, - ]); - - const acks = [initialSyncResponse[0].ack]; - await sut.setAcks(auth, { acks }); - const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should detect and sync a soft deleted user', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const deletedAt = new Date().toISOString(); - const deletedUser = mediumFactory.userInsert({ deletedAt }); - const deleted = await getRepository('user').create(deletedUser); - - const response = await testSync(auth, [SyncRequestType.UsersV1]); - - expect(response).toHaveLength(2); - expect(response).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - deletedAt: null, - email: auth.user.email, - id: auth.user.id, - name: auth.user.name, - }, - type: 'UserV1', - }, - { - ack: expect.any(String), - data: { - deletedAt, - email: deleted.email, - id: deleted.id, - name: deleted.name, - }, - type: 'UserV1', - }, - ]), - ); - - const acks = [response[1].ack]; - await sut.setAcks(auth, { acks }); - const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should detect and sync a deleted user', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user = mediumFactory.userInsert(); - await userRepo.create(user); - await userRepo.delete({ id: user.id }, true); - - const response = await testSync(auth, [SyncRequestType.UsersV1]); - - expect(response).toHaveLength(2); - expect(response).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - userId: user.id, - }, - type: 'UserDeleteV1', - }, - { - ack: expect.any(String), - data: { - deletedAt: null, - email: auth.user.email, - id: auth.user.id, - name: auth.user.name, - }, - type: 'UserV1', - }, - ]), - ); - - const acks = response.map(({ ack }) => ack); - await sut.setAcks(auth, { acks }); - const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should sync a user and then an update to that same user', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); - - expect(initialSyncResponse).toHaveLength(1); - 
expect(initialSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - deletedAt: null, - email: auth.user.email, - id: auth.user.id, - name: auth.user.name, - }, - type: 'UserV1', - }, - ]), - ); - - const acks = [initialSyncResponse[0].ack]; - await sut.setAcks(auth, { acks }); - - const userRepo = getRepository('user'); - const updated = await userRepo.update(auth.user.id, { name: 'new name' }); - const updatedSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); - - expect(updatedSyncResponse).toHaveLength(1); - expect(updatedSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - deletedAt: null, - email: auth.user.email, - id: auth.user.id, - name: updated.name, - }, - type: 'UserV1', - }, - ]), - ); - }); - }); - - describe.concurrent(SyncEntityType.PartnerV1, () => { - it('should detect and sync the first partner', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const user1 = auth.user; - const userRepo = getRepository('user'); - const partnerRepo = getRepository('partner'); - - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id }); - - const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); - - expect(initialSyncResponse).toHaveLength(1); - expect(initialSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - inTimeline: partner.inTimeline, - sharedById: partner.sharedById, - sharedWithId: partner.sharedWithId, - }, - type: 'PartnerV1', - }, - ]), - ); - - const acks = [initialSyncResponse[0].ack]; - await sut.setAcks(auth, { acks }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should detect and sync a deleted partner', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user1 = auth.user; - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const partnerRepo = getRepository('partner'); - const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id }); - await partnerRepo.remove(partner); - - const response = await testSync(auth, [SyncRequestType.PartnersV1]); - - expect(response).toHaveLength(1); - expect(response).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - sharedById: partner.sharedById, - sharedWithId: partner.sharedWithId, - }, - type: 'PartnerDeleteV1', - }, - ]), - ); - - const acks = response.map(({ ack }) => ack); - await sut.setAcks(auth, { acks }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should detect and sync a partner share both to and from another user', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user1 = auth.user; - const user2 = await userRepo.create(mediumFactory.userInsert()); - - const partnerRepo = getRepository('partner'); - const partner1 = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id }); - const partner2 = await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id }); - - const response = await testSync(auth, [SyncRequestType.PartnersV1]); - - expect(response).toHaveLength(2); - 
expect(response).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - inTimeline: partner1.inTimeline, - sharedById: partner1.sharedById, - sharedWithId: partner1.sharedWithId, - }, - type: 'PartnerV1', - }, - { - ack: expect.any(String), - data: { - inTimeline: partner2.inTimeline, - sharedById: partner2.sharedById, - sharedWithId: partner2.sharedWithId, - }, - type: 'PartnerV1', - }, - ]), - ); - - await sut.setAcks(auth, { acks: [response[1].ack] }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should sync a partner and then an update to that same partner', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user1 = auth.user; - const user2 = await userRepo.create(mediumFactory.userInsert()); - - const partnerRepo = getRepository('partner'); - const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id }); - - const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); - - expect(initialSyncResponse).toHaveLength(1); - expect(initialSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - inTimeline: partner.inTimeline, - sharedById: partner.sharedById, - sharedWithId: partner.sharedWithId, - }, - type: 'PartnerV1', - }, - ]), - ); - - const acks = [initialSyncResponse[0].ack]; - await sut.setAcks(auth, { acks }); - - const updated = await partnerRepo.update( - { sharedById: partner.sharedById, sharedWithId: partner.sharedWithId }, - { inTimeline: true }, - ); - - const updatedSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); - - expect(updatedSyncResponse).toHaveLength(1); - expect(updatedSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - inTimeline: updated.inTimeline, - sharedById: updated.sharedById, - sharedWithId: updated.sharedWithId, - }, - type: 'PartnerV1', - }, - ]), - ); - }); - - it('should not sync a partner or partner delete for an unrelated user', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = await userRepo.create(mediumFactory.userInsert()); - const user3 = await userRepo.create(mediumFactory.userInsert()); - - const partnerRepo = getRepository('partner'); - const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user3.id }); - - expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0); - - await partnerRepo.remove(partner); - - expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0); - }); - - it('should not sync a partner delete after a user is deleted', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = await userRepo.create(mediumFactory.userInsert()); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - await userRepo.delete({ id: user2.id }, true); - - expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0); - }); - }); - - describe.concurrent(SyncEntityType.AssetV1, () => { - it('should detect and sync the first asset', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA='; - const thumbhash = 
'2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA='; - const date = new Date().toISOString(); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ - ownerId: auth.user.id, - checksum: Buffer.from(checksum, 'base64'), - thumbhash: Buffer.from(thumbhash, 'base64'), - fileCreatedAt: date, - fileModifiedAt: date, - localDateTime: date, - deletedAt: null, - }); - await assetRepo.create(asset); - - const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]); - - expect(initialSyncResponse).toHaveLength(1); - expect(initialSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - id: asset.id, - ownerId: asset.ownerId, - thumbhash, - checksum, - deletedAt: asset.deletedAt, - fileCreatedAt: asset.fileCreatedAt, - fileModifiedAt: asset.fileModifiedAt, - isFavorite: asset.isFavorite, - localDateTime: asset.localDateTime, - type: asset.type, - visibility: asset.visibility, - }, - type: 'AssetV1', - }, - ]), - ); - - const acks = [initialSyncResponse[0].ack]; - await sut.setAcks(auth, { acks }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should detect and sync a deleted asset', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: auth.user.id }); - await assetRepo.create(asset); - await assetRepo.remove(asset); - - const response = await testSync(auth, [SyncRequestType.AssetsV1]); - - expect(response).toHaveLength(1); - expect(response).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - assetId: asset.id, - }, - type: 'AssetDeleteV1', - }, - ]), - ); - - const acks = response.map(({ ack }) => ack); - await sut.setAcks(auth, { acks }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should not sync an asset or asset delete for an unrelated user', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const sessionRepo = getRepository('session'); - const session = mediumFactory.sessionInsert({ userId: user2.id }); - await sessionRepo.create(session); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: user2.id }); - await assetRepo.create(asset); - - const auth2 = factory.auth({ session, user: user2 }); - - expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1); - expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0); - - await assetRepo.remove(asset); - expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1); - expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0); - }); - }); - - describe.concurrent(SyncRequestType.PartnerAssetsV1, () => { - it('should detect and sync the first partner asset', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA='; - const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA='; - const date = new Date().toISOString(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const assetRepo = getRepository('asset'); - const asset = 
mediumFactory.assetInsert({ - ownerId: user2.id, - checksum: Buffer.from(checksum, 'base64'), - thumbhash: Buffer.from(thumbhash, 'base64'), - fileCreatedAt: date, - fileModifiedAt: date, - localDateTime: date, - deletedAt: null, - }); - await assetRepo.create(asset); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - - const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); - - expect(initialSyncResponse).toHaveLength(1); - expect(initialSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - id: asset.id, - ownerId: asset.ownerId, - thumbhash, - checksum, - deletedAt: null, - fileCreatedAt: date, - fileModifiedAt: date, - isFavorite: false, - localDateTime: date, - type: asset.type, - visibility: asset.visibility, - }, - type: SyncEntityType.PartnerAssetV1, - }, - ]), - ); - - const acks = [initialSyncResponse[0].ack]; - await sut.setAcks(auth, { acks }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should detect and sync a deleted partner asset', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - const asset = mediumFactory.assetInsert({ ownerId: user2.id }); - - const assetRepo = getRepository('asset'); - await assetRepo.create(asset); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - await assetRepo.remove(asset); - - const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); - - expect(response).toHaveLength(1); - expect(response).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - assetId: asset.id, - }, - type: SyncEntityType.PartnerAssetDeleteV1, - }, - ]), - ); - - const acks = response.map(({ ack }) => ack); - await sut.setAcks(auth, { acks }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should not sync a deleted partner asset due to a user delete', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - - const assetRepo = getRepository('asset'); - await assetRepo.create(mediumFactory.assetInsert({ ownerId: user2.id })); - - await userRepo.delete({ id: user2.id }, true); - - const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); - expect(response).toHaveLength(0); - }); - - it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const assetRepo = getRepository('asset'); - await assetRepo.create(mediumFactory.assetInsert({ ownerId: user2.id })); - - const partnerRepo = getRepository('partner'); - const partner = { sharedById: user2.id, sharedWithId: auth.user.id }; - await partnerRepo.create(partner); - - await expect(testSync(auth, 
[SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(1); - - await partnerRepo.remove(partner); - - await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); - }); - - it('should not sync an asset or asset delete for own user', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: auth.user.id }); - await assetRepo.create(asset); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - - await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1); - await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); - - await assetRepo.remove(asset); - - await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1); - await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); - }); - - it('should not sync an asset or asset delete for unrelated user', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const sessionRepo = getRepository('session'); - const session = mediumFactory.sessionInsert({ userId: user2.id }); - await sessionRepo.create(session); - - const auth2 = factory.auth({ session, user: user2 }); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: user2.id }); - await assetRepo.create(asset); - - await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1); - await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); - - await assetRepo.remove(asset); - - await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1); - await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); - }); - }); - - describe.concurrent(SyncRequestType.AssetExifsV1, () => { - it('should detect and sync the first asset exif', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: auth.user.id }); - await assetRepo.create(asset); - await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); - - const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetExifsV1]); - - expect(initialSyncResponse).toHaveLength(1); - expect(initialSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - assetId: asset.id, - city: null, - country: null, - dateTimeOriginal: null, - description: '', - exifImageHeight: null, - exifImageWidth: null, - exposureTime: null, - fNumber: null, - fileSizeInByte: null, - focalLength: null, - fps: null, - iso: null, - latitude: null, - lensModel: null, - longitude: null, - make: 'Canon', - model: null, - modifyDate: null, - orientation: null, - profileDescription: null, - projectionType: null, - rating: null, - state: null, - timeZone: null, - }, - type: SyncEntityType.AssetExifV1, - }, - ]), - ); - - const acks = [initialSyncResponse[0].ack]; - await sut.setAcks(auth, { acks }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetExifsV1]); - - 
expect(ackSyncResponse).toHaveLength(0); - }); - - it('should only sync asset exif for own user', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: user2.id }); - await assetRepo.create(asset); - await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); - - const sessionRepo = getRepository('session'); - const session = mediumFactory.sessionInsert({ userId: user2.id }); - await sessionRepo.create(session); - - const auth2 = factory.auth({ session, user: user2 }); - await expect(testSync(auth2, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1); - await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(0); - }); - }); - - describe.concurrent(SyncRequestType.PartnerAssetExifsV1, () => { - it('should detect and sync the first partner asset exif', async () => { - const { auth, sut, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: user2.id }); - await assetRepo.create(asset); - await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); - - const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]); - - expect(initialSyncResponse).toHaveLength(1); - expect(initialSyncResponse).toEqual( - expect.arrayContaining([ - { - ack: expect.any(String), - data: { - assetId: asset.id, - city: null, - country: null, - dateTimeOriginal: null, - description: '', - exifImageHeight: null, - exifImageWidth: null, - exposureTime: null, - fNumber: null, - fileSizeInByte: null, - focalLength: null, - fps: null, - iso: null, - latitude: null, - lensModel: null, - longitude: null, - make: 'Canon', - model: null, - modifyDate: null, - orientation: null, - profileDescription: null, - projectionType: null, - rating: null, - state: null, - timeZone: null, - }, - type: SyncEntityType.PartnerAssetExifV1, - }, - ]), - ); - - const acks = [initialSyncResponse[0].ack]; - await sut.setAcks(auth, { acks }); - - const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]); - - expect(ackSyncResponse).toHaveLength(0); - }); - - it('should not sync partner asset exif for own user', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - const user2 = mediumFactory.userInsert(); - await userRepo.create(user2); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: auth.user.id }); - await assetRepo.create(asset); - await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); - - await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1); - await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0); - }); - - it('should not sync 
partner asset exif for unrelated user', async () => { - const { auth, getRepository, testSync } = await setup(); - - const userRepo = getRepository('user'); - - const user2 = mediumFactory.userInsert(); - const user3 = mediumFactory.userInsert(); - await Promise.all([userRepo.create(user2), userRepo.create(user3)]); - - const partnerRepo = getRepository('partner'); - await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); - - const assetRepo = getRepository('asset'); - const asset = mediumFactory.assetInsert({ ownerId: user3.id }); - await assetRepo.create(asset); - await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); - - const sessionRepo = getRepository('session'); - const session = mediumFactory.sessionInsert({ userId: user3.id }); - await sessionRepo.create(session); - - const authUser3 = factory.auth({ session, user: user3 }); - await expect(testSync(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1); - await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0); - }); - }); -}); diff --git a/server/test/medium/specs/sync/sync-album-user.spec.ts b/server/test/medium/specs/sync/sync-album-user.spec.ts new file mode 100644 index 0000000000..4967df5264 --- /dev/null +++ b/server/test/medium/specs/sync/sync-album-user.spec.ts @@ -0,0 +1,269 @@ +import { Kysely } from 'kysely'; +import { DB } from 'src/db'; +import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum'; +import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = async (db?: Kysely) => { + const database = db || defaultDatabase; + const result = newSyncTest({ db: database }); + const { auth, create } = newSyncAuthUser(); + await create(database); + return { ...result, auth }; +}; + +beforeAll(async () => { + defaultDatabase = await getKyselyDB(); +}); + +describe(SyncRequestType.AlbumUsersV1, () => { + it('should sync an album user with the correct properties', async () => { + const { auth, getRepository, testSync } = await setup(); + + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const album = mediumFactory.albumInsert({ ownerId: auth.user.id }); + await albumRepo.create(album, [], []); + + const user = mediumFactory.userInsert(); + await userRepo.create(user); + + const albumUser = { albumsId: album.id, usersId: user.id, role: AlbumUserRole.EDITOR }; + await albumUserRepo.create(albumUser); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + albumId: albumUser.albumsId, + role: albumUser.role, + userId: albumUser.usersId, + }), + type: SyncEntityType.AlbumUserV1, + }, + ]); + }); + describe('owner', () => { + it('should detect and sync a new shared user', async () => { + const { auth, testSync, getRepository } = await setup(); + + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const user1 = mediumFactory.userInsert(); + await userRepo.create(user1); + + const album = mediumFactory.albumInsert({ ownerId: auth.user.id }); + await albumRepo.create(album, [], []); + + const albumUser = { albumsId: album.id, usersId: user1.id, role: AlbumUserRole.EDITOR }; + await albumUserRepo.create(albumUser); + + await expect(testSync(auth, 
[SyncRequestType.AlbumUsersV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + albumId: albumUser.albumsId, + role: albumUser.role, + userId: albumUser.usersId, + }), + type: SyncEntityType.AlbumUserV1, + }, + ]); + }); + + it('should detect and sync an updated shared user', async () => { + const { auth, testSync, getRepository, sut } = await setup(); + + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const user1 = mediumFactory.userInsert(); + await userRepo.create(user1); + + const album = mediumFactory.albumInsert({ ownerId: auth.user.id }); + await albumRepo.create(album, [], []); + + const albumUser = { albumsId: album.id, usersId: user1.id, role: AlbumUserRole.EDITOR }; + await albumUserRepo.create(albumUser); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AlbumUsersV1]); + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]); + + await albumUserRepo.update({ albumsId: album.id, usersId: user1.id }, { role: AlbumUserRole.VIEWER }); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + albumId: albumUser.albumsId, + role: AlbumUserRole.VIEWER, + userId: albumUser.usersId, + }), + type: SyncEntityType.AlbumUserV1, + }, + ]); + }); + + it('should detect and sync a deleted shared user', async () => { + const { auth, testSync, getRepository, sut } = await setup(); + + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const user1 = mediumFactory.userInsert(); + await userRepo.create(user1); + + const album = mediumFactory.albumInsert({ ownerId: auth.user.id }); + await albumRepo.create(album, [], []); + + const albumUser = { albumsId: album.id, usersId: user1.id, role: AlbumUserRole.EDITOR }; + await albumUserRepo.create(albumUser); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AlbumUsersV1]); + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]); + + await albumUserRepo.delete({ albumsId: album.id, usersId: user1.id }); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + albumId: albumUser.albumsId, + userId: albumUser.usersId, + }), + type: SyncEntityType.AlbumUserDeleteV1, + }, + ]); + }); + }); + + describe('shared user', () => { + it('should detect and sync a new shared user', async () => { + const { auth, testSync, getRepository } = await setup(); + + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const user1 = mediumFactory.userInsert(); + await userRepo.create(user1); + + const album = mediumFactory.albumInsert({ ownerId: user1.id }); + await albumRepo.create(album, [], []); + + const albumUser = { albumsId: album.id, usersId: auth.user.id, role: AlbumUserRole.EDITOR }; + await albumUserRepo.create(albumUser); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + albumId: albumUser.albumsId, + role: albumUser.role, 
+ userId: albumUser.usersId, + }), + type: SyncEntityType.AlbumUserV1, + }, + ]); + }); + + it('should detect and sync an updated shared user', async () => { + const { auth, testSync, getRepository, sut } = await setup(); + + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const owner = mediumFactory.userInsert(); + const user = mediumFactory.userInsert(); + await Promise.all([userRepo.create(owner), userRepo.create(user)]); + + const album = mediumFactory.albumInsert({ ownerId: owner.id }); + await albumRepo.create( + album, + [], + [ + { userId: auth.user.id, role: AlbumUserRole.EDITOR }, + { userId: user.id, role: AlbumUserRole.EDITOR }, + ], + ); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AlbumUsersV1]); + expect(initialSyncResponse).toHaveLength(2); + const acks = [initialSyncResponse[1].ack]; + await sut.setAcks(auth, { acks }); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]); + + await albumUserRepo.update({ albumsId: album.id, usersId: user.id }, { role: AlbumUserRole.VIEWER }); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + albumId: album.id, + role: AlbumUserRole.VIEWER, + userId: user.id, + }), + type: SyncEntityType.AlbumUserV1, + }, + ]); + }); + + it('should detect and sync a deleted shared user', async () => { + const { auth, testSync, getRepository, sut } = await setup(); + + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const owner = mediumFactory.userInsert(); + const user = mediumFactory.userInsert(); + await Promise.all([userRepo.create(owner), userRepo.create(user)]); + + const album = mediumFactory.albumInsert({ ownerId: owner.id }); + await albumRepo.create( + album, + [], + [ + { userId: auth.user.id, role: AlbumUserRole.EDITOR }, + { userId: user.id, role: AlbumUserRole.EDITOR }, + ], + ); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AlbumUsersV1]); + expect(initialSyncResponse).toHaveLength(2); + const acks = [initialSyncResponse[1].ack]; + await sut.setAcks(auth, { acks }); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]); + + await albumUserRepo.delete({ albumsId: album.id, usersId: user.id }); + + await expect(testSync(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + albumId: album.id, + userId: user.id, + }), + type: SyncEntityType.AlbumUserDeleteV1, + }, + ]); + }); + }); +}); diff --git a/server/test/medium/specs/sync/sync-album.spec.ts b/server/test/medium/specs/sync/sync-album.spec.ts new file mode 100644 index 0000000000..7ee7bf624f --- /dev/null +++ b/server/test/medium/specs/sync/sync-album.spec.ts @@ -0,0 +1,220 @@ +import { Kysely } from 'kysely'; +import { DB } from 'src/db'; +import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum'; +import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = async (db?: Kysely) => { + const database = db || defaultDatabase; + const result = newSyncTest({ db: database }); + const { auth, create } = newSyncAuthUser(); + await create(database); + return { ...result, auth }; +}; + +beforeAll(async () => { 
+ defaultDatabase = await getKyselyDB(); +}); + +describe(SyncRequestType.AlbumsV1, () => { + it('should sync an album with the correct properties', async () => { + const { auth, getRepository, testSync } = await setup(); + const albumRepo = getRepository('album'); + const album = mediumFactory.albumInsert({ ownerId: auth.user.id }); + await albumRepo.create(album, [], []); + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + id: album.id, + name: album.albumName, + ownerId: album.ownerId, + }), + type: SyncEntityType.AlbumV1, + }, + ]); + }); + + it('should detect and sync a new album', async () => { + const { auth, getRepository, testSync } = await setup(); + const albumRepo = getRepository('album'); + const album = mediumFactory.albumInsert({ ownerId: auth.user.id }); + await albumRepo.create(album, [], []); + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + id: album.id, + }), + type: SyncEntityType.AlbumV1, + }, + ]); + }); + + it('should detect and sync an album delete', async () => { + const { auth, getRepository, testSync } = await setup(); + const albumRepo = getRepository('album'); + const album = mediumFactory.albumInsert({ ownerId: auth.user.id }); + await albumRepo.create(album, [], []); + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ + id: album.id, + }), + type: SyncEntityType.AlbumV1, + }, + ]); + + await albumRepo.delete(album.id); + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: { + albumId: album.id, + }, + type: SyncEntityType.AlbumDeleteV1, + }, + ]); + }); + + describe('shared albums', () => { + it('should detect and sync an album create', async () => { + const { auth, getRepository, testSync } = await setup(); + const albumRepo = getRepository('album'); + const userRepo = getRepository('user'); + + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const album = mediumFactory.albumInsert({ ownerId: user2.id }); + await albumRepo.create(album, [], [{ userId: auth.user.id, role: AlbumUserRole.EDITOR }]); + + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ id: album.id }), + type: SyncEntityType.AlbumV1, + }, + ]); + }); + + it('should detect and sync an album share (share before sync)', async () => { + const { auth, getRepository, testSync } = await setup(); + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const album = mediumFactory.albumInsert({ ownerId: user2.id }); + await albumRepo.create(album, [], []); + await albumUserRepo.create({ usersId: auth.user.id, albumsId: album.id, role: AlbumUserRole.EDITOR }); + + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ id: album.id }), + type: SyncEntityType.AlbumV1, + }, + ]); + }); + + it('should detect and sync an album share (share after sync)', async () => { + const { auth, getRepository, sut, testSync } = await setup(); + const albumRepo = getRepository('album'); + const albumUserRepo = 
getRepository('albumUser'); + const userRepo = getRepository('user'); + + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const userAlbum = mediumFactory.albumInsert({ ownerId: auth.user.id }); + const user2Album = mediumFactory.albumInsert({ ownerId: user2.id }); + await Promise.all([albumRepo.create(user2Album, [], []), albumRepo.create(userAlbum, [], [])]); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AlbumsV1]); + + expect(initialSyncResponse).toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ id: userAlbum.id }), + type: SyncEntityType.AlbumV1, + }, + ]); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + await albumUserRepo.create({ usersId: auth.user.id, albumsId: user2Album.id, role: AlbumUserRole.EDITOR }); + + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: expect.objectContaining({ id: user2Album.id }), + type: SyncEntityType.AlbumV1, + }, + ]); + }); + + it('should detect and sync an album delete`', async () => { + const { auth, getRepository, testSync, sut } = await setup(); + const albumRepo = getRepository('album'); + const userRepo = getRepository('user'); + + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const album = mediumFactory.albumInsert({ ownerId: user2.id }); + await albumRepo.create(album, [], [{ userId: auth.user.id, role: AlbumUserRole.EDITOR }]); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AlbumsV1]); + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]); + + await albumRepo.delete(album.id); + + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: { albumId: album.id }, + type: SyncEntityType.AlbumDeleteV1, + }, + ]); + }); + + it('should detect and sync an album unshare as an album delete', async () => { + const { auth, getRepository, testSync, sut } = await setup(); + const albumRepo = getRepository('album'); + const albumUserRepo = getRepository('albumUser'); + const userRepo = getRepository('user'); + + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const album = mediumFactory.albumInsert({ ownerId: user2.id }); + await albumRepo.create(album, [], [{ userId: auth.user.id, role: AlbumUserRole.EDITOR }]); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AlbumsV1]); + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]); + + await albumUserRepo.delete({ albumsId: album.id, usersId: auth.user.id }); + + await expect(testSync(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([ + { + ack: expect.any(String), + data: { albumId: album.id }, + type: SyncEntityType.AlbumDeleteV1, + }, + ]); + }); + }); +}); diff --git a/server/test/medium/specs/sync/sync-asset-exif.spec.ts b/server/test/medium/specs/sync/sync-asset-exif.spec.ts new file mode 100644 index 0000000000..9a3bcb4314 --- /dev/null +++ b/server/test/medium/specs/sync/sync-asset-exif.spec.ts @@ -0,0 +1,100 @@ +import { Kysely } from 'kysely'; +import { DB } from 'src/db'; +import { SyncEntityType, SyncRequestType } from 'src/enum'; +import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory'; +import { factory } from 
'test/small.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = async (db?: Kysely) => { + const database = db || defaultDatabase; + const result = newSyncTest({ db: database }); + const { auth, create } = newSyncAuthUser(); + await create(database); + return { ...result, auth }; +}; +beforeAll(async () => { + defaultDatabase = await getKyselyDB(); +}); + +describe.concurrent(SyncRequestType.AssetExifsV1, () => { + it('should detect and sync the first asset exif', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: auth.user.id }); + await assetRepo.create(asset); + await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetExifsV1]); + + expect(initialSyncResponse).toHaveLength(1); + expect(initialSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + assetId: asset.id, + city: null, + country: null, + dateTimeOriginal: null, + description: '', + exifImageHeight: null, + exifImageWidth: null, + exposureTime: null, + fNumber: null, + fileSizeInByte: null, + focalLength: null, + fps: null, + iso: null, + latitude: null, + lensModel: null, + longitude: null, + make: 'Canon', + model: null, + modifyDate: null, + orientation: null, + profileDescription: null, + projectionType: null, + rating: null, + state: null, + timeZone: null, + }, + type: SyncEntityType.AssetExifV1, + }, + ]), + ); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetExifsV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should only sync asset exif for own user', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: user2.id }); + await assetRepo.create(asset); + await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); + + const sessionRepo = getRepository('session'); + const session = mediumFactory.sessionInsert({ userId: user2.id }); + await sessionRepo.create(session); + + const auth2 = factory.auth({ session, user: user2 }); + await expect(testSync(auth2, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1); + await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(0); + }); +}); diff --git a/server/test/medium/specs/sync/sync-asset.spec.ts b/server/test/medium/specs/sync/sync-asset.spec.ts new file mode 100644 index 0000000000..3cf6d7d30d --- /dev/null +++ b/server/test/medium/specs/sync/sync-asset.spec.ts @@ -0,0 +1,130 @@ +import { Kysely } from 'kysely'; +import { DB } from 'src/db'; +import { SyncEntityType, SyncRequestType } from 'src/enum'; +import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory'; +import { factory } from 'test/small.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = async (db?: Kysely) => { + const database = db || defaultDatabase; + const result = newSyncTest({ db: database }); + const { auth, create } = 
newSyncAuthUser(); + await create(database); + return { ...result, auth }; +}; + +beforeAll(async () => { + defaultDatabase = await getKyselyDB(); +}); + +describe.concurrent(SyncEntityType.AssetV1, () => { + it('should detect and sync the first asset', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA='; + const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA='; + const date = new Date().toISOString(); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ + ownerId: auth.user.id, + checksum: Buffer.from(checksum, 'base64'), + thumbhash: Buffer.from(thumbhash, 'base64'), + fileCreatedAt: date, + fileModifiedAt: date, + localDateTime: date, + deletedAt: null, + }); + await assetRepo.create(asset); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]); + + expect(initialSyncResponse).toHaveLength(1); + expect(initialSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + id: asset.id, + ownerId: asset.ownerId, + thumbhash, + checksum, + deletedAt: asset.deletedAt, + fileCreatedAt: asset.fileCreatedAt, + fileModifiedAt: asset.fileModifiedAt, + isFavorite: asset.isFavorite, + localDateTime: asset.localDateTime, + type: asset.type, + visibility: asset.visibility, + }, + type: 'AssetV1', + }, + ]), + ); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should detect and sync a deleted asset', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: auth.user.id }); + await assetRepo.create(asset); + await assetRepo.remove(asset); + + const response = await testSync(auth, [SyncRequestType.AssetsV1]); + + expect(response).toHaveLength(1); + expect(response).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + assetId: asset.id, + }, + type: 'AssetDeleteV1', + }, + ]), + ); + + const acks = response.map(({ ack }) => ack); + await sut.setAcks(auth, { acks }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should not sync an asset or asset delete for an unrelated user', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const sessionRepo = getRepository('session'); + const session = mediumFactory.sessionInsert({ userId: user2.id }); + await sessionRepo.create(session); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: user2.id }); + await assetRepo.create(asset); + + const auth2 = factory.auth({ session, user: user2 }); + + expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1); + expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0); + + await assetRepo.remove(asset); + expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1); + expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0); + }); +}); diff --git a/server/test/medium/specs/sync/sync-partner-asset-exif.spec.ts b/server/test/medium/specs/sync/sync-partner-asset-exif.spec.ts new 
file mode 100644 index 0000000000..8d9e6d6ac5 --- /dev/null +++ b/server/test/medium/specs/sync/sync-partner-asset-exif.spec.ts @@ -0,0 +1,129 @@ +import { Kysely } from 'kysely'; +import { DB } from 'src/db'; +import { SyncEntityType, SyncRequestType } from 'src/enum'; +import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory'; +import { factory } from 'test/small.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = async (db?: Kysely) => { + const database = db || defaultDatabase; + const result = newSyncTest({ db: database }); + const { auth, create } = newSyncAuthUser(); + await create(database); + return { ...result, auth }; +}; + +beforeAll(async () => { + defaultDatabase = await getKyselyDB(); +}); + +describe.concurrent(SyncRequestType.PartnerAssetExifsV1, () => { + it('should detect and sync the first partner asset exif', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: user2.id }); + await assetRepo.create(asset); + await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]); + + expect(initialSyncResponse).toHaveLength(1); + expect(initialSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + assetId: asset.id, + city: null, + country: null, + dateTimeOriginal: null, + description: '', + exifImageHeight: null, + exifImageWidth: null, + exposureTime: null, + fNumber: null, + fileSizeInByte: null, + focalLength: null, + fps: null, + iso: null, + latitude: null, + lensModel: null, + longitude: null, + make: 'Canon', + model: null, + modifyDate: null, + orientation: null, + profileDescription: null, + projectionType: null, + rating: null, + state: null, + timeZone: null, + }, + type: SyncEntityType.PartnerAssetExifV1, + }, + ]), + ); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should not sync partner asset exif for own user', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: auth.user.id }); + await assetRepo.create(asset); + await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); + + await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1); + await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0); + }); + + it('should not sync partner asset exif for unrelated user', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + + const user2 = mediumFactory.userInsert(); + const user3 = mediumFactory.userInsert(); + await 
Promise.all([userRepo.create(user2), userRepo.create(user3)]); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: user3.id }); + await assetRepo.create(asset); + await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }); + + const sessionRepo = getRepository('session'); + const session = mediumFactory.sessionInsert({ userId: user3.id }); + await sessionRepo.create(session); + + const authUser3 = factory.auth({ session, user: user3 }); + await expect(testSync(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1); + await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0); + }); +}); diff --git a/server/test/medium/specs/sync/sync-partner-asset.spec.ts b/server/test/medium/specs/sync/sync-partner-asset.spec.ts new file mode 100644 index 0000000000..70e31eca4c --- /dev/null +++ b/server/test/medium/specs/sync/sync-partner-asset.spec.ts @@ -0,0 +1,208 @@ +import { Kysely } from 'kysely'; +import { DB } from 'src/db'; +import { SyncEntityType, SyncRequestType } from 'src/enum'; +import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory'; +import { factory } from 'test/small.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = async (db?: Kysely) => { + const database = db || defaultDatabase; + const result = newSyncTest({ db: database }); + const { auth, create } = newSyncAuthUser(); + await create(database); + return { ...result, auth }; +}; + +beforeAll(async () => { + defaultDatabase = await getKyselyDB(); +}); + +describe.concurrent(SyncRequestType.PartnerAssetsV1, () => { + it('should detect and sync the first partner asset', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA='; + const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA='; + const date = new Date().toISOString(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ + ownerId: user2.id, + checksum: Buffer.from(checksum, 'base64'), + thumbhash: Buffer.from(thumbhash, 'base64'), + fileCreatedAt: date, + fileModifiedAt: date, + localDateTime: date, + deletedAt: null, + }); + await assetRepo.create(asset); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); + + expect(initialSyncResponse).toHaveLength(1); + expect(initialSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + id: asset.id, + ownerId: asset.ownerId, + thumbhash, + checksum, + deletedAt: null, + fileCreatedAt: date, + fileModifiedAt: date, + isFavorite: false, + localDateTime: date, + type: asset.type, + visibility: asset.visibility, + }, + type: SyncEntityType.PartnerAssetV1, + }, + ]), + ); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should detect and sync a deleted partner asset', async () => { + const { auth, sut, 
getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + const asset = mediumFactory.assetInsert({ ownerId: user2.id }); + + const assetRepo = getRepository('asset'); + await assetRepo.create(asset); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + await assetRepo.remove(asset); + + const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); + + expect(response).toHaveLength(1); + expect(response).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + assetId: asset.id, + }, + type: SyncEntityType.PartnerAssetDeleteV1, + }, + ]), + ); + + const acks = response.map(({ ack }) => ack); + await sut.setAcks(auth, { acks }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should not sync a deleted partner asset due to a user delete', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + + const assetRepo = getRepository('asset'); + await assetRepo.create(mediumFactory.assetInsert({ ownerId: user2.id })); + + await userRepo.delete({ id: user2.id }, true); + + const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]); + expect(response).toHaveLength(0); + }); + + it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const assetRepo = getRepository('asset'); + await assetRepo.create(mediumFactory.assetInsert({ ownerId: user2.id })); + + const partnerRepo = getRepository('partner'); + const partner = { sharedById: user2.id, sharedWithId: auth.user.id }; + await partnerRepo.create(partner); + + await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(1); + + await partnerRepo.remove(partner); + + await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); + }); + + it('should not sync an asset or asset delete for own user', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: auth.user.id }); + await assetRepo.create(asset); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + + await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1); + await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); + + await assetRepo.remove(asset); + + await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1); + await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); + }); + + it('should not sync an asset or asset delete for unrelated user', async () => { + const { auth, getRepository, testSync } = await setup(); 
+ + const userRepo = getRepository('user'); + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const sessionRepo = getRepository('session'); + const session = mediumFactory.sessionInsert({ userId: user2.id }); + await sessionRepo.create(session); + + const auth2 = factory.auth({ session, user: user2 }); + + const assetRepo = getRepository('asset'); + const asset = mediumFactory.assetInsert({ ownerId: user2.id }); + await assetRepo.create(asset); + + await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1); + await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); + + await assetRepo.remove(asset); + + await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1); + await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0); + }); +}); diff --git a/server/test/medium/specs/sync/sync-partner.spec.ts b/server/test/medium/specs/sync/sync-partner.spec.ts new file mode 100644 index 0000000000..f262eec853 --- /dev/null +++ b/server/test/medium/specs/sync/sync-partner.spec.ts @@ -0,0 +1,221 @@ +import { Kysely } from 'kysely'; +import { DB } from 'src/db'; +import { SyncEntityType, SyncRequestType } from 'src/enum'; +import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = async (db?: Kysely) => { + const database = db || defaultDatabase; + const result = newSyncTest({ db: database }); + const { auth, create } = newSyncAuthUser(); + await create(database); + return { ...result, auth }; +}; + +beforeAll(async () => { + defaultDatabase = await getKyselyDB(); +}); + +describe.concurrent(SyncEntityType.PartnerV1, () => { + it('should detect and sync the first partner', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const user1 = auth.user; + const userRepo = getRepository('user'); + const partnerRepo = getRepository('partner'); + + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id }); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); + + expect(initialSyncResponse).toHaveLength(1); + expect(initialSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + inTimeline: partner.inTimeline, + sharedById: partner.sharedById, + sharedWithId: partner.sharedWithId, + }, + type: 'PartnerV1', + }, + ]), + ); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should detect and sync a deleted partner', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user1 = auth.user; + const user2 = mediumFactory.userInsert(); + await userRepo.create(user2); + + const partnerRepo = getRepository('partner'); + const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id }); + await partnerRepo.remove(partner); + + const response = await testSync(auth, [SyncRequestType.PartnersV1]); + + expect(response).toHaveLength(1); + expect(response).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + sharedById: partner.sharedById, + sharedWithId: partner.sharedWithId, + 
}, + type: 'PartnerDeleteV1', + }, + ]), + ); + + const acks = response.map(({ ack }) => ack); + await sut.setAcks(auth, { acks }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should detect and sync a partner share both to and from another user', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user1 = auth.user; + const user2 = await userRepo.create(mediumFactory.userInsert()); + + const partnerRepo = getRepository('partner'); + const partner1 = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id }); + const partner2 = await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id }); + + const response = await testSync(auth, [SyncRequestType.PartnersV1]); + + expect(response).toHaveLength(2); + expect(response).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + inTimeline: partner1.inTimeline, + sharedById: partner1.sharedById, + sharedWithId: partner1.sharedWithId, + }, + type: 'PartnerV1', + }, + { + ack: expect.any(String), + data: { + inTimeline: partner2.inTimeline, + sharedById: partner2.sharedById, + sharedWithId: partner2.sharedWithId, + }, + type: 'PartnerV1', + }, + ]), + ); + + await sut.setAcks(auth, { acks: [response[1].ack] }); + + const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should sync a partner and then an update to that same partner', async () => { + const { auth, sut, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user1 = auth.user; + const user2 = await userRepo.create(mediumFactory.userInsert()); + + const partnerRepo = getRepository('partner'); + const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id }); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); + + expect(initialSyncResponse).toHaveLength(1); + expect(initialSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + inTimeline: partner.inTimeline, + sharedById: partner.sharedById, + sharedWithId: partner.sharedWithId, + }, + type: 'PartnerV1', + }, + ]), + ); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + const updated = await partnerRepo.update( + { sharedById: partner.sharedById, sharedWithId: partner.sharedWithId }, + { inTimeline: true }, + ); + + const updatedSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]); + + expect(updatedSyncResponse).toHaveLength(1); + expect(updatedSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + inTimeline: updated.inTimeline, + sharedById: updated.sharedById, + sharedWithId: updated.sharedWithId, + }, + type: 'PartnerV1', + }, + ]), + ); + }); + + it('should not sync a partner or partner delete for an unrelated user', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = await userRepo.create(mediumFactory.userInsert()); + const user3 = await userRepo.create(mediumFactory.userInsert()); + + const partnerRepo = getRepository('partner'); + const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user3.id }); + + expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0); + + await 
partnerRepo.remove(partner); + + expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0); + }); + + it('should not sync a partner delete after a user is deleted', async () => { + const { auth, getRepository, testSync } = await setup(); + + const userRepo = getRepository('user'); + const user2 = await userRepo.create(mediumFactory.userInsert()); + + const partnerRepo = getRepository('partner'); + await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id }); + await userRepo.delete({ id: user2.id }, true); + + expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0); + }); +}); diff --git a/server/test/medium/specs/sync/sync-types.spec.ts b/server/test/medium/specs/sync/sync-types.spec.ts new file mode 100644 index 0000000000..1af5a68fd6 --- /dev/null +++ b/server/test/medium/specs/sync/sync-types.spec.ts @@ -0,0 +1,12 @@ +import { SyncRequestType } from 'src/enum'; +import { SYNC_TYPES_ORDER } from 'src/services/sync.service'; + +describe('types', () => { + it('should have all the types in the ordering variable', () => { + for (const key in SyncRequestType) { + expect(SYNC_TYPES_ORDER).includes(key); + } + + expect(SYNC_TYPES_ORDER.length).toBe(Object.keys(SyncRequestType).length); + }); +}); diff --git a/server/test/medium/specs/sync/sync-user.spec.ts b/server/test/medium/specs/sync/sync-user.spec.ts new file mode 100644 index 0000000000..2cea38267c --- /dev/null +++ b/server/test/medium/specs/sync/sync-user.spec.ts @@ -0,0 +1,179 @@ +import { Kysely } from 'kysely'; +import { DB } from 'src/db'; +import { SyncEntityType, SyncRequestType } from 'src/enum'; +import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = async (db?: Kysely) => { + const database = db || defaultDatabase; + const result = newSyncTest({ db: database }); + const { auth, create } = newSyncAuthUser(); + await create(database); + return { ...result, auth }; +}; + +beforeAll(async () => { + defaultDatabase = await getKyselyDB(); +}); + +describe.concurrent(SyncEntityType.UserV1, () => { + it('should detect and sync the first user', async () => { + const { auth, sut, getRepository, testSync } = await setup(await getKyselyDB()); + + const userRepo = getRepository('user'); + const user = await userRepo.get(auth.user.id, { withDeleted: false }); + if (!user) { + expect.fail('First user should exist'); + } + + const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); + expect(initialSyncResponse).toHaveLength(1); + expect(initialSyncResponse).toEqual([ + { + ack: expect.any(String), + data: { + deletedAt: user.deletedAt, + email: user.email, + id: user.id, + name: user.name, + }, + type: 'UserV1', + }, + ]); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should detect and sync a soft deleted user', async () => { + const { auth, sut, getRepository, testSync } = await setup(await getKyselyDB()); + + const deletedAt = new Date().toISOString(); + const deletedUser = mediumFactory.userInsert({ deletedAt }); + const deleted = await getRepository('user').create(deletedUser); + + const response = await testSync(auth, [SyncRequestType.UsersV1]); + + expect(response).toHaveLength(2); + expect(response).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + 
deletedAt: null, + email: auth.user.email, + id: auth.user.id, + name: auth.user.name, + }, + type: 'UserV1', + }, + { + ack: expect.any(String), + data: { + deletedAt, + email: deleted.email, + id: deleted.id, + name: deleted.name, + }, + type: 'UserV1', + }, + ]), + ); + + const acks = [response[1].ack]; + await sut.setAcks(auth, { acks }); + const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should detect and sync a deleted user', async () => { + const { auth, sut, getRepository, testSync } = await setup(await getKyselyDB()); + + const userRepo = getRepository('user'); + const user = mediumFactory.userInsert(); + await userRepo.create(user); + await userRepo.delete({ id: user.id }, true); + + const response = await testSync(auth, [SyncRequestType.UsersV1]); + + expect(response).toHaveLength(2); + expect(response).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + userId: user.id, + }, + type: 'UserDeleteV1', + }, + { + ack: expect.any(String), + data: { + deletedAt: null, + email: auth.user.email, + id: auth.user.id, + name: auth.user.name, + }, + type: 'UserV1', + }, + ]), + ); + + const acks = response.map(({ ack }) => ack); + await sut.setAcks(auth, { acks }); + const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); + + expect(ackSyncResponse).toHaveLength(0); + }); + + it('should sync a user and then an update to that same user', async () => { + const { auth, sut, getRepository, testSync } = await setup(await getKyselyDB()); + + const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); + + expect(initialSyncResponse).toHaveLength(1); + expect(initialSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + deletedAt: null, + email: auth.user.email, + id: auth.user.id, + name: auth.user.name, + }, + type: 'UserV1', + }, + ]), + ); + + const acks = [initialSyncResponse[0].ack]; + await sut.setAcks(auth, { acks }); + + const userRepo = getRepository('user'); + const updated = await userRepo.update(auth.user.id, { name: 'new name' }); + const updatedSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]); + + expect(updatedSyncResponse).toHaveLength(1); + expect(updatedSyncResponse).toEqual( + expect.arrayContaining([ + { + ack: expect.any(String), + data: { + deletedAt: null, + email: auth.user.email, + id: auth.user.id, + name: updated.name, + }, + type: 'UserV1', + }, + ]), + ); + }); +}); From 55adc136c8d3f81f63e904246d29e83f224ba304 Mon Sep 17 00:00:00 2001 From: github-actions <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 19:47:42 +0000 Subject: [PATCH 30/35] chore: version v1.133.0 --- cli/package-lock.json | 6 +- cli/package.json | 2 +- docs/static/archived-versions.json | 4 ++ e2e/package-lock.json | 8 +-- e2e/package.json | 2 +- mobile/android/fastlane/Fastfile | 4 +- mobile/ios/fastlane/Fastfile | 2 +- mobile/openapi/README.md | 2 +- mobile/pubspec.yaml | 2 +- open-api/immich-openapi-specs.json | 2 +- open-api/typescript-sdk/package-lock.json | 4 +- open-api/typescript-sdk/package.json | 2 +- open-api/typescript-sdk/src/fetch-client.ts | 2 +- server/package-lock.json | 4 +- server/package.json | 2 +- web/package-lock.json | 66 ++++++++++++++++++++- web/package.json | 2 +- 17 files changed, 90 insertions(+), 26 deletions(-) diff --git a/cli/package-lock.json b/cli/package-lock.json index 86aedc0875..8fdfd8bcf4 100644 --- a/cli/package-lock.json +++ 
b/cli/package-lock.json @@ -1,12 +1,12 @@ { "name": "@immich/cli", - "version": "2.2.65", + "version": "2.2.66", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@immich/cli", - "version": "2.2.65", + "version": "2.2.66", "license": "GNU Affero General Public License version 3", "dependencies": { "chokidar": "^4.0.3", @@ -54,7 +54,7 @@ }, "../open-api/typescript-sdk": { "name": "@immich/sdk", - "version": "1.132.3", + "version": "1.133.0", "dev": true, "license": "GNU Affero General Public License version 3", "dependencies": { diff --git a/cli/package.json b/cli/package.json index 40c19c91b1..462974b41a 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,6 +1,6 @@ { "name": "@immich/cli", - "version": "2.2.65", + "version": "2.2.66", "description": "Command Line Interface (CLI) for Immich", "type": "module", "exports": "./dist/index.js", diff --git a/docs/static/archived-versions.json b/docs/static/archived-versions.json index aefd29ebb5..a7debc30c8 100644 --- a/docs/static/archived-versions.json +++ b/docs/static/archived-versions.json @@ -1,4 +1,8 @@ [ + { + "label": "v1.133.0", + "url": "https://v1.133.0.archive.immich.app" + }, { "label": "v1.132.3", "url": "https://v1.132.3.archive.immich.app" diff --git a/e2e/package-lock.json b/e2e/package-lock.json index c9737b747a..be5606903a 100644 --- a/e2e/package-lock.json +++ b/e2e/package-lock.json @@ -1,12 +1,12 @@ { "name": "immich-e2e", - "version": "1.132.3", + "version": "1.133.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "immich-e2e", - "version": "1.132.3", + "version": "1.133.0", "license": "GNU Affero General Public License version 3", "devDependencies": { "@eslint/eslintrc": "^3.1.0", @@ -44,7 +44,7 @@ }, "../cli": { "name": "@immich/cli", - "version": "2.2.65", + "version": "2.2.66", "dev": true, "license": "GNU Affero General Public License version 3", "dependencies": { @@ -93,7 +93,7 @@ }, "../open-api/typescript-sdk": { "name": "@immich/sdk", - "version": "1.132.3", + "version": "1.133.0", "dev": true, "license": "GNU Affero General Public License version 3", "dependencies": { diff --git a/e2e/package.json b/e2e/package.json index fc0196fb99..abd222c601 100644 --- a/e2e/package.json +++ b/e2e/package.json @@ -1,6 +1,6 @@ { "name": "immich-e2e", - "version": "1.132.3", + "version": "1.133.0", "description": "", "main": "index.js", "type": "module", diff --git a/mobile/android/fastlane/Fastfile b/mobile/android/fastlane/Fastfile index a0b08bb316..428dac5d0f 100644 --- a/mobile/android/fastlane/Fastfile +++ b/mobile/android/fastlane/Fastfile @@ -35,8 +35,8 @@ platform :android do task: 'bundle', build_type: 'Release', properties: { - "android.injected.version.code" => 197, - "android.injected.version.name" => "1.132.3", + "android.injected.version.code" => 198, + "android.injected.version.name" => "1.133.0", } ) upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab') diff --git a/mobile/ios/fastlane/Fastfile b/mobile/ios/fastlane/Fastfile index 3306fef1e2..fc8c35715b 100644 --- a/mobile/ios/fastlane/Fastfile +++ b/mobile/ios/fastlane/Fastfile @@ -22,7 +22,7 @@ platform :ios do path: "./Runner.xcodeproj", ) increment_version_number( - version_number: "1.132.3" + version_number: "1.133.0" ) increment_build_number( build_number: latest_testflight_build_number + 1, diff --git a/mobile/openapi/README.md b/mobile/openapi/README.md index d2cae47fb5..29dd5592aa 100644 --- 
a/mobile/openapi/README.md +++ b/mobile/openapi/README.md @@ -3,7 +3,7 @@ Immich API This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: -- API version: 1.132.3 +- API version: 1.133.0 - Generator version: 7.8.0 - Build package: org.openapitools.codegen.languages.DartClientCodegen diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index 37c9ef7498..dacb40f5bd 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -2,7 +2,7 @@ name: immich_mobile description: Immich - selfhosted backup media file on mobile phone publish_to: 'none' -version: 1.132.3+197 +version: 1.133.0+198 environment: sdk: '>=3.3.0 <4.0.0' diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index cdd1f00763..1d6bd3b048 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -8132,7 +8132,7 @@ "info": { "title": "Immich", "description": "Immich API", - "version": "1.132.3", + "version": "1.133.0", "contact": {} }, "tags": [], diff --git a/open-api/typescript-sdk/package-lock.json b/open-api/typescript-sdk/package-lock.json index 542f67d62e..0919dffd42 100644 --- a/open-api/typescript-sdk/package-lock.json +++ b/open-api/typescript-sdk/package-lock.json @@ -1,12 +1,12 @@ { "name": "@immich/sdk", - "version": "1.132.3", + "version": "1.133.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@immich/sdk", - "version": "1.132.3", + "version": "1.133.0", "license": "GNU Affero General Public License version 3", "dependencies": { "@oazapfts/runtime": "^1.0.2" diff --git a/open-api/typescript-sdk/package.json b/open-api/typescript-sdk/package.json index a5d4a1592b..cae3abbd48 100644 --- a/open-api/typescript-sdk/package.json +++ b/open-api/typescript-sdk/package.json @@ -1,6 +1,6 @@ { "name": "@immich/sdk", - "version": "1.132.3", + "version": "1.133.0", "description": "Auto-generated TypeScript SDK for the Immich API", "type": "module", "main": "./build/index.js", diff --git a/open-api/typescript-sdk/src/fetch-client.ts b/open-api/typescript-sdk/src/fetch-client.ts index bb1ba605a5..361d5d0b09 100644 --- a/open-api/typescript-sdk/src/fetch-client.ts +++ b/open-api/typescript-sdk/src/fetch-client.ts @@ -1,6 +1,6 @@ /** * Immich - * 1.132.3 + * 1.133.0 * DO NOT MODIFY - This file has been generated using oazapfts. 
* See https://www.npmjs.com/package/oazapfts */ diff --git a/server/package-lock.json b/server/package-lock.json index d862a6d271..3e7ca7120f 100644 --- a/server/package-lock.json +++ b/server/package-lock.json @@ -1,12 +1,12 @@ { "name": "immich", - "version": "1.132.3", + "version": "1.133.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "immich", - "version": "1.132.3", + "version": "1.133.0", "hasInstallScript": true, "license": "GNU Affero General Public License version 3", "dependencies": { diff --git a/server/package.json b/server/package.json index f95817342e..c81d1f5b35 100644 --- a/server/package.json +++ b/server/package.json @@ -1,6 +1,6 @@ { "name": "immich", - "version": "1.132.3", + "version": "1.133.0", "description": "", "author": "", "private": true, diff --git a/web/package-lock.json b/web/package-lock.json index bcf62268ce..c68f337e77 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -1,12 +1,12 @@ { "name": "immich-web", - "version": "1.132.3", + "version": "1.133.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "immich-web", - "version": "1.132.3", + "version": "1.133.0", "license": "GNU Affero General Public License version 3", "dependencies": { "@formatjs/icu-messageformat-parser": "^2.9.8", @@ -86,7 +86,7 @@ }, "../open-api/typescript-sdk": { "name": "@immich/sdk", - "version": "1.132.3", + "version": "1.133.0", "license": "GNU Affero General Public License version 3", "dependencies": { "@oazapfts/runtime": "^1.0.2" @@ -2490,6 +2490,66 @@ "node": ">=14.0.0" } }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": { + "version": "1.4.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.0.2", + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": { + "version": "1.4.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/wasi-threads": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.9", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.0", + "@emnapi/runtime": "^1.4.0", + "@tybys/wasm-util": "^0.9.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": { + "version": "0.9.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/tslib": { + "version": "2.8.0", + "dev": true, + "inBundle": true, + "license": "0BSD", + "optional": true + }, "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { "version": "4.1.7", "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.7.tgz", diff --git a/web/package.json b/web/package.json index 96b3189d4b..495ceb56c1 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "immich-web", - "version": "1.132.3", + "version": "1.133.0", "license": "GNU Affero General Public License version 3", "type": "module", "scripts": { From 58c1b928161843606ca775ea33d58f5cdc094ef1 Mon Sep 17 00:00:00 
2001 From: Mert <101130780+mertalev@users.noreply.github.com> Date: Wed, 21 May 2025 17:27:28 -0400 Subject: [PATCH 31/35] fix(server): missing button for duplicate detection not working (#18433) qualify column --- server/src/queries/asset.job.repository.sql | 2 +- server/src/repositories/asset-job.repository.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/queries/asset.job.repository.sql b/server/src/queries/asset.job.repository.sql index 2301408ffe..3d47b7517e 100644 --- a/server/src/queries/asset.job.repository.sql +++ b/server/src/queries/asset.job.repository.sql @@ -184,7 +184,7 @@ select from "assets" inner join "smart_search" on "assets"."id" = "smart_search"."assetId" - inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id" + inner join "asset_job_status" as "job_status" on "job_status"."assetId" = "assets"."id" where "assets"."visibility" != $1 and "assets"."deletedAt" is null diff --git a/server/src/repositories/asset-job.repository.ts b/server/src/repositories/asset-job.repository.ts index b9ce52962c..6f86edaaa1 100644 --- a/server/src/repositories/asset-job.repository.ts +++ b/server/src/repositories/asset-job.repository.ts @@ -145,7 +145,7 @@ export class AssetJobRepository { .innerJoin('smart_search', 'assets.id', 'smart_search.assetId') .$if(!force, (qb) => qb - .innerJoin('asset_job_status as job_status', 'assetId', 'assets.id') + .innerJoin('asset_job_status as job_status', 'job_status.assetId', 'assets.id') .where('job_status.duplicatesDetectedAt', 'is', null), ) .stream(); From 7bb25a5c8daf1a38f18617dd7399156e088b5e82 Mon Sep 17 00:00:00 2001 From: Christos Gkantidis Date: Wed, 21 May 2025 23:38:48 +0200 Subject: [PATCH 32/35] fix: typo in english translation (#18434) --- i18n/en.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/i18n/en.json b/i18n/en.json index d813b7f335..470fbd56e3 100644 --- a/i18n/en.json +++ b/i18n/en.json @@ -1292,7 +1292,7 @@ "no_explore_results_message": "Upload more photos to explore your collection.", "no_favorites_message": "Add favorites to quickly find your best pictures and videos", "no_libraries_message": "Create an external library to view your photos and videos", - "no_locked_photos_message": "Photos and videos in Locked Folder are hidden and won't show up as you browser your library.", + "no_locked_photos_message": "Photos and videos in Locked Folder are hidden and won't show up as you browse your library.", "no_name": "No Name", "no_notifications": "No notifications", "no_people_found": "No matching people found", From 0b8fc7b4930e01f441aca5b95858e412920f3afd Mon Sep 17 00:00:00 2001 From: Matthew Momjian <50788000+mmomjian@users.noreply.github.com> Date: Wed, 21 May 2025 19:19:04 -0400 Subject: [PATCH 33/35] fix(docs): more vchord details (#18435) * more details * typo --- .../administration/postgres-standalone.md | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/docs/docs/administration/postgres-standalone.md b/docs/docs/administration/postgres-standalone.md index bbdd06847b..95a281d0e1 100644 --- a/docs/docs/administration/postgres-standalone.md +++ b/docs/docs/administration/postgres-standalone.md @@ -80,11 +80,14 @@ The easiest option is to have both extensions installed during the migration: 2. Install `pgvector` (`>= 0.7.0, < 1.0.0`). 
The easiest way to do this is on Debian/Ubuntu by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`) 3. [Install VectorChord][vchord-install] 4. Add `shared_preload_libraries= 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed -5. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client -6. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output -7. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;` -8. Remove the `vectors.so` entry from the `shared_preload_libraries` setting -9. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain install as it provides the data types used by `vchord` +5. Restart the Postgres database +6. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client +7. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output +8. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;` +9. Drop the old schema by running `DROP SCHEMA vectors;` +10. Remove the `vectors.so` entry from the `shared_preload_libraries` setting +11. Restart the Postgres database +12. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain installed as it provides the data types used by `vchord` If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps: @@ -129,11 +132,5 @@ ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512); Note that VectorChord itself uses pgvector types, so you should not uninstall pgvector after following these steps. -### Common errors - -#### Permission denied for view - -If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO ;`. 
- [vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html [pg-apt]: https://www.postgresql.org/download/linux/#generic From bc8e08f5e89f533cee2218759f09d8a574f3c5df Mon Sep 17 00:00:00 2001 From: Mert <101130780+mertalev@users.noreply.github.com> Date: Thu, 22 May 2025 03:41:10 -0400 Subject: [PATCH 34/35] feat: lower disk usage during migration (#18440) feat: less disk usage during migration --- server/src/repositories/database.repository.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src/repositories/database.repository.ts b/server/src/repositories/database.repository.ts index 187bddf534..a87ded5776 100644 --- a/server/src/repositories/database.repository.ts +++ b/server/src/repositories/database.repository.ts @@ -247,8 +247,8 @@ export class DatabaseRepository { return; } const dimSize = await this.getDimensionSize(table); + await sql`DROP INDEX IF EXISTS ${sql.raw(indexName)}`.execute(this.db); await this.db.transaction().execute(async (tx) => { - await sql`DROP INDEX IF EXISTS ${sql.raw(indexName)}`.execute(tx); if (!rows.some((row) => row.columnName === 'embedding')) { this.logger.warn(`Column 'embedding' does not exist in table '${table}', truncating and adding column.`); await sql`TRUNCATE TABLE ${sql.raw(table)}`.execute(tx); @@ -262,6 +262,11 @@ export class DatabaseRepository { SET DATA TYPE ${sql.raw(schema)}vector(${sql.raw(String(dimSize))})`.execute(tx); await sql.raw(vectorIndexQuery({ vectorExtension, table, indexName, lists })).execute(tx); }); + try { + await sql`VACUUM ANALYZE ${sql.raw(table)}`.execute(this.db); + } catch (error: any) { + this.logger.warn(`Failed to vacuum table '${table}'. The DB will temporarily use more disk space: ${error}`); + } this.logger.log(`Reindexed ${indexName}`); } From c278bb0e5ba05571e38ba80ff2b21a3e35720465 Mon Sep 17 00:00:00 2001 From: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> Date: Thu, 22 May 2025 14:48:07 +0200 Subject: [PATCH 35/35] fix: avatar selection z-index issues (#18425) --- .../navigation-bar/account-info-panel.svelte | 5 ++++- .../shared-components/navigation-bar/navigation-bar.svelte | 7 +++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/web/src/lib/components/shared-components/navigation-bar/account-info-panel.svelte b/web/src/lib/components/shared-components/navigation-bar/account-info-panel.svelte index f259f493ca..22b74767d5 100644 --- a/web/src/lib/components/shared-components/navigation-bar/account-info-panel.svelte +++ b/web/src/lib/components/shared-components/navigation-bar/account-info-panel.svelte @@ -41,7 +41,10 @@ class="border" size="12" padding="2" - onclick={() => modalManager.show(AvatarEditModal, {})} + onclick={async () => { + onClose(); + await modalManager.show(AvatarEditModal, {}); + }} />
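For readers skimming the hunk above: the z-index problem is avoided by closing the account info panel before the avatar edit modal is awaited, rather than leaving both overlays mounted at once. A minimal TypeScript sketch of that ordering follows; the `ModalManager` interface, the `openEditModal` wrapper, and the parameter names are illustrative stand-ins, not code from the patch.

```ts
// Illustrative sketch only — not part of the patch. It mirrors the pattern applied in the
// account-info-panel hunk above: dismiss the parent overlay (and its stacking context)
// before awaiting the modal, so the modal cannot end up layered underneath the panel.
// `ModalComponent`, `ModalManager`, and `openEditModal` are hypothetical names.

type ModalComponent = object;

interface ModalManager {
  show(component: ModalComponent, props: Record<string, unknown>): Promise<void>;
}

async function openEditModal(
  modalManager: ModalManager,
  avatarEditModal: ModalComponent,
  onClose: () => void,
): Promise<void> {
  onClose(); // close the account panel first; its stacking context disappears with it
  await modalManager.show(avatarEditModal, {}); // the modal now stacks cleanly on top of the page
}
```

The key design choice is simply the ordering: the synchronous `onClose()` runs before the `await`, so the panel's stacking context is already gone by the time the modal mounts.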
diff --git a/web/src/lib/components/shared-components/navigation-bar/navigation-bar.svelte b/web/src/lib/components/shared-components/navigation-bar/navigation-bar.svelte index 582270b1af..f0e140c8c1 100644 --- a/web/src/lib/components/shared-components/navigation-bar/navigation-bar.svelte +++ b/web/src/lib/components/shared-components/navigation-bar/navigation-bar.svelte @@ -5,6 +5,7 @@