merge main

This commit is contained in:
Alex 2025-07-08 11:13:05 -05:00
commit 99eb879188
142 changed files with 1774 additions and 422 deletions

View File

@ -50,7 +50,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
-uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
+uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
-uses: github/codeql-action/autobuild@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
+uses: github/codeql-action/autobuild@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
+uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: '/language:${{matrix.language}}'

View File

@ -130,7 +130,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
-uses: github/codeql-action/upload-sarif@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
+uses: github/codeql-action/upload-sarif@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
sarif_file: results.sarif
category: zizmor

View File

@ -36,7 +36,7 @@ services:
- 2285:2285
redis:
-image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
+image: redis:6.2-alpine@sha256:03fd052257735b41cd19f3d8ae9782926bf9b704fb6a9dc5e29f9ccfbe8827f0
database:
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:3aef84a0a4fabbda17ef115c3019ba0c914ec73e9f6e59203674322d858b8eea

View File

@ -0,0 +1,17 @@
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
class LocalAlbumService {
final DriftLocalAlbumRepository _repository;
const LocalAlbumService(this._repository);
Future<List<LocalAlbum>> getAll() {
return _repository.getAll();
}
Future<LocalAsset?> getThumbnail(String albumId) {
return _repository.getThumbnail(albumId);
}
}
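A minimal usage sketch, not part of the commit: the service simply delegates to DriftLocalAlbumRepository, wired the same way the new providers in album.provider.dart further down do it. `printAlbums` is a hypothetical helper and `db` stands in for an opened Drift instance.

import 'package:immich_mobile/domain/services/local_album.service.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';

Future<void> printAlbums(Drift db) async {
  // Same wiring as localAlbumServiceProvider later in this commit.
  final service = LocalAlbumService(DriftLocalAlbumRepository(db));
  for (final album in await service.getAll()) {
    final thumb = await service.getThumbnail(album.id);
    print('${album.name}: thumbnail ${thumb?.id ?? 'none'}');
  }
}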

View File

@ -42,16 +42,29 @@ class TimelineFactory {
TimelineService localAlbum({required String albumId}) => TimelineService(
assetSource: (offset, count) => _timelineRepository
-.getLocalBucketAssets(albumId, offset: offset, count: count),
-bucketSource: () =>
-_timelineRepository.watchLocalBucket(albumId, groupBy: groupBy),
+.getLocalAlbumBucketAssets(albumId, offset: offset, count: count),
+bucketSource: () => _timelineRepository.watchLocalAlbumBucket(
+albumId,
+groupBy: groupBy,
+),
);
TimelineService remoteAlbum({required String albumId}) => TimelineService(
assetSource: (offset, count) => _timelineRepository
-.getRemoteBucketAssets(albumId, offset: offset, count: count),
-bucketSource: () =>
-_timelineRepository.watchRemoteBucket(albumId, groupBy: groupBy),
+.getRemoteAlbumBucketAssets(albumId, offset: offset, count: count),
+bucketSource: () => _timelineRepository.watchRemoteAlbumBucket(
+albumId,
+groupBy: groupBy,
+),
);
+TimelineService remoteAssets(String ownerId) => TimelineService(
+assetSource: (offset, count) => _timelineRepository
+.getRemoteBucketAssets(ownerId, offset: offset, count: count),
+bucketSource: () => _timelineRepository.watchRemoteBucket(
+ownerId,
+groupBy: GroupAssetsBy.month,
+),
+);
TimelineService favorite(String userId) => TimelineService(

View File

@ -361,6 +361,24 @@ class DriftLocalAlbumRepository extends DriftDatabaseRepository {
batch.deleteWhere(_db.localAssetEntity, (f) => f.id.isIn(ids));
});
}
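/// Picks the album's first asset (ordered by ascending asset id) as its
/// thumbnail; returns null when the album has no assets.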
Future<LocalAsset?> getThumbnail(String albumId) async {
final query = _db.localAlbumAssetEntity.select().join([
innerJoin(
_db.localAssetEntity,
_db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id),
),
])
..where(_db.localAlbumAssetEntity.albumId.equals(albumId))
..orderBy([OrderingTerm.asc(_db.localAssetEntity.id)])
..limit(1);
final results = await query
.map((row) => row.readTable(_db.localAssetEntity).toDto())
.get();
return results.isNotEmpty ? results.first : null;
}
}
extension on LocalAlbumEntityData {

View File

@ -18,13 +18,21 @@ class DriftRemoteAlbumRepository extends DriftDatabaseRepository {
_db.remoteAlbumAssetEntity.albumId.equalsExp(_db.remoteAlbumEntity.id),
useColumns: false,
),
leftOuterJoin(
_db.remoteAssetEntity,
_db.remoteAssetEntity.id.equalsExp(_db.remoteAlbumAssetEntity.assetId),
useColumns: false,
),
leftOuterJoin(
_db.userEntity,
_db.userEntity.id.equalsExp(_db.remoteAlbumEntity.ownerId),
useColumns: false,
),
]);
query
..where(_db.remoteAssetEntity.deletedAt.isNull())
..addColumns([assetCount])
..addColumns([_db.userEntity.name])
..groupBy([_db.remoteAlbumEntity.id]);
if (sortBy.isNotEmpty) {
@ -43,7 +51,7 @@ class DriftRemoteAlbumRepository extends DriftDatabaseRepository {
.map(
(row) => row.readTable(_db.remoteAlbumEntity).toDto(
assetCount: row.read(assetCount) ?? 0,
-ownerName: row.readTable(_db.userEntity).name,
+ownerName: row.read(_db.userEntity.name)!,
),
)
.get();

View File

@ -13,6 +13,22 @@ class RemoteAssetRepository extends DriftDatabaseRepository {
final Drift _db;
const RemoteAssetRepository(this._db) : super(_db);
/// For testing purposes
Future<List<RemoteAsset>> getSome(String userId) {
final query = _db.remoteAssetEntity.select()
..where(
(row) =>
_db.remoteAssetEntity.ownerId.equals(userId) &
_db.remoteAssetEntity.deletedAt.isNull() &
_db.remoteAssetEntity.visibility
.equalsValue(AssetVisibility.timeline),
)
..orderBy([(row) => OrderingTerm.desc(row.createdAt)])
..limit(10);
return query.map((row) => row.toDto()).get();
}
Stream<RemoteAsset?> watchAsset(String id) {
final query = _db.remoteAssetEntity
.select()

View File

@ -104,7 +104,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
).get();
}
-Stream<List<Bucket>> watchLocalBucket(
+Stream<List<Bucket>> watchLocalAlbumBucket(
String albumId, {
GroupAssetsBy groupBy = GroupAssetsBy.day,
}) {
@ -124,6 +124,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
innerJoin(
_db.localAlbumAssetEntity,
_db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id),
+useColumns: false,
),
])
..where(_db.localAlbumAssetEntity.albumId.equals(albumId))
@ -137,7 +138,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
}).watch();
}
-Future<List<BaseAsset>> getLocalBucketAssets(
+Future<List<BaseAsset>> getLocalAlbumBucketAssets(
String albumId, {
required int offset,
required int count,
@ -147,6 +148,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
innerJoin(
_db.localAlbumAssetEntity,
_db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id),
+useColumns: false,
),
],
)
@ -158,7 +160,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
.get();
}
-Stream<List<Bucket>> watchRemoteBucket(
+Stream<List<Bucket>> watchRemoteAlbumBucket(
String albumId, {
GroupAssetsBy groupBy = GroupAssetsBy.day,
}) {
@ -179,9 +181,13 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
_db.remoteAlbumAssetEntity,
_db.remoteAlbumAssetEntity.assetId
.equalsExp(_db.remoteAssetEntity.id),
+useColumns: false,
),
])
-..where(_db.remoteAlbumAssetEntity.albumId.equals(albumId))
+..where(
+_db.remoteAssetEntity.deletedAt.isNull() &
+_db.remoteAlbumAssetEntity.albumId.equals(albumId),
+)
..groupBy([dateExp])
..orderBy([OrderingTerm.desc(dateExp)]);
@ -192,7 +198,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
}).watch();
}
-Future<List<BaseAsset>> getRemoteBucketAssets(
+Future<List<BaseAsset>> getRemoteAlbumBucketAssets(
String albumId, {
required int offset,
required int count,
@ -203,10 +209,14 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
_db.remoteAlbumAssetEntity,
_db.remoteAlbumAssetEntity.assetId
.equalsExp(_db.remoteAssetEntity.id),
+useColumns: false,
),
],
)
-..where(_db.remoteAlbumAssetEntity.albumId.equals(albumId))
+..where(
+_db.remoteAssetEntity.deletedAt.isNull() &
+_db.remoteAlbumAssetEntity.albumId.equals(albumId),
+)
..orderBy([OrderingTerm.desc(_db.remoteAssetEntity.createdAt)])
..limit(count, offset: offset);
return query
@ -214,15 +224,17 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
.get();
}
-Stream<List<Bucket>> watchFavoriteBucket(
-String userId, {
+Stream<List<Bucket>> watchRemoteBucket(
+String ownerId, {
GroupAssetsBy groupBy = GroupAssetsBy.day,
}) {
if (groupBy == GroupAssetsBy.none) {
return _db.remoteAssetEntity
.count(
where: (row) =>
-row.isFavorite.equals(true) & row.ownerId.equals(userId),
+row.deletedAt.isNull() &
+row.visibility.equalsValue(AssetVisibility.timeline) &
+row.ownerId.equals(ownerId),
)
.map(_generateBuckets)
.watchSingle();
@ -234,6 +246,62 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
final query = _db.remoteAssetEntity.selectOnly()
..addColumns([assetCountExp, dateExp])
..where(
_db.remoteAssetEntity.deletedAt.isNull() &
_db.remoteAssetEntity.visibility
.equalsValue(AssetVisibility.timeline) &
_db.remoteAssetEntity.ownerId.equals(ownerId),
)
..groupBy([dateExp])
..orderBy([OrderingTerm.desc(dateExp)]);
return query.map((row) {
final timeline = row.read(dateExp)!.dateFmt(groupBy);
final assetCount = row.read(assetCountExp)!;
return TimeBucket(date: timeline, assetCount: assetCount);
}).watch();
}
Future<List<BaseAsset>> getRemoteBucketAssets(
String ownerId, {
required int offset,
required int count,
}) {
final query = _db.remoteAssetEntity.select()
..where(
(row) =>
row.deletedAt.isNull() &
row.visibility.equalsValue(AssetVisibility.timeline) &
row.ownerId.equals(ownerId),
)
..orderBy([(row) => OrderingTerm.desc(row.createdAt)])
..limit(count, offset: offset);
return query.map((row) => row.toDto()).get();
}
Stream<List<Bucket>> watchFavoriteBucket(
String userId, {
GroupAssetsBy groupBy = GroupAssetsBy.day,
}) {
if (groupBy == GroupAssetsBy.none) {
return _db.remoteAssetEntity
.count(
where: (row) =>
row.deletedAt.isNull() &
row.isFavorite.equals(true) &
row.ownerId.equals(userId),
)
.map(_generateBuckets)
.watchSingle();
}
final assetCountExp = _db.remoteAssetEntity.id.count();
final dateExp = _db.remoteAssetEntity.createdAt.dateFmt(groupBy);
final query = _db.remoteAssetEntity.selectOnly()
..addColumns([assetCountExp, dateExp])
..where(
_db.remoteAssetEntity.deletedAt.isNull() &
_db.remoteAssetEntity.ownerId.equals(userId) &
_db.remoteAssetEntity.isFavorite.equals(true),
)
@ -254,7 +322,10 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
}) {
final query = _db.remoteAssetEntity.select()
..where(
-(row) => row.isFavorite.equals(true) & row.ownerId.equals(userId),
+(row) =>
+row.deletedAt.isNull() &
+row.isFavorite.equals(true) &
+row.ownerId.equals(userId),
)
..orderBy([(row) => OrderingTerm.desc(row.createdAt)])
..limit(count, offset: offset);
@ -318,6 +389,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
return _db.remoteAssetEntity
.count(
where: (row) =>
+row.deletedAt.isNull() &
row.visibility.equalsValue(AssetVisibility.archive) &
row.ownerId.equals(userId),
)
@ -331,6 +403,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
final query = _db.remoteAssetEntity.selectOnly()
..addColumns([assetCountExp, dateExp])
..where(
+_db.remoteAssetEntity.deletedAt.isNull() &
_db.remoteAssetEntity.ownerId.equals(userId) &
_db.remoteAssetEntity.visibility
.equalsValue(AssetVisibility.archive),
@ -353,6 +426,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
final query = _db.remoteAssetEntity.select()
..where(
(row) =>
+row.deletedAt.isNull() &
row.ownerId.equals(userId) &
row.visibility.equalsValue(AssetVisibility.archive),
)
@ -370,6 +444,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
return _db.remoteAssetEntity
.count(
where: (row) =>
+row.deletedAt.isNull() &
row.visibility.equalsValue(AssetVisibility.locked) &
row.ownerId.equals(userId),
)
@ -383,6 +458,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
final query = _db.remoteAssetEntity.selectOnly()
..addColumns([assetCountExp, dateExp])
..where(
+_db.remoteAssetEntity.deletedAt.isNull() &
_db.remoteAssetEntity.ownerId.equals(userId) &
_db.remoteAssetEntity.visibility
.equalsValue(AssetVisibility.locked),
@ -405,6 +481,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
final query = _db.remoteAssetEntity.select()
..where(
(row) =>
+row.deletedAt.isNull() &
row.visibility.equalsValue(AssetVisibility.locked) &
row.ownerId.equals(userId),
)
@ -422,6 +499,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
return _db.remoteAssetEntity
.count(
where: (row) =>
+row.deletedAt.isNull() &
row.type.equalsValue(AssetType.video) &
row.visibility.equalsValue(AssetVisibility.timeline) &
row.ownerId.equals(userId),
@ -436,6 +514,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
final query = _db.remoteAssetEntity.selectOnly()
..addColumns([assetCountExp, dateExp])
..where(
+_db.remoteAssetEntity.deletedAt.isNull() &
_db.remoteAssetEntity.ownerId.equals(userId) &
_db.remoteAssetEntity.type.equalsValue(AssetType.video) &
_db.remoteAssetEntity.visibility
@ -459,10 +538,10 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
final query = _db.remoteAssetEntity.select()
..where(
(row) =>
-_db.remoteAssetEntity.type.equalsValue(AssetType.video) &
-_db.remoteAssetEntity.visibility
-.equalsValue(AssetVisibility.timeline) &
-_db.remoteAssetEntity.ownerId.equals(userId),
+row.deletedAt.isNull() &
+row.type.equalsValue(AssetType.video) &
+row.visibility.equalsValue(AssetVisibility.timeline) &
+row.ownerId.equals(userId),
)
..orderBy([(row) => OrderingTerm.desc(row.createdAt)])
..limit(count, offset: offset);
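Across all of these bucket methods the offset/count contract is the same. A hedged sketch of a consumer, with `dumpTimeline` a hypothetical helper and the repository and method names taken from the diffs above:

Future<int> dumpTimeline(DriftTimelineRepository repo, String ownerId) async {
  // Page through an owner's remote timeline 100 assets at a time.
  var offset = 0;
  while (true) {
    final page =
        await repo.getRemoteBucketAssets(ownerId, offset: offset, count: 100);
    if (page.isEmpty) break;
    offset += page.length;
  }
  return offset; // total assets seen
}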

View File

@ -0,0 +1,116 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/pages/common/large_leading_tile.dart';
import 'package:immich_mobile/presentation/widgets/images/local_album_thumbnail.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/widgets/common/local_album_sliver_app_bar.dart';
@RoutePage()
class DriftLocalAlbumsPage extends StatelessWidget {
const DriftLocalAlbumsPage({super.key});
@override
Widget build(BuildContext context) {
return const Scaffold(
body: CustomScrollView(
slivers: [
LocalAlbumsSliverAppBar(),
_AlbumList(),
],
),
);
}
}
class _AlbumList extends ConsumerWidget {
const _AlbumList();
@override
Widget build(BuildContext context, WidgetRef ref) {
final albums = ref.watch(localAlbumProvider);
return albums.when(
loading: () => const SliverToBoxAdapter(
child: Center(
child: Padding(
padding: EdgeInsets.all(20.0),
child: CircularProgressIndicator(),
),
),
),
error: (error, stack) => SliverToBoxAdapter(
child: Center(
child: Padding(
padding: const EdgeInsets.all(20.0),
child: Text(
'Error loading albums: $error, stack: $stack',
style: TextStyle(
color: context.colorScheme.error,
),
),
),
),
),
data: (albums) {
if (albums.isEmpty) {
return const SliverToBoxAdapter(
child: Center(
child: Padding(
padding: EdgeInsets.all(20.0),
child: Text('No albums found'),
),
),
);
}
return SliverPadding(
padding: const EdgeInsets.all(18.0),
sliver: SliverList.builder(
itemBuilder: (_, index) {
final album = albums[index];
return Padding(
padding: const EdgeInsets.only(bottom: 8.0),
child: LargeLeadingTile(
leadingPadding: const EdgeInsets.only(
right: 16,
),
leading: SizedBox(
width: 80,
height: 80,
child: LocalAlbumThumbnail(
albumId: album.id,
),
),
title: Text(
album.name,
style: context.textTheme.titleSmall?.copyWith(
fontWeight: FontWeight.w600,
),
),
subtitle: Text(
'items_count'.t(
context: context,
args: {'count': album.assetCount},
),
style: context.textTheme.bodyMedium?.copyWith(
color: context.colorScheme.onSurfaceSecondary,
),
),
onTap: () =>
context.pushRoute(LocalTimelineRoute(albumId: album.id)),
),
);
},
itemCount: albums.length,
),
);
},
);
}
}

View File

@ -0,0 +1,29 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/widgets.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/presentation/widgets/timeline/timeline.widget.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
@RoutePage()
class DriftPartnerDetailPage extends StatelessWidget {
final String partnerId;
const DriftPartnerDetailPage({super.key, required this.partnerId});
@override
Widget build(BuildContext context) {
return ProviderScope(
overrides: [
timelineServiceProvider.overrideWith(
(ref) {
final timelineService =
ref.watch(timelineFactoryProvider).remoteAssets(partnerId);
ref.onDispose(timelineService.dispose);
return timelineService;
},
),
],
child: const Timeline(),
);
}
}

View File

@ -0,0 +1,35 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/widgets.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/presentation/widgets/timeline/timeline.widget.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
@RoutePage()
class DriftRecentlyTakenPage extends StatelessWidget {
const DriftRecentlyTakenPage({super.key});
@override
Widget build(BuildContext context) {
return ProviderScope(
overrides: [
timelineServiceProvider.overrideWith(
(ref) {
final user = ref.watch(currentUserProvider);
if (user == null) {
throw Exception(
'User must be logged in to access recently taken',
);
}
final timelineService =
ref.watch(timelineFactoryProvider).remoteAssets(user.id);
ref.onDispose(timelineService.dispose);
return timelineService;
},
),
],
child: const Timeline(),
);
}
}

View File

@ -5,15 +5,43 @@ import 'package:drift/drift.dart' hide Column;
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/presentation/pages/dev/dev_logger.dart';
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/routing/router.dart';
final _features = [
+_Feature(
+name: 'Selection Mode Timeline',
+icon: Icons.developer_mode_rounded,
+onTap: (ctx, ref) async {
+final user = ref.watch(currentUserProvider);
+if (user == null) {
+return Future.value();
+}
+final assets =
+await ref.read(remoteAssetRepositoryProvider).getSome(user.id);
+final selectedAssets = await ctx.pushRoute<Set<BaseAsset>>(
+DriftAssetSelectionTimelineRoute(
+lockedSelectionAssets: assets.toSet(),
+),
+);
+DLog.log(
+"Selected ${selectedAssets?.length ?? 0} assets",
+);
+return Future.value();
+},
+),
_Feature(
name: 'Sync Local',
icon: Icons.photo_album_rounded,
@ -104,6 +132,11 @@ final _features = [
icon: Icons.video_collection_outlined,
onTap: (ctx, _) => ctx.pushRoute(const DriftVideoRoute()),
),
+_Feature(
+name: 'Recently Taken',
+icon: Icons.schedule_outlined,
+onTap: (ctx, _) => ctx.pushRoute(const DriftRecentlyTakenRoute()),
+),
];
@RoutePage()

View File

@ -0,0 +1,50 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/presentation/widgets/timeline/timeline.widget.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
@RoutePage()
class DriftAssetSelectionTimelinePage extends ConsumerWidget {
final Set<BaseAsset> lockedSelectionAssets;
const DriftAssetSelectionTimelinePage({
super.key,
this.lockedSelectionAssets = const {},
});
@override
Widget build(BuildContext context, WidgetRef ref) {
return ProviderScope(
overrides: [
multiSelectProvider.overrideWith(
() => MultiSelectNotifier(
MultiSelectState(
selectedAssets: {},
lockedSelectionAssets: lockedSelectionAssets,
forceEnable: true,
),
),
),
timelineServiceProvider.overrideWith(
(ref) {
final user = ref.watch(currentUserProvider);
if (user == null) {
throw Exception(
'User must be logged in to access asset selection',
);
}
final timelineService =
ref.watch(timelineFactoryProvider).remoteAssets(user.id);
ref.onDispose(timelineService.dispose);
return timelineService;
},
),
],
child: const Timeline(),
);
}
}

View File

@ -5,14 +5,14 @@ import 'package:immich_mobile/domain/models/user.model.dart';
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
-import 'package:immich_mobile/providers/album/album.provider.dart';
+import 'package:immich_mobile/presentation/widgets/images/local_album_thumbnail.widget.dart';
+import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/partner.provider.dart';
import 'package:immich_mobile/providers/search/people.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/utils/image_url_builder.dart';
-import 'package:immich_mobile/widgets/album/album_thumbnail_card.dart';
import 'package:immich_mobile/widgets/common/immich_sliver_app_bar.dart';
import 'package:immich_mobile/widgets/common/user_avatar.dart';
import 'package:immich_mobile/widgets/map/map_thumbnail.dart';
@ -310,8 +310,7 @@ class _LocalAlbumsCollectionCard extends ConsumerWidget {
@override
Widget build(BuildContext context, WidgetRef ref) {
// TODO: Migrate to the drift after local album page
-final albums = ref.watch(localAlbumsProvider);
+final albums = ref.watch(localAlbumProvider);
return LayoutBuilder(
builder: (context, constraints) {
@ -320,9 +319,7 @@ class _LocalAlbumsCollectionCard extends ConsumerWidget {
final size = context.width * widthFactor - 20.0;
return GestureDetector(
-onTap: () => context.pushRoute(
-const LocalAlbumsRoute(),
-),
+onTap: () => context.pushRoute(const DriftLocalAlbumsRoute()),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
@ -347,12 +344,29 @@ class _LocalAlbumsCollectionCard extends ConsumerWidget {
crossAxisSpacing: 8,
mainAxisSpacing: 8,
physics: const NeverScrollableScrollPhysics(),
-children: albums.take(4).map((album) {
-return AlbumThumbnailCard(
-album: album,
-showTitle: false,
+children: albums.when(
+data: (data) {
+return data.take(4).map((album) {
+return LocalAlbumThumbnail(
+albumId: album.id,
);
-}).toList(),
+}).toList();
+},
+error: (error, _) {
+return [
+Center(
+child: Text('Error: $error'),
+),
+];
+},
+loading: () {
+return [
+const Center(
+child: CircularProgressIndicator(),
+),
+];
+},
+),
),
),
),
@ -498,7 +512,8 @@ class _PartnerList extends StatelessWidget {
fontWeight: FontWeight.w500,
),
).t(context: context, args: {'user': partner.name}),
-onTap: () => context.pushRoute(PartnerDetailRoute(partner: partner)),
+onTap: () =>
+context.pushRoute(DriftPartnerDetailRoute(partnerId: partner.id)),
);
},
);

View File

@ -0,0 +1,54 @@
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
class LocalAlbumThumbnail extends ConsumerWidget {
const LocalAlbumThumbnail({
super.key,
required this.albumId,
});
final String albumId;
@override
Widget build(BuildContext context, WidgetRef ref) {
final localAlbumThumbnail = ref.watch(localAlbumThumbnailProvider(albumId));
return localAlbumThumbnail.when(
data: (data) {
if (data == null) {
return Container(
decoration: BoxDecoration(
color: context.colorScheme.surfaceContainer,
borderRadius: const BorderRadius.all(Radius.circular(16)),
border: Border.all(
color: context.colorScheme.outline.withAlpha(50),
width: 1,
),
),
child: Icon(
Icons.collections,
size: 24,
color: context.primaryColor,
),
);
}
return ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(16)),
child: Thumbnail(
asset: data,
),
);
},
error: (error, stack) {
return const Icon(Icons.error, size: 24);
},
loading: () => const SizedBox(
width: 24,
height: 24,
child: Center(child: CircularProgressIndicator()),
),
);
}
}

View File

@ -12,7 +12,7 @@ class ThumbnailTile extends ConsumerWidget {
this.size = const Size.square(256),
this.fit = BoxFit.cover,
this.showStorageIndicator = true,
-this.canDeselect = true,
+this.lockSelection = false,
super.key,
});
@ -20,15 +20,13 @@ class ThumbnailTile extends ConsumerWidget {
final Size size;
final BoxFit fit;
final bool showStorageIndicator;
-/// If we are allowed to deselect this image
-final bool canDeselect;
+final bool lockSelection;
@override
Widget build(BuildContext context, WidgetRef ref) {
final assetContainerColor = context.isDarkTheme
-? context.primaryColor.darken(amount: 0.6)
-: context.primaryColor.lighten(amount: 0.8);
+? context.primaryColor.darken(amount: 0.4)
+: context.primaryColor.lighten(amount: 0.75);
final isSelected = ref.watch(
multiSelectProvider.select(
@ -36,24 +34,29 @@ class ThumbnailTile extends ConsumerWidget {
),
);
+final borderStyle = lockSelection
+? BoxDecoration(
+color: context.colorScheme.surfaceContainerHighest,
+border: Border.all(
+color: context.colorScheme.surfaceContainerHighest,
+width: 6,
+),
+)
+: isSelected
+? BoxDecoration(
+color: assetContainerColor,
+border: Border.all(color: assetContainerColor, width: 6),
+)
+: const BoxDecoration();
return Stack(
children: [
AnimatedContainer(
duration: Durations.short4,
curve: Curves.decelerate,
-decoration: BoxDecoration(
-color: isSelected
-? (canDeselect ? assetContainerColor : Colors.grey)
-: null,
-border: isSelected
-? Border.all(
-color: canDeselect ? assetContainerColor : Colors.grey,
-width: 8,
-)
-: const Border(),
-),
+decoration: borderStyle,
child: ClipRRect(
-borderRadius: isSelected
+borderRadius: isSelected || lockSelection
? const BorderRadius.all(Radius.circular(15.0))
: BorderRadius.zero,
child: Stack(
@ -102,14 +105,17 @@ class ThumbnailTile extends ConsumerWidget {
),
),
),
-if (isSelected)
+if (isSelected || lockSelection)
Padding(
padding: const EdgeInsets.all(3.0),
child: Align(
alignment: Alignment.topLeft,
child: _SelectionIndicator(
isSelected: isSelected,
-color: assetContainerColor,
+isLocked: lockSelection,
+color: lockSelection
+? context.colorScheme.surfaceContainerHighest
+: assetContainerColor,
),
),
),
@ -120,15 +126,29 @@ class ThumbnailTile extends ConsumerWidget {
class _SelectionIndicator extends StatelessWidget {
final bool isSelected;
+final bool isLocked;
final Color? color;
const _SelectionIndicator({
required this.isSelected,
+required this.isLocked,
this.color,
});
@override
Widget build(BuildContext context) {
-if (isSelected) {
+if (isLocked) {
+return Container(
+decoration: BoxDecoration(
+shape: BoxShape.circle,
+color: color,
+),
+child: const Icon(
+Icons.check_circle_rounded,
+color: Colors.grey,
+),
+);
+} else if (isSelected) {
return Container(
decoration: BoxDecoration(
shape: BoxShape.circle,

View File

@ -166,22 +166,22 @@ class _AssetTileWidget extends ConsumerWidget {
BaseAsset asset,
) {
final multiSelectState = ref.read(multiSelectProvider);
-if (!multiSelectState.isEnabled) {
+if (multiSelectState.forceEnable || multiSelectState.isEnabled) {
+ref.read(multiSelectProvider.notifier).toggleAssetSelection(asset);
+} else {
ctx.pushRoute(
AssetViewerRoute(
initialIndex: assetIndex,
timelineService: ref.read(timelineServiceProvider),
),
);
-return;
}
-ref.read(multiSelectProvider.notifier).toggleAssetSelection(asset);
}
void _handleOnLongPress(WidgetRef ref, BaseAsset asset) {
final multiSelectState = ref.read(multiSelectProvider);
-if (multiSelectState.isEnabled) {
+if (multiSelectState.isEnabled || multiSelectState.forceEnable) {
return;
}
@ -189,13 +189,35 @@ class _AssetTileWidget extends ConsumerWidget {
ref.read(multiSelectProvider.notifier).toggleAssetSelection(asset);
}
+bool _getLockSelectionStatus(WidgetRef ref) {
+final lockSelectionAssets = ref.read(
+multiSelectProvider.select(
+(state) => state.lockedSelectionAssets,
+),
+);
+if (lockSelectionAssets.isEmpty) {
+return false;
+}
+return lockSelectionAssets.contains(asset);
+}
@override
Widget build(BuildContext context, WidgetRef ref) {
+final lockSelection = _getLockSelectionStatus(ref);
return RepaintBoundary(
child: GestureDetector(
-onTap: () => _handleOnTap(context, ref, assetIndex, asset),
-onLongPress: () => _handleOnLongPress(ref, asset),
-child: ThumbnailTile(asset),
+onTap: () => lockSelection
+? null
+: _handleOnTap(context, ref, assetIndex, asset),
+onLongPress: () =>
+lockSelection ? null : _handleOnLongPress(ref, asset),
+child: ThumbnailTile(
+asset,
+lockSelection: lockSelection,
+),
),
);
}
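Combined with the dev-tools entry above, the round trip looks roughly like this; `pickAssets` and `locked` are hypothetical, and DLog comes from the dev page diff:

Future<void> pickAssets(BuildContext context, Set<BaseAsset> locked) async {
  // Locked assets render as permanently selected and cannot be toggled.
  final picked = await context.pushRoute<Set<BaseAsset>>(
    DriftAssetSelectionTimelineRoute(lockedSelectionAssets: locked),
  );
  DLog.log('Selected ${picked?.length ?? 0} assets');
}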

View File

@ -354,6 +354,8 @@ class ScrubberState extends ConsumerState<Scrubber>
isDragging: _isDragging,
),
),
+if (_scrollController.hasClients &&
+_scrollController.position.maxScrollExtent > 0)
PositionedDirectional(
top: _thumbTopOffset + widget.topPadding,
end: 0,

View File

@ -18,6 +18,7 @@ import 'package:immich_mobile/providers/infrastructure/setting.provider.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/widgets/common/immich_sliver_app_bar.dart';
+import 'package:immich_mobile/widgets/common/selection_sliver_app_bar.dart';
class Timeline extends StatelessWidget {
const Timeline({
@ -96,6 +97,10 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
final asyncSegments = ref.watch(timelineSegmentProvider);
final maxHeight =
ref.watch(timelineArgsProvider.select((args) => args.maxHeight));
+final isSelectionMode = ref.watch(
+multiSelectProvider.select((s) => s.forceEnable),
+);
return asyncSegments.widgetWhen(
onData: (segments) {
final childCount = (segments.lastOrNull?.lastIndex ?? -1) + 1;
@ -117,6 +122,9 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
primary: true,
cacheExtent: maxHeight * 2,
slivers: [
+if (isSelectionMode)
+const SelectionSliverAppBar()
+else
widget.appBar ??
const ImmichSliverAppBar(
floating: true,
@ -147,6 +155,7 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
],
),
),
+if (!isSelectionMode) ...[
Consumer(
builder: (_, consumerRef, child) {
final isMultiSelectEnabled = consumerRef.watch(
@ -182,6 +191,7 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
child: const HomeBottomAppBar(),
),
],
+],
),
);
},

View File

@ -1,4 +1,7 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
+import 'package:immich_mobile/domain/models/album/local_album.model.dart';
+import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
+import 'package:immich_mobile/domain/services/local_album.service.dart';
import 'package:immich_mobile/domain/services/remote_album.service.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/remote_album.repository.dart';
@ -9,6 +12,19 @@ final localAlbumRepository = Provider<DriftLocalAlbumRepository>(
(ref) => DriftLocalAlbumRepository(ref.watch(driftProvider)),
);
+final localAlbumServiceProvider = Provider<LocalAlbumService>(
+(ref) => LocalAlbumService(ref.watch(localAlbumRepository)),
+);
+final localAlbumProvider = FutureProvider<List<LocalAlbum>>(
+(ref) => LocalAlbumService(ref.watch(localAlbumRepository)).getAll(),
+);
+final localAlbumThumbnailProvider = FutureProvider.family<LocalAsset?, String>(
+(ref, albumId) =>
+LocalAlbumService(ref.watch(localAlbumRepository)).getThumbnail(albumId),
+);
final remoteAlbumRepository = Provider<DriftRemoteAlbumRepository>(
(ref) => DriftRemoteAlbumRepository(ref.watch(driftProvider)),
);

View File

@ -1,5 +1,6 @@
import 'package:collection/collection.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/services/timeline.service.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
@ -12,8 +13,14 @@ final multiSelectProvider =
class MultiSelectState {
final Set<BaseAsset> selectedAssets;
+final Set<BaseAsset> lockedSelectionAssets;
+final bool forceEnable;
-const MultiSelectState({required this.selectedAssets});
+const MultiSelectState({
+required this.selectedAssets,
+required this.lockedSelectionAssets,
+this.forceEnable = false,
+});
bool get isEnabled => selectedAssets.isNotEmpty;
bool get hasRemote => selectedAssets.any(
@ -25,33 +32,54 @@ class MultiSelectState {
(asset) => asset.storage == AssetState.local,
);
-MultiSelectState copyWith({Set<BaseAsset>? selectedAssets}) {
+MultiSelectState copyWith({
+Set<BaseAsset>? selectedAssets,
+Set<BaseAsset>? lockedSelectionAssets,
+bool? forceEnable,
+}) {
return MultiSelectState(
selectedAssets: selectedAssets ?? this.selectedAssets,
+lockedSelectionAssets:
+lockedSelectionAssets ?? this.lockedSelectionAssets,
+forceEnable: forceEnable ?? this.forceEnable,
);
}
@override
-String toString() => 'MultiSelectState(selectedAssets: $selectedAssets)';
+String toString() =>
+'MultiSelectState(selectedAssets: $selectedAssets, lockedSelectionAssets: $lockedSelectionAssets, forceEnable: $forceEnable)';
@override
bool operator ==(covariant MultiSelectState other) {
if (identical(this, other)) return true;
-final listEquals = const DeepCollectionEquality().equals;
+final setEquals = const DeepCollectionEquality().equals;
-return listEquals(other.selectedAssets, selectedAssets);
+return setEquals(other.selectedAssets, selectedAssets) &&
+setEquals(other.lockedSelectionAssets, lockedSelectionAssets) &&
+other.forceEnable == forceEnable;
}
@override
-int get hashCode => selectedAssets.hashCode;
+int get hashCode =>
+selectedAssets.hashCode ^
+lockedSelectionAssets.hashCode ^
+forceEnable.hashCode;
}
class MultiSelectNotifier extends Notifier<MultiSelectState> {
+MultiSelectNotifier([this._defaultState]);
+final MultiSelectState? _defaultState;
TimelineService get _timelineService => ref.read(timelineServiceProvider);
@override
MultiSelectState build() {
-return const MultiSelectState(selectedAssets: {});
+return _defaultState ??
+const MultiSelectState(
+selectedAssets: {},
+lockedSelectionAssets: {},
+forceEnable: false,
+);
}
void selectAsset(BaseAsset asset) {
@ -83,7 +111,11 @@ class MultiSelectNotifier extends Notifier<MultiSelectState> {
}
void reset() {
-state = const MultiSelectState(selectedAssets: {});
+state = const MultiSelectState(
+selectedAssets: {},
+lockedSelectionAssets: {},
+forceEnable: false,
+);
}
/// Bucket bulk operations
@ -131,6 +163,12 @@ class MultiSelectNotifier extends Notifier<MultiSelectState> {
state = state.copyWith(selectedAssets: selectedAssets);
}
+void setLockedSelectionAssets(Set<BaseAsset> assets) {
+state = state.copyWith(
+lockedSelectionAssets: assets,
+);
+}
}
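A small illustrative sketch of the new state contract, not part of the commit: forceEnable drives selection mode in timeline.widget.dart even while nothing is selected, whereas isEnabled still reflects the current selection; lockedSelectionAssets marks tiles that render as permanently selected (see ThumbnailTile above) and are filtered out of the result in selection_sliver_app_bar.dart below.

void selectionModeSemantics() {
  const state = MultiSelectState(
    selectedAssets: {},
    lockedSelectionAssets: {},
    forceEnable: true,
  );
  // Nothing is selected yet, so isEnabled is still false...
  assert(!state.isEnabled);
  // ...but the timeline still shows SelectionSliverAppBar, because
  // timeline.widget.dart watches forceEnable rather than isEnabled.
}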
final bucketSelectionProvider = Provider.family<bool, List<BaseAsset>>(

View File

@ -0,0 +1,31 @@
import 'package:auto_route/auto_route.dart';
import 'package:immich_mobile/routing/router.dart';
/// Handles duplicate navigation to this route (primarily for deep linking)
class GalleryGuard extends AutoRouteGuard {
const GalleryGuard();
@override
void onNavigation(NavigationResolver resolver, StackRouter router) async {
final newRouteName = resolver.route.name;
final currentTopRouteName =
router.stack.isNotEmpty ? router.stack.last.name : null;
if (currentTopRouteName == newRouteName) {
// Replace instead of pushing duplicate
final args = resolver.route.args as GalleryViewerRouteArgs;
router.replace(
GalleryViewerRoute(
renderList: args.renderList,
initialIndex: args.initialIndex,
heroOffset: args.heroOffset,
showStack: args.showStack,
),
);
// Prevent further navigation since we replaced the route
resolver.next(false);
return;
}
resolver.next(true);
}
}
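For reference, the guard is attached per route rather than globally; the router diff below registers it on GalleryViewerRoute in place of the duplicate guard:

CustomRoute(
  page: GalleryViewerRoute.page,
  guards: [_authGuard, _galleryGuard],
  transitionsBuilder: CustomTransitionsBuilders.zoomedPage,
),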

View File

@ -1,6 +1,7 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
+import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/domain/models/memory.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
@ -67,6 +68,9 @@ import 'package:immich_mobile/pages/search/recently_taken.page.dart';
import 'package:immich_mobile/pages/search/search.page.dart';
import 'package:immich_mobile/pages/share_intent/share_intent.page.dart';
import 'package:immich_mobile/presentation/pages/dev/drift_favorite.page.dart';
+import 'package:immich_mobile/presentation/pages/dev/drift_partner_detail.page.dart';
+import 'package:immich_mobile/presentation/pages/dev/drift_local_album.page.dart';
+import 'package:immich_mobile/presentation/pages/dev/drift_recently_taken.page.dart';
import 'package:immich_mobile/presentation/pages/dev/drift_video.page.dart';
import 'package:immich_mobile/presentation/pages/dev/drift_trash.page.dart';
import 'package:immich_mobile/presentation/pages/dev/drift_archive.page.dart';
@ -78,6 +82,7 @@ import 'package:immich_mobile/presentation/pages/dev/media_stat.page.dart';
import 'package:immich_mobile/presentation/pages/dev/remote_timeline.page.dart';
import 'package:immich_mobile/presentation/pages/drift_album.page.dart';
import 'package:immich_mobile/presentation/pages/drift_library.page.dart';
+import 'package:immich_mobile/presentation/pages/drift_asset_selection_timeline.page.dart';
import 'package:immich_mobile/presentation/pages/drift_memory.page.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.page.dart';
import 'package:immich_mobile/providers/api.provider.dart';
@ -86,6 +91,7 @@ import 'package:immich_mobile/routing/auth_guard.dart';
import 'package:immich_mobile/routing/backup_permission_guard.dart';
import 'package:immich_mobile/routing/custom_transition_builders.dart';
import 'package:immich_mobile/routing/duplicate_guard.dart';
+import 'package:immich_mobile/routing/gallery_guard.dart';
import 'package:immich_mobile/routing/locked_guard.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/services/local_auth.service.dart';
@ -111,6 +117,7 @@ class AppRouter extends RootStackRouter {
late final DuplicateGuard _duplicateGuard;
late final BackupPermissionGuard _backupPermissionGuard;
late final LockedGuard _lockedGuard;
+late final GalleryGuard _galleryGuard;
AppRouter(
ApiService apiService,
@ -123,6 +130,7 @@ class AppRouter extends RootStackRouter {
_lockedGuard =
LockedGuard(apiService, secureStorageService, localAuthService);
_backupPermissionGuard = BackupPermissionGuard(galleryPermissionNotifier);
+_galleryGuard = const GalleryGuard();
}
@override
@ -192,7 +200,7 @@ class AppRouter extends RootStackRouter {
),
CustomRoute(
page: GalleryViewerRoute.page,
-guards: [_authGuard, _duplicateGuard],
+guards: [_authGuard, _galleryGuard],
transitionsBuilder: CustomTransitionsBuilders.zoomedPage,
),
AutoRoute(
@ -422,6 +430,22 @@ class AppRouter extends RootStackRouter {
page: DriftLibraryRoute.page,
guards: [_authGuard, _duplicateGuard],
),
+AutoRoute(
+page: DriftAssetSelectionTimelineRoute.page,
+guards: [_authGuard, _duplicateGuard],
+),
+AutoRoute(
+page: DriftPartnerDetailRoute.page,
+guards: [_authGuard, _duplicateGuard],
+),
+AutoRoute(
+page: DriftRecentlyTakenRoute.page,
+guards: [_authGuard, _duplicateGuard],
+),
+AutoRoute(
+page: DriftLocalAlbumsRoute.page,
+guards: [_authGuard, _duplicateGuard],
+),
// required to handle all deeplinks in deep_link.service.dart
// auto_route_library#1722
RedirectRoute(path: '*', redirectTo: '/'),

View File

@ -634,6 +634,55 @@ class DriftArchiveRoute extends PageRouteInfo<void> {
);
}
/// generated route for
/// [DriftAssetSelectionTimelinePage]
class DriftAssetSelectionTimelineRoute
extends PageRouteInfo<DriftAssetSelectionTimelineRouteArgs> {
DriftAssetSelectionTimelineRoute({
Key? key,
Set<BaseAsset> lockedSelectionAssets = const {},
List<PageRouteInfo>? children,
}) : super(
DriftAssetSelectionTimelineRoute.name,
args: DriftAssetSelectionTimelineRouteArgs(
key: key,
lockedSelectionAssets: lockedSelectionAssets,
),
initialChildren: children,
);
static const String name = 'DriftAssetSelectionTimelineRoute';
static PageInfo page = PageInfo(
name,
builder: (data) {
final args = data.argsAs<DriftAssetSelectionTimelineRouteArgs>(
orElse: () => const DriftAssetSelectionTimelineRouteArgs(),
);
return DriftAssetSelectionTimelinePage(
key: args.key,
lockedSelectionAssets: args.lockedSelectionAssets,
);
},
);
}
class DriftAssetSelectionTimelineRouteArgs {
const DriftAssetSelectionTimelineRouteArgs({
this.key,
this.lockedSelectionAssets = const {},
});
final Key? key;
final Set<BaseAsset> lockedSelectionAssets;
@override
String toString() {
return 'DriftAssetSelectionTimelineRouteArgs{key: $key, lockedSelectionAssets: $lockedSelectionAssets}';
}
}
/// generated route for
/// [DriftFavoritePage]
class DriftFavoriteRoute extends PageRouteInfo<void> {
@ -666,6 +715,22 @@ class DriftLibraryRoute extends PageRouteInfo<void> {
);
}
/// generated route for
/// [DriftLocalAlbumsPage]
class DriftLocalAlbumsRoute extends PageRouteInfo<void> {
const DriftLocalAlbumsRoute({List<PageRouteInfo>? children})
: super(DriftLocalAlbumsRoute.name, initialChildren: children);
static const String name = 'DriftLocalAlbumsRoute';
static PageInfo page = PageInfo(
name,
builder: (data) {
return const DriftLocalAlbumsPage();
},
);
}
/// generated route for
/// [DriftLockedFolderPage]
class DriftLockedFolderRoute extends PageRouteInfo<void> {
@ -734,6 +799,60 @@ class DriftMemoryRouteArgs {
}
}
/// generated route for
/// [DriftPartnerDetailPage]
class DriftPartnerDetailRoute
extends PageRouteInfo<DriftPartnerDetailRouteArgs> {
DriftPartnerDetailRoute({
Key? key,
required String partnerId,
List<PageRouteInfo>? children,
}) : super(
DriftPartnerDetailRoute.name,
args: DriftPartnerDetailRouteArgs(key: key, partnerId: partnerId),
initialChildren: children,
);
static const String name = 'DriftPartnerDetailRoute';
static PageInfo page = PageInfo(
name,
builder: (data) {
final args = data.argsAs<DriftPartnerDetailRouteArgs>();
return DriftPartnerDetailPage(key: args.key, partnerId: args.partnerId);
},
);
}
class DriftPartnerDetailRouteArgs {
const DriftPartnerDetailRouteArgs({this.key, required this.partnerId});
final Key? key;
final String partnerId;
@override
String toString() {
return 'DriftPartnerDetailRouteArgs{key: $key, partnerId: $partnerId}';
}
}
/// generated route for
/// [DriftRecentlyTakenPage]
class DriftRecentlyTakenRoute extends PageRouteInfo<void> {
const DriftRecentlyTakenRoute({List<PageRouteInfo>? children})
: super(DriftRecentlyTakenRoute.name, initialChildren: children);
static const String name = 'DriftRecentlyTakenRoute';
static PageInfo page = PageInfo(
name,
builder: (data) {
return const DriftRecentlyTakenPage();
},
);
}
/// generated route for
/// [DriftTrashPage]
class DriftTrashRoute extends PageRouteInfo<void> {

View File

@ -106,7 +106,6 @@ class DeepLinkService {
Future<PageRouteInfo?> _buildAssetDeepLink(String assetId) async {
final asset = await _assetService.getAssetByRemoteId(assetId);
if (asset == null) {
return null;
}

View File

@ -0,0 +1,25 @@
import 'package:flutter/material.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
class LocalAlbumsSliverAppBar extends StatelessWidget {
const LocalAlbumsSliverAppBar({super.key});
@override
Widget build(BuildContext context) {
return SliverAppBar(
floating: true,
pinned: true,
snap: false,
backgroundColor: context.colorScheme.surfaceContainer,
shape: const RoundedRectangleBorder(
borderRadius: BorderRadius.all(Radius.circular(5)),
),
automaticallyImplyLeading: true,
centerTitle: true,
title: Text(
"on_this_device".t(context: context),
),
);
}
}

View File

@ -0,0 +1,77 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
class SelectionSliverAppBar extends ConsumerStatefulWidget {
const SelectionSliverAppBar({
super.key,
});
@override
ConsumerState<SelectionSliverAppBar> createState() =>
_SelectionSliverAppBarState();
}
class _SelectionSliverAppBarState extends ConsumerState<SelectionSliverAppBar> {
@override
Widget build(BuildContext context) {
final selection = ref.watch(
multiSelectProvider.select((s) => s.selectedAssets),
);
final toExclude = ref.watch(
multiSelectProvider.select((s) => s.lockedSelectionAssets),
);
final filteredAssets = selection.where((asset) {
return !toExclude.contains(asset);
}).toSet();
onDone(Set<BaseAsset> selected) {
ref.read(multiSelectProvider.notifier).reset();
context.maybePop<Set<BaseAsset>>(selected);
}
return SliverAppBar(
floating: true,
pinned: true,
snap: false,
backgroundColor: context.colorScheme.surfaceContainer,
shape: const RoundedRectangleBorder(
borderRadius: BorderRadius.all(Radius.circular(5)),
),
automaticallyImplyLeading: false,
leading: IconButton(
icon: const Icon(Icons.close_rounded),
onPressed: () {
ref.read(multiSelectProvider.notifier).reset();
context.pop<Set<BaseAsset>>(null);
},
),
centerTitle: true,
title: Text(
"Select {count}".t(
context: context,
args: {
'count': filteredAssets.length.toString(),
},
),
),
actions: [
TextButton(
onPressed: () => onDone(filteredAssets),
child: Text(
'done'.t(context: context),
style: context.textTheme.titleSmall?.copyWith(
color: context.colorScheme.primary,
),
),
),
],
);
}
}

View File

@ -107,25 +107,21 @@ const compare = async () => {
const { database } = configRepository.getEnv();
const db = postgres(asPostgresConnectionConfig(database.config));
-const source = schemaFromCode();
+const source = schemaFromCode({ overrides: true });
const target = await schemaFromDatabase(db, {});
-const sourceParams = new Set(source.parameters.map(({ name }) => name));
-target.parameters = target.parameters.filter(({ name }) => sourceParams.has(name));
-const sourceTables = new Set(source.tables.map(({ name }) => name));
-target.tables = target.tables.filter(({ name }) => sourceTables.has(name));
console.log(source.warnings.join('\n'));
const up = schemaDiff(source, target, {
tables: { ignoreExtra: true },
functions: { ignoreExtra: false },
parameters: { ignoreExtra: true },
});
const down = schemaDiff(target, source, {
-tables: { ignoreExtra: false },
+tables: { ignoreExtra: false, ignoreMissing: true },
functions: { ignoreExtra: false },
-extension: { ignoreMissing: true },
+extensions: { ignoreMissing: true },
parameters: { ignoreMissing: true },
});
return { up, down };

View File

@ -20,7 +20,6 @@ export const immich_uuid_v7 = registerFunction({
),
'hex')::uuid;
`,
-synchronize: false,
});
export const album_user_after_insert = registerFunction({
@ -33,7 +32,6 @@ export const album_user_after_insert = registerFunction({
WHERE "id" IN (SELECT DISTINCT "albumsId" FROM inserted_rows);
RETURN NULL;
END`,
-synchronize: false,
});
export const updated_at = registerFunction({
@ -48,7 +46,6 @@ export const updated_at = registerFunction({
new."updateId" = immich_uuid_v7(clock_timestamp);
return new;
END;`,
-synchronize: false,
});
export const f_concat_ws = registerFunction({
@ -59,7 +56,6 @@ export const f_concat_ws = registerFunction({
parallel: 'safe',
behavior: 'immutable',
body: `SELECT array_to_string($2, $1)`,
-synchronize: false,
});
export const f_unaccent = registerFunction({
@ -71,7 +67,6 @@ export const f_unaccent = registerFunction({
strict: true,
behavior: 'immutable',
return: `unaccent('unaccent', $1)`,
-synchronize: false,
});
export const ll_to_earth_public = registerFunction({
@ -83,7 +78,6 @@ export const ll_to_earth_public = registerFunction({
strict: true,
behavior: 'immutable',
body: `SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth`,
-synchronize: false,
});
export const users_delete_audit = registerFunction({
@ -97,7 +91,6 @@ export const users_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
-synchronize: false,
});
export const partners_delete_audit = registerFunction({
@ -111,7 +104,6 @@ export const partners_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
-synchronize: false,
});
export const assets_delete_audit = registerFunction({
@ -125,7 +117,6 @@ export const assets_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
-synchronize: false,
});
export const albums_delete_audit = registerFunction({
@ -139,7 +130,6 @@ export const albums_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
-synchronize: false,
});
export const album_assets_delete_audit = registerFunction({
@ -153,7 +143,6 @@ export const album_assets_delete_audit = registerFunction({
WHERE "albumsId" IN (SELECT "id" FROM albums WHERE "id" IN (SELECT "albumsId" FROM OLD));
RETURN NULL;
END`,
-synchronize: false,
});
export const album_users_delete_audit = registerFunction({
@ -174,7 +163,6 @@ export const album_users_delete_audit = registerFunction({
RETURN NULL;
END`,
-synchronize: false,
});
export const memories_delete_audit = registerFunction({
@ -188,7 +176,6 @@ export const memories_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
-synchronize: false,
});
export const memory_assets_delete_audit = registerFunction({
@ -202,7 +189,6 @@ export const memory_assets_delete_audit = registerFunction({
WHERE "memoriesId" IN (SELECT "id" FROM memories WHERE "id" IN (SELECT "memoriesId" FROM OLD));
RETURN NULL;
END`,
-synchronize: false,
});
export const stacks_delete_audit = registerFunction({
@ -216,5 +202,4 @@ export const stacks_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
-synchronize: false,
});

View File

@ -0,0 +1,66 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE TABLE "migration_overrides" ("name" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
await sql`ALTER TABLE "migration_overrides" ADD CONSTRAINT "migration_overrides_pkey" PRIMARY KEY ("name");`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_immich_uuid_v7', '{"type":"function","name":"immich_uuid_v7","sql":"CREATE OR REPLACE FUNCTION immich_uuid_v7(p_timestamp timestamp with time zone default clock_timestamp())\\n RETURNS uuid\\n VOLATILE LANGUAGE SQL\\n AS $$\\n SELECT encode(\\n set_bit(\\n set_bit(\\n overlay(uuid_send(gen_random_uuid())\\n placing substring(int8send(floor(extract(epoch from p_timestamp) * 1000)::bigint) from 3)\\n from 1 for 6\\n ),\\n 52, 1\\n ),\\n 53, 1\\n ),\\n ''hex'')::uuid;\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_user_after_insert', '{"type":"function","name":"album_user_after_insert","sql":"CREATE OR REPLACE FUNCTION album_user_after_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE albums SET \\"updatedAt\\" = clock_timestamp(), \\"updateId\\" = immich_uuid_v7(clock_timestamp())\\n WHERE \\"id\\" IN (SELECT DISTINCT \\"albumsId\\" FROM inserted_rows);\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_updated_at', '{"type":"function","name":"updated_at","sql":"CREATE OR REPLACE FUNCTION updated_at()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n DECLARE\\n clock_timestamp TIMESTAMP := clock_timestamp();\\n BEGIN\\n new.\\"updatedAt\\" = clock_timestamp;\\n new.\\"updateId\\" = immich_uuid_v7(clock_timestamp);\\n return new;\\n END;\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_f_concat_ws', '{"type":"function","name":"f_concat_ws","sql":"CREATE OR REPLACE FUNCTION f_concat_ws(text, text[])\\n RETURNS text\\n PARALLEL SAFE IMMUTABLE LANGUAGE SQL\\n AS $$SELECT array_to_string($2, $1)$$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_f_unaccent', '{"type":"function","name":"f_unaccent","sql":"CREATE OR REPLACE FUNCTION f_unaccent(text)\\n RETURNS text\\n PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL\\n RETURN unaccent(''unaccent'', $1)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_ll_to_earth_public', '{"type":"function","name":"ll_to_earth_public","sql":"CREATE OR REPLACE FUNCTION ll_to_earth_public(latitude double precision, longitude double precision)\\n RETURNS public.earth\\n PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL\\n AS $$SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth$$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_users_delete_audit', '{"type":"function","name":"users_delete_audit","sql":"CREATE OR REPLACE FUNCTION users_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO users_audit (\\"userId\\")\\n SELECT \\"id\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_partners_delete_audit', '{"type":"function","name":"partners_delete_audit","sql":"CREATE OR REPLACE FUNCTION partners_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO partners_audit (\\"sharedById\\", \\"sharedWithId\\")\\n SELECT \\"sharedById\\", \\"sharedWithId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_assets_delete_audit', '{"type":"function","name":"assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION assets_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO assets_audit (\\"assetId\\", \\"ownerId\\")\\n SELECT \\"id\\", \\"ownerId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_albums_delete_audit', '{"type":"function","name":"albums_delete_audit","sql":"CREATE OR REPLACE FUNCTION albums_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO albums_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"id\\", \\"ownerId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_assets_delete_audit', '{"type":"function","name":"album_assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_assets_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO album_assets_audit (\\"albumId\\", \\"assetId\\")\\n SELECT \\"albumsId\\", \\"assetsId\\" FROM OLD\\n WHERE \\"albumsId\\" IN (SELECT \\"id\\" FROM albums WHERE \\"id\\" IN (SELECT \\"albumsId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_users_delete_audit', '{"type":"function","name":"album_users_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_users_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO albums_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumsId\\", \\"usersId\\"\\n FROM OLD;\\n\\n IF pg_trigger_depth() = 1 THEN\\n INSERT INTO album_users_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumsId\\", \\"usersId\\"\\n FROM OLD;\\n END IF;\\n\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_memories_delete_audit', '{"type":"function","name":"memories_delete_audit","sql":"CREATE OR REPLACE FUNCTION memories_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO memories_audit (\\"memoryId\\", \\"userId\\")\\n SELECT \\"id\\", \\"ownerId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_memory_assets_delete_audit', '{"type":"function","name":"memory_assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION memory_assets_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO memory_assets_audit (\\"memoryId\\", \\"assetId\\")\\n SELECT \\"memoriesId\\", \\"assetsId\\" FROM OLD\\n WHERE \\"memoriesId\\" IN (SELECT \\"id\\" FROM memories WHERE \\"id\\" IN (SELECT \\"memoriesId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_stacks_delete_audit', '{"type":"function","name":"stacks_delete_audit","sql":"CREATE OR REPLACE FUNCTION stacks_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO stacks_audit (\\"stackId\\", \\"userId\\")\\n SELECT \\"id\\", \\"ownerId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_users_delete_audit', '{"type":"trigger","name":"users_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"users_delete_audit\\"\\n AFTER DELETE ON \\"users\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION users_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_users_updated_at', '{"type":"trigger","name":"users_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"users_updated_at\\"\\n BEFORE UPDATE ON \\"users\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_libraries_updated_at', '{"type":"trigger","name":"libraries_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"libraries_updated_at\\"\\n BEFORE UPDATE ON \\"libraries\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_stacks_delete_audit', '{"type":"trigger","name":"stacks_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"stacks_delete_audit\\"\\n AFTER DELETE ON \\"asset_stack\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION stacks_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_stacks_updated_at', '{"type":"trigger","name":"stacks_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"stacks_updated_at\\"\\n BEFORE UPDATE ON \\"asset_stack\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_assets_delete_audit', '{"type":"trigger","name":"assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"assets_delete_audit\\"\\n AFTER DELETE ON \\"assets\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION assets_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_assets_updated_at', '{"type":"trigger","name":"assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"assets_updated_at\\"\\n BEFORE UPDATE ON \\"assets\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_originalfilename_trigram', '{"type":"index","name":"idx_originalfilename_trigram","sql":"CREATE INDEX \\"idx_originalfilename_trigram\\" ON \\"assets\\" USING gin (f_unaccent(\\"originalFileName\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_local_date_time_month', '{"type":"index","name":"idx_local_date_time_month","sql":"CREATE INDEX \\"idx_local_date_time_month\\" ON \\"assets\\" ((date_trunc(''MONTH''::text, (\\"localDateTime\\" AT TIME ZONE ''UTC''::text)) AT TIME ZONE ''UTC''::text))"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_local_date_time', '{"type":"index","name":"idx_local_date_time","sql":"CREATE INDEX \\"idx_local_date_time\\" ON \\"assets\\" (((\\"localDateTime\\" at time zone ''UTC'')::date))"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_UQ_assets_owner_library_checksum', '{"type":"index","name":"UQ_assets_owner_library_checksum","sql":"CREATE UNIQUE INDEX \\"UQ_assets_owner_library_checksum\\" ON \\"assets\\" (\\"ownerId\\", \\"libraryId\\", \\"checksum\\") WHERE (\\"libraryId\\" IS NOT NULL)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_UQ_assets_owner_checksum', '{"type":"index","name":"UQ_assets_owner_checksum","sql":"CREATE UNIQUE INDEX \\"UQ_assets_owner_checksum\\" ON \\"assets\\" (\\"ownerId\\", \\"checksum\\") WHERE (\\"libraryId\\" IS NULL)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_albums_delete_audit', '{"type":"trigger","name":"albums_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"albums_delete_audit\\"\\n AFTER DELETE ON \\"albums\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION albums_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_albums_updated_at', '{"type":"trigger","name":"albums_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"albums_updated_at\\"\\n BEFORE UPDATE ON \\"albums\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_activity_updated_at', '{"type":"trigger","name":"activity_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"activity_updated_at\\"\\n BEFORE UPDATE ON \\"activity\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_IDX_activity_like', '{"type":"index","name":"IDX_activity_like","sql":"CREATE UNIQUE INDEX \\"IDX_activity_like\\" ON \\"activity\\" (\\"assetId\\", \\"userId\\", \\"albumId\\") WHERE (\\"isLiked\\" = true)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_assets_delete_audit', '{"type":"trigger","name":"album_assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"album_assets_delete_audit\\"\\n AFTER DELETE ON \\"albums_assets_assets\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() <= 1)\\n EXECUTE FUNCTION album_assets_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_assets_updated_at', '{"type":"trigger","name":"album_assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"album_assets_updated_at\\"\\n BEFORE UPDATE ON \\"albums_assets_assets\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_users_delete_audit', '{"type":"trigger","name":"album_users_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"album_users_delete_audit\\"\\n AFTER DELETE ON \\"albums_shared_users_users\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() <= 1)\\n EXECUTE FUNCTION album_users_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_user_after_insert', '{"type":"trigger","name":"album_user_after_insert","sql":"CREATE OR REPLACE TRIGGER \\"album_user_after_insert\\"\\n AFTER INSERT ON \\"albums_shared_users_users\\"\\n REFERENCING NEW TABLE AS \\"inserted_rows\\"\\n FOR EACH STATEMENT\\n EXECUTE FUNCTION album_user_after_insert();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_users_updated_at', '{"type":"trigger","name":"album_users_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"album_users_updated_at\\"\\n BEFORE UPDATE ON \\"albums_shared_users_users\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_api_keys_updated_at', '{"type":"trigger","name":"api_keys_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"api_keys_updated_at\\"\\n BEFORE UPDATE ON \\"api_keys\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_person_updated_at', '{"type":"trigger","name":"person_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"person_updated_at\\"\\n BEFORE UPDATE ON \\"person\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_files_updated_at', '{"type":"trigger","name":"asset_files_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"asset_files_updated_at\\"\\n BEFORE UPDATE ON \\"asset_files\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_exif_updated_at', '{"type":"trigger","name":"asset_exif_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"asset_exif_updated_at\\"\\n BEFORE UPDATE ON \\"exif\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_face_index', '{"type":"index","name":"face_index","sql":"CREATE INDEX \\"face_index\\" ON \\"face_search\\" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_IDX_geodata_gist_earthcoord', '{"type":"index","name":"IDX_geodata_gist_earthcoord","sql":"CREATE INDEX \\"IDX_geodata_gist_earthcoord\\" ON \\"geodata_places\\" (ll_to_earth_public(latitude, longitude))"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_name', '{"type":"index","name":"idx_geodata_places_name","sql":"CREATE INDEX \\"idx_geodata_places_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_admin2_name', '{"type":"index","name":"idx_geodata_places_admin2_name","sql":"CREATE INDEX \\"idx_geodata_places_admin2_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"admin2Name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_admin1_name', '{"type":"index","name":"idx_geodata_places_admin1_name","sql":"CREATE INDEX \\"idx_geodata_places_admin1_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"admin1Name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_alternate_names', '{"type":"index","name":"idx_geodata_places_alternate_names","sql":"CREATE INDEX \\"idx_geodata_places_alternate_names\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"alternateNames\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memories_delete_audit', '{"type":"trigger","name":"memories_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"memories_delete_audit\\"\\n AFTER DELETE ON \\"memories\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION memories_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memories_updated_at', '{"type":"trigger","name":"memories_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"memories_updated_at\\"\\n BEFORE UPDATE ON \\"memories\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memory_assets_delete_audit', '{"type":"trigger","name":"memory_assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"memory_assets_delete_audit\\"\\n AFTER DELETE ON \\"memories_assets_assets\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() <= 1)\\n EXECUTE FUNCTION memory_assets_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memory_assets_updated_at', '{"type":"trigger","name":"memory_assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"memory_assets_updated_at\\"\\n BEFORE UPDATE ON \\"memories_assets_assets\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_notifications_updated_at', '{"type":"trigger","name":"notifications_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"notifications_updated_at\\"\\n BEFORE UPDATE ON \\"notifications\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_partners_delete_audit', '{"type":"trigger","name":"partners_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"partners_delete_audit\\"\\n AFTER DELETE ON \\"partners\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION partners_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_partners_updated_at', '{"type":"trigger","name":"partners_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"partners_updated_at\\"\\n BEFORE UPDATE ON \\"partners\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_sessions_updated_at', '{"type":"trigger","name":"sessions_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"sessions_updated_at\\"\\n BEFORE UPDATE ON \\"sessions\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_session_sync_checkpoints_updated_at', '{"type":"trigger","name":"session_sync_checkpoints_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"session_sync_checkpoints_updated_at\\"\\n BEFORE UPDATE ON \\"session_sync_checkpoints\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_tags_updated_at', '{"type":"trigger","name":"tags_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"tags_updated_at\\"\\n BEFORE UPDATE ON \\"tags\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`DROP TABLE "migration_overrides";`.execute(db);
}
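For readability, this is the decoded `sql` payload of the first seeded override (`function_updated_at`), i.e. the same text as in the INSERT above with the JSON string escaping undone (indentation approximated). This is what a consumer gets back after the `::jsonb` cast:

CREATE OR REPLACE FUNCTION updated_at()
  RETURNS TRIGGER
  LANGUAGE PLPGSQL
  AS $$
  DECLARE
    clock_timestamp TIMESTAMP := clock_timestamp();
  BEGIN
    new."updatedAt" = clock_timestamp;
    new."updateId" = immich_uuid_v7(clock_timestamp);
    return new;
  END;
  $$;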

View File

@@ -44,7 +44,6 @@ import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
@Index({
name: 'idx_local_date_time',
expression: `(("localDateTime" at time zone 'UTC')::date)`,
synchronize: false,
})
@Index({
name: 'idx_local_date_time_month',
@@ -56,7 +55,6 @@ import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
name: 'idx_originalfilename_trigram',
using: 'gin',
expression: 'f_unaccent("originalFileName") gin_trgm_ops',
synchronize: false,
})
// For all assets, each originalpath must be unique per user and library
export class AssetTable {

View File

@@ -7,7 +7,6 @@ import { Column, ForeignKeyColumn, Index, Table } from 'src/sql-tools';
using: 'hnsw',
expression: `embedding vector_cosine_ops`,
with: 'ef_construction = 300, m = 16',
synchronize: false,
})
export class FaceSearchTable {
@ForeignKeyColumn(() => AssetFaceTable, {

View File

@@ -1,34 +1,29 @@
import { Column, Index, PrimaryColumn, Table, Timestamp } from 'src/sql-tools';
@Table({ name: 'geodata_places', synchronize: false })
@Table({ name: 'geodata_places' })
@Index({
name: 'idx_geodata_places_alternate_names',
using: 'gin',
expression: 'f_unaccent("alternateNames") gin_trgm_ops',
synchronize: false,
})
@Index({
name: 'idx_geodata_places_admin1_name',
using: 'gin',
expression: 'f_unaccent("admin1Name") gin_trgm_ops',
synchronize: false,
})
@Index({
name: 'idx_geodata_places_admin2_name',
using: 'gin',
expression: 'f_unaccent("admin2Name") gin_trgm_ops',
synchronize: false,
})
@Index({
name: 'idx_geodata_places_name',
using: 'gin',
expression: 'f_unaccent("name") gin_trgm_ops',
synchronize: false,
})
@Index({
name: 'IDX_geodata_gist_earthcoord',
expression: 'll_to_earth_public(latitude, longitude)',
synchronize: false,
})
export class GeodataPlacesTable {
@PrimaryColumn({ type: 'integer' })

View File

@@ -0,0 +1,69 @@
import { compareOverrides } from 'src/sql-tools/comparers/override.comparer';
import { DatabaseOverride, Reason } from 'src/sql-tools/types';
import { describe, expect, it } from 'vitest';
const testOverride: DatabaseOverride = {
name: 'test',
value: { type: 'function', name: 'test_func', sql: 'func implementation' },
synchronize: true,
};
describe('compareOverrides', () => {
describe('onExtra', () => {
it('should work', () => {
expect(compareOverrides.onExtra(testOverride)).toEqual([
{
type: 'OverrideDrop',
overrideName: 'test',
reason: Reason.MissingInSource,
},
]);
});
});
describe('onMissing', () => {
it('should work', () => {
expect(compareOverrides.onMissing(testOverride)).toEqual([
{
type: 'OverrideCreate',
override: testOverride,
reason: Reason.MissingInTarget,
},
]);
});
});
describe('onCompare', () => {
it('should work', () => {
expect(compareOverrides.onCompare(testOverride, testOverride)).toEqual([]);
});
it('should drop and recreate when the value changes', () => {
const source: DatabaseOverride = {
name: 'test',
value: {
type: 'function',
name: 'test_func',
sql: 'func implementation',
},
synchronize: true,
};
const target: DatabaseOverride = {
name: 'test',
value: {
type: 'function',
name: 'test_func',
sql: 'func implementation2',
},
synchronize: true,
};
expect(compareOverrides.onCompare(source, target)).toEqual([
{
override: source,
type: 'OverrideUpdate',
reason: expect.stringContaining('value is different'),
},
]);
});
});
});

View File

@@ -0,0 +1,29 @@
import { Comparer, DatabaseOverride, Reason } from 'src/sql-tools/types';
export const compareOverrides: Comparer<DatabaseOverride> = {
onMissing: (source) => [
{
type: 'OverrideCreate',
override: source,
reason: Reason.MissingInTarget,
},
],
onExtra: (target) => [
{
type: 'OverrideDrop',
overrideName: target.name,
reason: Reason.MissingInSource,
},
],
onCompare: (source, target) => {
if (source.value.name !== target.value.name || source.value.sql !== target.value.sql) {
const sourceValue = JSON.stringify(source.value);
const targetValue = JSON.stringify(target.value);
return [
{ type: 'OverrideUpdate', override: source, reason: `value is different (${sourceValue} vs ${targetValue})` },
];
}
return [];
},
};

View File

@@ -0,0 +1,74 @@
import {
BaseContextOptions,
DatabaseEnum,
DatabaseExtension,
DatabaseFunction,
DatabaseOverride,
DatabaseParameter,
DatabaseSchema,
DatabaseTable,
} from 'src/sql-tools/types';
const asOverrideKey = (type: string, name: string) => `${type}:${name}`;
export class BaseContext {
databaseName: string;
schemaName: string;
overrideTableName: string;
tables: DatabaseTable[] = [];
functions: DatabaseFunction[] = [];
enums: DatabaseEnum[] = [];
extensions: DatabaseExtension[] = [];
parameters: DatabaseParameter[] = [];
overrides: DatabaseOverride[] = [];
warnings: string[] = [];
constructor(options: BaseContextOptions) {
this.databaseName = options.databaseName ?? 'postgres';
this.schemaName = options.schemaName ?? 'public';
this.overrideTableName = options.overrideTableName ?? 'migration_overrides';
}
getTableByName(name: string) {
return this.tables.find((table) => table.name === name);
}
warn(context: string, message: string) {
this.warnings.push(`[${context}] ${message}`);
}
build(): DatabaseSchema {
const overrideMap = new Map<string, DatabaseOverride>();
for (const override of this.overrides) {
const { type, name } = override.value;
overrideMap.set(asOverrideKey(type, name), override);
}
for (const func of this.functions) {
func.override = overrideMap.get(asOverrideKey('function', func.name));
}
for (const { indexes, triggers } of this.tables) {
for (const index of indexes) {
index.override = overrideMap.get(asOverrideKey('index', index.name));
}
for (const trigger of triggers) {
trigger.override = overrideMap.get(asOverrideKey('trigger', trigger.name));
}
}
return {
databaseName: this.databaseName,
schemaName: this.schemaName,
tables: this.tables,
functions: this.functions,
enums: this.enums,
extensions: this.extensions,
parameters: this.parameters,
overrides: this.overrides,
warnings: this.warnings,
};
}
}
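A minimal usage sketch of the `type:name` linking that `build()` performs (field shapes abbreviated; the `expression`/`sql` strings are placeholders, not from this diff):

const ctx = new BaseContext({ databaseName: 'immich', schemaName: 'public' });
ctx.functions.push({ name: 'updated_at', expression: 'CREATE OR REPLACE FUNCTION updated_at() ...', synchronize: true });
ctx.overrides.push({
  name: 'function_updated_at',
  value: { type: 'function', name: 'updated_at', sql: 'CREATE OR REPLACE FUNCTION updated_at() ...' },
  synchronize: true,
});
const schema = ctx.build();
// schema.functions[0].override now points at the override entry above,
// because both resolve to the map key 'function:updated_at'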

View File

@@ -1,45 +1,25 @@
/* eslint-disable @typescript-eslint/no-unsafe-function-type */
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { ColumnOptions, TableOptions } from 'src/sql-tools/decorators';
import { asKey } from 'src/sql-tools/helpers';
import {
DatabaseColumn,
DatabaseEnum,
DatabaseExtension,
DatabaseFunction,
DatabaseParameter,
DatabaseSchema,
DatabaseTable,
SchemaFromCodeOptions,
} from 'src/sql-tools/types';
import { DatabaseColumn, DatabaseTable, SchemaFromCodeOptions } from 'src/sql-tools/types';
type TableMetadata = { options: TableOptions; object: Function; methodToColumn: Map<string | symbol, DatabaseColumn> };
export class SchemaBuilder {
databaseName: string;
schemaName: string;
tables: DatabaseTable[] = [];
functions: DatabaseFunction[] = [];
enums: DatabaseEnum[] = [];
extensions: DatabaseExtension[] = [];
parameters: DatabaseParameter[] = [];
warnings: string[] = [];
export class ProcessorContext extends BaseContext {
constructor(public options: SchemaFromCodeOptions) {
options.createForeignKeyIndexes = options.createForeignKeyIndexes ?? true;
options.overrides = options.overrides ?? false;
super(options);
}
classToTable: WeakMap<Function, DatabaseTable> = new WeakMap();
tableToMetadata: WeakMap<DatabaseTable, TableMetadata> = new WeakMap();
constructor(options: SchemaFromCodeOptions) {
this.databaseName = options.databaseName ?? 'postgres';
this.schemaName = options.schemaName ?? 'public';
}
getTableByObject(object: Function) {
return this.classToTable.get(object);
}
getTableByName(name: string) {
return this.tables.find((table) => table.name === name);
}
getTableMetadata(table: DatabaseTable) {
const metadata = this.tableToMetadata.get(table);
if (!metadata) {
@@ -92,10 +72,6 @@ export class SchemaBuilder {
return asKey('IDX_', table, items);
}
warn(context: string, message: string) {
this.warnings.push(`[${context}] ${message}`);
}
warnMissingTable(context: string, object: object, propertyName?: symbol | string) {
const label = object.constructor.name + (propertyName ? '.' + String(propertyName) : '');
this.warn(context, `Unable to find table (${label})`);
@@ -105,17 +81,4 @@ export class SchemaBuilder {
const label = object.constructor.name + (propertyName ? '.' + String(propertyName) : '');
this.warn(context, `Unable to find column (${label})`);
}
build(): DatabaseSchema {
return {
databaseName: this.databaseName,
schemaName: this.schemaName,
tables: this.tables,
functions: this.functions,
enums: this.enums,
extensions: this.extensions,
parameters: this.parameters,
warnings: this.warnings,
};
}
}

View File

@@ -0,0 +1,8 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { SchemaFromDatabaseOptions } from 'src/sql-tools/types';
export class ReaderContext extends BaseContext {
constructor(public options: SchemaFromDatabaseOptions) {
super(options);
}
}

View File

@@ -1,6 +1,6 @@
import { createHash } from 'node:crypto';
import { ColumnValue } from 'src/sql-tools/decorators/column.decorator';
import { Comparer, DatabaseColumn, IgnoreOptions, SchemaDiff } from 'src/sql-tools/types';
import { Comparer, DatabaseColumn, DatabaseOverride, IgnoreOptions, SchemaDiff } from 'src/sql-tools/types';
export const asMetadataKey = (name: string) => `sql-tools:${name}`;
@@ -56,6 +56,17 @@ export const haveEqualColumns = (sourceColumns?: string[], targetColumns?: strin
return setIsEqual(new Set(sourceColumns ?? []), new Set(targetColumns ?? []));
};
export const haveEqualOverrides = <T extends { override?: DatabaseOverride }>(source: T, target: T) => {
if (!source.override || !target.override) {
return false;
}
const sourceValue = source.override.value;
const targetValue = target.override.value;
return sourceValue.name === targetValue.name && sourceValue.sql === targetValue.sql;
};
export const compare = <T extends { name: string; synchronize: boolean }>(
sources: T[],
targets: T[],
@@ -72,7 +83,7 @@ export const compare = <T extends { name: string; synchronize: boolean }>(
const source = sourceMap[key];
const target = targetMap[key];
if (isIgnored(source, target, options)) {
if (isIgnored(source, target, options ?? true)) {
continue;
}
@@ -85,6 +96,14 @@ export const compare = <T extends { name: string; synchronize: boolean }>(
} else if (!source && target) {
items.push(...comparer.onExtra(target));
} else {
if (
haveEqualOverrides(
source as unknown as { override?: DatabaseOverride },
target as unknown as { override?: DatabaseOverride },
)
) {
continue;
}
items.push(...comparer.onCompare(source, target));
}
}
@@ -97,6 +116,9 @@ const isIgnored = (
target: { synchronize?: boolean } | undefined,
options: IgnoreOptions,
) => {
if (typeof options === 'boolean') {
return !options;
}
return (options.ignoreExtra && !source) || (options.ignoreMissing && !target);
};
@@ -165,3 +187,18 @@ export const asColumnComment = (tableName: string, columnName: string, comment:
export const asColumnList = (columns: string[]) => columns.map((column) => `"${column}"`).join(', ');
export const asForeignKeyConstraintName = (table: string, columns: string[]) => asKey('FK_', table, [...columns]);
export const asJsonString = (value: unknown): string => {
return `'${escape(JSON.stringify(value))}'::jsonb`;
};
const escape = (value: string) => {
return value
.replaceAll("'", "''")
.replaceAll(/[\\]/g, '\\\\')
.replaceAll(/[\b]/g, String.raw`\b`)
.replaceAll(/[\f]/g, String.raw`\f`)
.replaceAll(/[\n]/g, String.raw`\n`)
.replaceAll(/[\r]/g, String.raw`\r`)
.replaceAll(/[\t]/g, String.raw`\t`);
};
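As a sanity check, `asJsonString` applied to a shortened `f_unaccent` override value reproduces the literal style used by the migration near the top of this commit: single quotes are doubled for the SQL string, backslashes and control characters are re-escaped, and the result is tagged with a `::jsonb` cast (the `sql` value here is abbreviated for illustration):

asJsonString({ type: 'function', name: 'f_unaccent', sql: "RETURN unaccent('unaccent', $1)" });
// => '{"type":"function","name":"f_unaccent","sql":"RETURN unaccent(''unaccent'', $1)"}'::jsonb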

View File

@@ -1,13 +1,13 @@
import { asKey } from 'src/sql-tools/helpers';
import { ConstraintType, Processor } from 'src/sql-tools/types';
export const processCheckConstraints: Processor = (builder, items) => {
export const processCheckConstraints: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'checkConstraint')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@Check', object);
ctx.warnMissingTable('@Check', object);
continue;
}

View File

@@ -2,14 +2,14 @@ import { ColumnOptions } from 'src/sql-tools/decorators/column.decorator';
import { fromColumnValue } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processColumns: Processor = (builder, items) => {
export const processColumns: Processor = (ctx, items) => {
for (const {
type,
item: { object, propertyName, options },
} of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
const table = builder.getTableByObject(object.constructor);
const table = ctx.getTableByObject(object.constructor);
if (!table) {
builder.warnMissingTable(type === 'column' ? '@Column' : '@ForeignKeyColumn', object, propertyName);
ctx.warnMissingTable(type === 'column' ? '@Column' : '@ForeignKeyColumn', object, propertyName);
continue;
}
@@ -31,7 +31,7 @@ export const processColumns: Processor = (builder, items) => {
const isEnum = !!(options as ColumnOptions).enum;
builder.addColumn(
ctx.addColumn(
table,
{
name: columnName,

View File

@@ -1,12 +1,12 @@
import { fromColumnValue } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processConfigurationParameters: Processor = (builder, items) => {
export const processConfigurationParameters: Processor = (ctx, items) => {
for (const {
item: { options },
} of items.filter((item) => item.type === 'configurationParameter')) {
builder.parameters.push({
databaseName: builder.databaseName,
ctx.parameters.push({
databaseName: ctx.databaseName,
name: options.name,
value: fromColumnValue(options.value),
scope: options.scope,

View File

@@ -1,10 +1,10 @@
import { asSnakeCase } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processDatabases: Processor = (builder, items) => {
export const processDatabases: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'database')) {
builder.databaseName = options.name || asSnakeCase(object.name);
ctx.databaseName = options.name || asSnakeCase(object.name);
}
};

View File

@@ -1,8 +1,8 @@
import { Processor } from 'src/sql-tools/types';
export const processEnums: Processor = (builder, items) => {
export const processEnums: Processor = (ctx, items) => {
for (const { item } of items.filter((item) => item.type === 'enum')) {
// TODO log warnings if enum name is not unique
builder.enums.push(item);
ctx.enums.push(item);
}
};

View File

@@ -1,10 +1,14 @@
import { Processor } from 'src/sql-tools/types';
export const processExtensions: Processor = (builder, items) => {
export const processExtensions: Processor = (ctx, items) => {
if (ctx.options.extensions === false) {
return;
}
for (const {
item: { options },
} of items.filter((item) => item.type === 'extension')) {
builder.extensions.push({
ctx.extensions.push({
name: options.name,
synchronize: options.synchronize ?? true,
});

View File

@@ -1,25 +1,25 @@
import { asForeignKeyConstraintName, asKey } from 'src/sql-tools/helpers';
import { ActionType, ConstraintType, Processor } from 'src/sql-tools/types';
export const processForeignKeyColumns: Processor = (builder, items) => {
export const processForeignKeyColumns: Processor = (ctx, items) => {
for (const {
item: { object, propertyName, options, target },
} of items.filter((item) => item.type === 'foreignKeyColumn')) {
const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
if (!table) {
builder.warnMissingTable('@ForeignKeyColumn', object);
ctx.warnMissingTable('@ForeignKeyColumn', object);
continue;
}
if (!column) {
// should be impossible since they are pre-created in `column.processor.ts`
builder.warnMissingColumn('@ForeignKeyColumn', object, propertyName);
ctx.warnMissingColumn('@ForeignKeyColumn', object, propertyName);
continue;
}
const referenceTable = builder.getTableByObject(target());
const referenceTable = ctx.getTableByObject(target());
if (!referenceTable) {
builder.warnMissingTable('@ForeignKeyColumn', object, propertyName);
ctx.warnMissingTable('@ForeignKeyColumn', object, propertyName);
continue;
}

View File

@@ -1,20 +1,20 @@
import { asForeignKeyConstraintName } from 'src/sql-tools/helpers';
import { ActionType, ConstraintType, Processor } from 'src/sql-tools/types';
export const processForeignKeyConstraints: Processor = (builder, items, config) => {
export const processForeignKeyConstraints: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'foreignKeyConstraint')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@ForeignKeyConstraint', { name: 'referenceTable' });
ctx.warnMissingTable('@ForeignKeyConstraint', { name: 'referenceTable' });
continue;
}
const referenceTable = builder.getTableByObject(options.referenceTable());
const referenceTable = ctx.getTableByObject(options.referenceTable());
if (!referenceTable) {
const referenceTableName = options.referenceTable()?.name;
builder.warn(
ctx.warn(
'@ForeignKeyConstraint.referenceTable',
`Unable to find table` + (referenceTableName ? ` (${referenceTableName})` : ''),
);
@@ -25,16 +25,16 @@ export const processForeignKeyConstraints: Processor = (builder, items, config)
for (const columnName of options.columns) {
if (!table.columns.some(({ name }) => name === columnName)) {
const metadata = builder.getTableMetadata(table);
builder.warn('@ForeignKeyConstraint.columns', `Unable to find column (${metadata.object.name}.${columnName})`);
const metadata = ctx.getTableMetadata(table);
ctx.warn('@ForeignKeyConstraint.columns', `Unable to find column (${metadata.object.name}.${columnName})`);
missingColumn = true;
}
}
for (const columnName of options.referenceColumns || []) {
if (!referenceTable.columns.some(({ name }) => name === columnName)) {
const metadata = builder.getTableMetadata(referenceTable);
builder.warn(
const metadata = ctx.getTableMetadata(referenceTable);
ctx.warn(
'@ForeignKeyConstraint.referenceColumns',
`Unable to find column (${metadata.object.name}.${columnName})`,
);
@@ -67,9 +67,9 @@ export const processForeignKeyConstraints: Processor = (builder, items, config)
continue;
}
if (options.index || options.indexName || config.createForeignKeyIndexes) {
if (options.index || options.indexName || ctx.options.createForeignKeyIndexes) {
table.indexes.push({
name: options.indexName || builder.asIndexName(table.name, options.columns),
name: options.indexName || ctx.asIndexName(table.name, options.columns),
tableName: table.name,
columnNames: options.columns,
unique: false,

View File

@@ -1,8 +1,12 @@
import { Processor } from 'src/sql-tools/types';
export const processFunctions: Processor = (builder, items) => {
export const processFunctions: Processor = (ctx, items) => {
if (ctx.options.functions === false) {
return;
}
for (const { item } of items.filter((item) => item.type === 'function')) {
// TODO log warnings if function name is not unique
builder.functions.push(item);
ctx.functions.push(item);
}
};

View File

@@ -1,17 +1,17 @@
import { Processor } from 'src/sql-tools/types';
export const processIndexes: Processor = (builder, items, config) => {
export const processIndexes: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'index')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@Check', object);
ctx.warnMissingTable('@Check', object);
continue;
}
table.indexes.push({
name: options.name || builder.asIndexName(table.name, options.columns, options.where),
name: options.name || ctx.asIndexName(table.name, options.columns, options.where),
tableName: table.name,
unique: options.unique ?? false,
expression: options.expression,
@@ -28,15 +28,15 @@ export const processIndexes: Processor = (builder, items, config) => {
type,
item: { object, propertyName, options },
} of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
if (!table) {
builder.warnMissingTable('@Column', object);
ctx.warnMissingTable('@Column', object);
continue;
}
if (!column) {
// should be impossible since they are created in `column.processor.ts`
builder.warnMissingColumn('@Column', object, propertyName);
ctx.warnMissingColumn('@Column', object, propertyName);
continue;
}
@@ -45,12 +45,12 @@ export const processIndexes: Processor = (builder, items, config) => {
}
const isIndexRequested =
options.indexName || options.index || (type === 'foreignKeyColumn' && config.createForeignKeyIndexes);
options.indexName || options.index || (type === 'foreignKeyColumn' && ctx.options.createForeignKeyIndexes);
if (!isIndexRequested) {
continue;
}
const indexName = options.indexName || builder.asIndexName(table.name, [column.name]);
const indexName = options.indexName || ctx.asIndexName(table.name, [column.name]);
const isIndexPresent = table.indexes.some((index) => index.name === indexName);
if (isIndexPresent) {

View File

@@ -8,6 +8,7 @@ import { processForeignKeyColumns } from 'src/sql-tools/processors/foreign-key-c
import { processForeignKeyConstraints } from 'src/sql-tools/processors/foreign-key-constraint.processor';
import { processFunctions } from 'src/sql-tools/processors/function.processor';
import { processIndexes } from 'src/sql-tools/processors/index.processor';
import { processOverrides } from 'src/sql-tools/processors/override.processor';
import { processPrimaryKeyConstraints } from 'src/sql-tools/processors/primary-key-contraint.processor';
import { processTables } from 'src/sql-tools/processors/table.processor';
import { processTriggers } from 'src/sql-tools/processors/trigger.processor';
@@ -29,4 +30,5 @@ export const processors: Processor[] = [
processPrimaryKeyConstraints,
processIndexes,
processTriggers,
processOverrides,
];

View File

@@ -0,0 +1,50 @@
import { asFunctionCreate } from 'src/sql-tools/transformers/function.transformer';
import { asIndexCreate } from 'src/sql-tools/transformers/index.transformer';
import { asTriggerCreate } from 'src/sql-tools/transformers/trigger.transformer';
import { Processor } from 'src/sql-tools/types';
export const processOverrides: Processor = (ctx) => {
if (ctx.options.overrides === false) {
return;
}
for (const func of ctx.functions) {
if (!func.synchronize) {
continue;
}
ctx.overrides.push({
name: `function_${func.name}`,
value: { type: 'function', name: func.name, sql: asFunctionCreate(func) },
synchronize: true,
});
}
for (const { triggers, indexes } of ctx.tables) {
for (const trigger of triggers) {
if (!trigger.synchronize) {
continue;
}
ctx.overrides.push({
name: `trigger_${trigger.name}`,
value: { type: 'trigger', name: trigger.name, sql: asTriggerCreate(trigger) },
synchronize: true,
});
}
for (const index of indexes) {
if (!index.synchronize) {
continue;
}
if (index.expression || index.using || index.with || index.where) {
ctx.overrides.push({
name: `index_${index.name}`,
value: { type: 'index', name: index.name, sql: asIndexCreate(index) },
synchronize: true,
});
}
}
}
};
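Note that overrides are generated only for functions, triggers, and "non-trivial" indexes (those carrying an `expression`, `using`, `with`, or `where` clause); plain column indexes are fully reproducible from the schema and are skipped. A sketch of opting in, given that `ProcessorContext` defaults `overrides` to false:

import { schemaFromCode } from 'src/sql-tools/schema-from-code';

const schema = schemaFromCode({ overrides: true });
// schema.overrides now holds entries such as
// { name: 'function_updated_at', value: { type: 'function', name: 'updated_at', sql: '...' }, synchronize: true }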

View File

@@ -1,8 +1,8 @@
import { asKey } from 'src/sql-tools/helpers';
import { ConstraintType, Processor } from 'src/sql-tools/types';
export const processPrimaryKeyConstraints: Processor = (builder) => {
for (const table of builder.tables) {
export const processPrimaryKeyConstraints: Processor = (ctx) => {
for (const table of ctx.tables) {
const columnNames: string[] = [];
for (const column of table.columns) {
@@ -12,7 +12,7 @@ export const processPrimaryKeyConstraints: Processor = (builder) => {
}
if (columnNames.length > 0) {
const tableMetadata = builder.getTableMetadata(table);
const tableMetadata = ctx.getTableMetadata(table);
table.constraints.push({
type: ConstraintType.PRIMARY_KEY,
name: tableMetadata.options.primaryConstraintName || asPrimaryKeyConstraintName(table.name, columnNames),

View File

@@ -1,18 +1,18 @@
import { asSnakeCase } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processTables: Processor = (builder, items) => {
export const processTables: Processor = (ctx, items) => {
for (const {
item: { options, object },
} of items.filter((item) => item.type === 'table')) {
const test = builder.getTableByObject(object);
const test = ctx.getTableByObject(object);
if (test) {
throw new Error(
`Table ${test.name} has already been registered. Does ${object.name} have two @Table() decorators?`,
);
}
builder.addTable(
ctx.addTable(
{
name: options.name || asSnakeCase(object.name),
columns: [],

View File

@@ -2,13 +2,13 @@ import { TriggerOptions } from 'src/sql-tools/decorators/trigger.decorator';
import { asKey } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processTriggers: Processor = (builder, items) => {
export const processTriggers: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'trigger')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@Trigger', object);
ctx.warnMissingTable('@Trigger', object);
continue;
}

View File

@@ -1,13 +1,13 @@
import { asKey } from 'src/sql-tools/helpers';
import { ConstraintType, Processor } from 'src/sql-tools/types';
export const processUniqueConstraints: Processor = (builder, items) => {
export const processUniqueConstraints: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'uniqueConstraint')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@Unique', object);
ctx.warnMissingTable('@Unique', object);
continue;
}
@@ -28,15 +28,15 @@ export const processUniqueConstraints: Processor = (builder, items) => {
type,
item: { object, propertyName, options },
} of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
if (!table) {
builder.warnMissingTable('@Column', object);
ctx.warnMissingTable('@Column', object);
continue;
}
if (!column) {
// should be impossible since they are created in `column.processor.ts`
builder.warnMissingColumn('@Column', object, propertyName);
ctx.warnMissingColumn('@Column', object, propertyName);
continue;
}

View File

@@ -1,8 +1,8 @@
import { sql } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { ColumnType, DatabaseColumn, DatabaseReader } from 'src/sql-tools/types';
import { ColumnType, DatabaseColumn, Reader } from 'src/sql-tools/types';
export const readColumns: DatabaseReader = async (schema, db) => {
export const readColumns: Reader = async (ctx, db) => {
const columns = await db
.selectFrom('information_schema.columns as c')
.leftJoin('information_schema.element_types as o', (join) =>
@@ -42,13 +42,13 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// data type for ARRAYs
'o.data_type as array_type',
])
.where('table_schema', '=', schema.schemaName)
.where('table_schema', '=', ctx.schemaName)
.execute();
const enumRaw = await db
.selectFrom('pg_type')
.innerJoin('pg_namespace', (join) =>
join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', schema.schemaName),
join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', ctx.schemaName),
)
.where('typtype', '=', sql.lit('e'))
.select((eb) => [
@@ -61,13 +61,13 @@ export const readColumns: DatabaseReader = async (schema, db) => {
const enums = enumRaw.map((item) => ({ name: item.name, values: item.values.map(({ value }) => value) }));
for (const { name, values } of enums) {
schema.enums.push({ name, values, synchronize: true });
ctx.enums.push({ name, values, synchronize: true });
}
const enumMap = Object.fromEntries(enums.map((e) => [e.name, e.values]));
// add columns to tables
for (const column of columns) {
const table = schema.tables.find((table) => table.name === column.table_name);
const table = ctx.getTableByName(column.table_name);
if (!table) {
continue;
}
@@ -93,7 +93,7 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// array types
case 'ARRAY': {
if (!column.array_type) {
schema.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
ctx.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
continue;
}
item.type = column.array_type as ColumnType;
@@ -103,7 +103,7 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// enum types
case 'USER-DEFINED': {
if (!enumMap[column.udt_name]) {
schema.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
ctx.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
continue;
}

View File

@@ -1,6 +1,6 @@
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readComments: DatabaseReader = async (schema, db) => {
export const readComments: Reader = async (ctx, db) => {
const comments = await db
.selectFrom('pg_description as d')
.innerJoin('pg_class as c', 'd.objoid', 'c.oid')
@@ -20,7 +20,7 @@ export const readComments: DatabaseReader = async (schema, db) => {
for (const comment of comments) {
if (comment.object_type === 'r') {
const table = schema.tables.find((table) => table.name === comment.object_name);
const table = ctx.getTableByName(comment.object_name);
if (!table) {
continue;
}

View File

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
import { ActionType, ConstraintType, DatabaseReader } from 'src/sql-tools/types';
import { ActionType, ConstraintType, Reader } from 'src/sql-tools/types';
export const readConstraints: DatabaseReader = async (schema, db) => {
export const readConstraints: Reader = async (ctx, db) => {
const constraints = await db
.selectFrom('pg_constraint')
.innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_constraint.connamespace') // namespace
@@ -40,11 +40,11 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
.as('reference_column_names'),
eb.fn<string>('pg_get_constraintdef', ['pg_constraint.oid']).as('expression'),
])
.where('pg_namespace.nspname', '=', schema.schemaName)
.where('pg_namespace.nspname', '=', ctx.schemaName)
.execute();
for (const constraint of constraints) {
const table = schema.tables.find((table) => table.name === constraint.table_name);
const table = ctx.getTableByName(constraint.table_name);
if (!table) {
continue;
}
@@ -55,7 +55,7 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
// primary key constraint
case 'p': {
if (!constraint.column_names) {
schema.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
ctx.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
continue;
}
table.constraints.push({
@@ -71,7 +71,7 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
// foreign key constraint
case 'f': {
if (!constraint.column_names || !constraint.reference_table_name || !constraint.reference_column_names) {
schema.warnings.push(
ctx.warnings.push(
`Skipping CONSTRAINT "${constraintName}", missing either columns, referenced table, or referenced columns,`,
);
continue;

View File

@@ -1,6 +1,6 @@
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readExtensions: DatabaseReader = async (schema, db) => {
export const readExtensions: Reader = async (ctx, db) => {
const extensions = await db
.selectFrom('pg_catalog.pg_extension')
// .innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_catalog.pg_extension.extnamespace')
@ -9,6 +9,6 @@ export const readExtensions: DatabaseReader = async (schema, db) => {
.execute();
for (const { name } of extensions) {
schema.extensions.push({ name, synchronize: true });
ctx.extensions.push({ name, synchronize: true });
}
};

View File

@@ -1,14 +1,14 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readFunctions: DatabaseReader = async (schema, db) => {
export const readFunctions: Reader = async (ctx, db) => {
const routines = await db
.selectFrom('pg_proc as p')
.innerJoin('pg_namespace', 'pg_namespace.oid', 'p.pronamespace')
.leftJoin('pg_depend as d', (join) => join.onRef('d.objid', '=', 'p.oid').on('d.deptype', '=', sql.lit('e')))
.where('d.objid', 'is', sql.lit(null))
.where('p.prokind', '=', sql.lit('f'))
.where('pg_namespace.nspname', '=', schema.schemaName)
.where('pg_namespace.nspname', '=', ctx.schemaName)
.select((eb) => [
'p.proname as name',
eb.fn<string>('pg_get_function_identity_arguments', ['p.oid']).as('arguments'),
@@ -17,7 +17,7 @@ export const readFunctions: DatabaseReader = async (schema, db) => {
.execute();
for (const { name, expression } of routines) {
schema.functions.push({
ctx.functions.push({
name,
// TODO read expression from the overrides table
expression,

View File

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readIndexes: DatabaseReader = async (schema, db) => {
export const readIndexes: Reader = async (ctx, db) => {
const indexes = await db
.selectFrom('pg_index as ix')
// matching index, which has column information
@@ -34,12 +34,12 @@ export const readIndexes: DatabaseReader = async (schema, db) => {
.select((eb) => eb.fn<string[]>('json_agg', ['a.attname']).as('column_name'))
.as('column_names'),
])
.where('pg_namespace.nspname', '=', schema.schemaName)
.where('pg_namespace.nspname', '=', ctx.schemaName)
.where('ix.indisprimary', '=', sql.lit(false))
.execute();
for (const index of indexes) {
const table = schema.tables.find((table) => table.name === index.table_name);
const table = ctx.getTableByName(index.table_name);
if (!table) {
continue;
}

View File

@@ -5,13 +5,13 @@ import { readExtensions } from 'src/sql-tools/readers/extension.reader';
import { readFunctions } from 'src/sql-tools/readers/function.reader';
import { readIndexes } from 'src/sql-tools/readers/index.reader';
import { readName } from 'src/sql-tools/readers/name.reader';
import { readOverrides } from 'src/sql-tools/readers/override.reader';
import { readParameters } from 'src/sql-tools/readers/parameter.reader';
import { readTables } from 'src/sql-tools/readers/table.reader';
import { readTriggers } from 'src/sql-tools/readers/trigger.reader';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readers: DatabaseReader[] = [
//
export const readers: Reader[] = [
readName,
readParameters,
readExtensions,
@@ -22,4 +22,5 @@ export const readers: DatabaseReader[] = [
readConstraints,
readTriggers,
readComments,
readOverrides,
];

View File

@@ -1,8 +1,8 @@
import { QueryResult, sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readName: DatabaseReader = async (schema, db) => {
export const readName: Reader = async (ctx, db) => {
const result = (await sql`SELECT current_database() as name`.execute(db)) as QueryResult<{ name: string }>;
schema.databaseName = result.rows[0].name;
ctx.databaseName = result.rows[0].name;
};

View File

@@ -0,0 +1,19 @@
import { sql } from 'kysely';
import { OverrideType, Reader } from 'src/sql-tools/types';
export const readOverrides: Reader = async (ctx, db) => {
try {
const result = await sql
.raw<{
name: string;
value: { type: OverrideType; name: string; sql: string };
}>(`SELECT name, value FROM "${ctx.overrideTableName}"`)
.execute(db);
for (const { name, value } of result.rows) {
ctx.overrides.push({ name, value, synchronize: true });
}
} catch (error) {
ctx.warn('Overrides', `Error reading override table: ${error}`);
}
};
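The reader tolerates a missing override table (the catch clause downgrades the failure to a warning), and nothing in this file creates it. Based on the seeding migration near the top of this commit, the table it expects is presumably along these lines (hypothetical DDL sketch, not taken from this diff):

await sql`
  CREATE TABLE "migration_overrides" (
    "name" character varying NOT NULL PRIMARY KEY,
    "value" jsonb NOT NULL
  );
`.execute(db);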

View File

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
import { DatabaseReader, ParameterScope } from 'src/sql-tools/types';
import { ParameterScope, Reader } from 'src/sql-tools/types';
export const readParameters: DatabaseReader = async (schema, db) => {
export const readParameters: Reader = async (ctx, db) => {
const parameters = await db
.selectFrom('pg_settings')
.where('source', 'in', [sql.lit('database'), sql.lit('user')])
@@ -9,10 +9,10 @@ export const readParameters: DatabaseReader = async (schema, db) => {
.execute();
for (const parameter of parameters) {
schema.parameters.push({
ctx.parameters.push({
name: parameter.name,
value: parameter.value,
databaseName: schema.databaseName,
databaseName: ctx.databaseName,
scope: parameter.scope as ParameterScope,
synchronize: true,
});

View File

@@ -1,16 +1,16 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readTables: DatabaseReader = async (schema, db) => {
export const readTables: Reader = async (ctx, db) => {
const tables = await db
.selectFrom('information_schema.tables')
.where('table_schema', '=', schema.schemaName)
.where('table_schema', '=', ctx.schemaName)
.where('table_type', '=', sql.lit('BASE TABLE'))
.selectAll()
.execute();
for (const table of tables) {
schema.tables.push({
ctx.tables.push({
name: table.table_name,
columns: [],
indexes: [],

View File

@@ -1,6 +1,6 @@
import { DatabaseReader, TriggerAction, TriggerScope, TriggerTiming } from 'src/sql-tools/types';
import { Reader, TriggerAction, TriggerScope, TriggerTiming } from 'src/sql-tools/types';
export const readTriggers: DatabaseReader = async (schema, db) => {
export const readTriggers: Reader = async (ctx, db) => {
const triggers = await db
.selectFrom('pg_trigger as t')
.innerJoin('pg_proc as p', 't.tgfoid', 'p.oid')
@@ -21,12 +21,12 @@ export const readTriggers: DatabaseReader = async (schema, db) => {
'c.relname as table_name',
])
.where('t.tgisinternal', '=', false) // Exclude internal system triggers
.where('n.nspname', '=', schema.schemaName)
.where('n.nspname', '=', ctx.schemaName)
.execute();
// add triggers to tables
for (const trigger of triggers) {
const table = schema.tables.find((table) => table.name === trigger.table_name);
const table = ctx.getTableByName(trigger.table_name);
if (!table) {
continue;
}

View File

@@ -21,6 +21,7 @@ const fromColumn = (column: Partial<Omit<DatabaseColumn, 'tableName'>>): Databas
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: tableName,
@@ -55,6 +56,7 @@ const fromConstraint = (constraint?: DatabaseConstraint): DatabaseSchema => {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: tableName,
@@ -88,6 +90,7 @@ const fromIndex = (index?: DatabaseIndex): DatabaseSchema => {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: tableName,
@@ -161,6 +164,7 @@ const newSchema = (schema: {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables,
warnings: [],
};

View File

@@ -1,8 +1,10 @@
import { compareEnums } from 'src/sql-tools/comparers/enum.comparer';
import { compareExtensions } from 'src/sql-tools/comparers/extension.comparer';
import { compareFunctions } from 'src/sql-tools/comparers/function.comparer';
import { compareOverrides } from 'src/sql-tools/comparers/override.comparer';
import { compareParameters } from 'src/sql-tools/comparers/parameter.comparer';
import { compareTables } from 'src/sql-tools/comparers/table.comparer';
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { compare } from 'src/sql-tools/helpers';
import { transformers } from 'src/sql-tools/transformers';
import {
@@ -19,10 +21,11 @@ import {
export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, options: SchemaDiffOptions = {}) => {
const items = [
...compare(source.parameters, target.parameters, options.parameters, compareParameters),
...compare(source.extensions, target.extensions, options.extension, compareExtensions),
...compare(source.extensions, target.extensions, options.extensions, compareExtensions),
...compare(source.functions, target.functions, options.functions, compareFunctions),
...compare(source.enums, target.enums, options.enums, compareEnums),
...compare(source.tables, target.tables, options.tables, compareTables),
...compare(source.overrides, target.overrides, options.overrides, compareOverrides),
];
type SchemaName = SchemaDiff['type'];
@ -46,6 +49,9 @@ export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, optio
TriggerDrop: [],
ParameterSet: [],
ParameterReset: [],
OverrideCreate: [],
OverrideUpdate: [],
OverrideDrop: [],
};
for (const item of items) {
@ -76,6 +82,9 @@ export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, optio
...itemMap.TableDrop,
...itemMap.EnumDrop,
...itemMap.FunctionDrop,
...itemMap.OverrideCreate,
...itemMap.OverrideUpdate,
...itemMap.OverrideDrop,
];
return {
@ -88,17 +97,18 @@ export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, optio
* Convert schema diffs into SQL statements
*/
export const schemaDiffToSql = (items: SchemaDiff[], options: SchemaDiffToSqlOptions = {}): string[] => {
return items.flatMap((item) => asSql(item).map((result) => result + withComments(options.comments, item)));
return items.flatMap((item) => asSql(item, options));
};
const asSql = (item: SchemaDiff): string[] => {
const asSql = (item: SchemaDiff, options: SchemaDiffToSqlOptions): string[] => {
const ctx = new BaseContext(options);
for (const transform of transformers) {
const result = transform(item);
const result = transform(ctx, item);
if (!result) {
continue;
}
return asArray(result);
return asArray(result).map((result) => result + withComments(options.comments, item));
}
throw new Error(`Unhandled schema diff type: ${item.type}`);
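
Note: `schemaDiffToSql` now constructs a `BaseContext` from its options and appends comments per rendered statement inside `asSql`; a transformer that returns `false` falls through to the next one, and an unclaimed item throws. A sketch of rendering a hand-built diff item — the barrel import path and the exact comment format are assumptions; the item shape comes from the `SchemaDiff` union in types.ts:

import { schemaDiffToSql } from 'src/sql-tools'; // assumed barrel export
import { SchemaDiff } from 'src/sql-tools/types';

const items: SchemaDiff[] = [
  { type: 'EnumDrop', enumName: 'enum1', reason: 'removed from code' },
];

// comments are appended to each statement inside asSql when enabled
const statements = schemaDiffToSql(items, { comments: true });
// e.g. a DROP TYPE statement for "enum1" plus the reason as a trailing comment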

View File

@ -1,8 +1,16 @@
import { readdirSync } from 'node:fs';
import { join } from 'node:path';
import { schemaFromCode } from 'src/sql-tools/schema-from-code';
import { SchemaFromCodeOptions } from 'src/sql-tools/types';
import { describe, expect, it } from 'vitest';
const importModule = async (filePath: string) => {
const module = await import(filePath);
const options: SchemaFromCodeOptions = module.options;
return { module, options };
};
describe(schemaFromCode.name, () => {
it('should work', () => {
expect(schemaFromCode({ reset: true })).toEqual({
@ -12,6 +20,7 @@ describe(schemaFromCode.name, () => {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [],
warnings: [],
});
@ -22,9 +31,10 @@ describe(schemaFromCode.name, () => {
for (const file of errorStubs) {
const filePath = join(file.parentPath, file.name);
it(filePath, async () => {
const module = await import(filePath);
const { module, options } = await importModule(filePath);
expect(module.message).toBeDefined();
expect(() => schemaFromCode({ reset: true })).toThrowError(module.message);
expect(() => schemaFromCode({ ...options, reset: true })).toThrowError(module.message);
});
}
@ -36,10 +46,11 @@ describe(schemaFromCode.name, () => {
const filePath = join(file.parentPath, file.name);
it(filePath, async () => {
const module = await import(filePath);
const { module, options } = await importModule(filePath);
expect(module.description).toBeDefined();
expect(module.schema).toBeDefined();
expect(schemaFromCode({ reset: true }), module.description).toEqual(module.schema);
expect(schemaFromCode({ ...options, reset: true }), module.description).toEqual(module.schema);
});
}
});
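
Note: with the `importModule` helper, a stub can now ship its own `SchemaFromCodeOptions` alongside the expected output. A sketch of such a stub — the file contents, description, and the `databaseName`/`schemaName` defaults are illustrative assumptions:

import { DatabaseSchema, SchemaFromCodeOptions } from 'src/sql-tools/types';

export const description = 'an empty schema when nothing is registered';
export const options: SchemaFromCodeOptions = { createForeignKeyIndexes: false };

// shape mirrors the empty-schema expectation in the 'should work' test above;
// the databaseName and schemaName values are assumed defaults
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  parameters: [],
  functions: [],
  enums: [],
  extensions: [],
  overrides: [],
  tables: [],
  warnings: [],
};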

View File

@ -1,26 +1,58 @@
import { ProcessorContext } from 'src/sql-tools/contexts/processor-context';
import { processors } from 'src/sql-tools/processors';
import { getRegisteredItems, resetRegisteredItems } from 'src/sql-tools/register';
import { SchemaBuilder } from 'src/sql-tools/schema-builder';
import { SchemaFromCodeOptions } from 'src/sql-tools/types';
import { ConstraintType, SchemaFromCodeOptions } from 'src/sql-tools/types';
/**
* Load schema from code (decorators, etc)
*/
export const schemaFromCode = (options: SchemaFromCodeOptions = {}) => {
try {
const globalOptions = {
createForeignKeyIndexes: options.createForeignKeyIndexes ?? true,
};
const builder = new SchemaBuilder(options);
const ctx = new ProcessorContext(options);
const items = getRegisteredItems();
for (const processor of processors) {
processor(builder, items, globalOptions);
processor(ctx, items);
}
const newSchema = builder.build();
if (ctx.options.overrides) {
ctx.tables.push({
name: ctx.overrideTableName,
columns: [
{
primary: true,
name: 'name',
tableName: ctx.overrideTableName,
type: 'character varying',
nullable: false,
isArray: false,
synchronize: true,
},
{
name: 'value',
tableName: ctx.overrideTableName,
type: 'jsonb',
nullable: false,
isArray: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [
{
type: ConstraintType.PRIMARY_KEY,
name: `${ctx.overrideTableName}_pkey`,
tableName: ctx.overrideTableName,
columnNames: ['name'],
synchronize: true,
},
],
synchronize: true,
});
}
return newSchema;
return ctx.build();
} finally {
if (options.reset) {
resetRegisteredItems();
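
Note: when `overrides` is enabled, the builder appends a tracking table with a varchar primary key `name` and a jsonb `value`, named after `ctx.overrideTableName`. A sketch of enabling it — the table name shown here is an assumed value passed via the `overrideTableName` option from `BaseContextOptions`:

import { schemaFromCode } from 'src/sql-tools/schema-from-code';

const schema = schemaFromCode({
  overrides: true,                          // append the name/value tracking table
  overrideTableName: 'migration_overrides', // assumed name; a default comes from the context
  reset: true,
});

// the synthetic table carries a `${tableName}_pkey` primary key on "name"
const tracking = schema.tables.find((table) => table.name === 'migration_overrides');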

View File

@ -1,6 +1,7 @@
import { Kysely } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { Sql } from 'postgres';
import { ReaderContext } from 'src/sql-tools/contexts/reader-context';
import { readers } from 'src/sql-tools/readers';
import { DatabaseSchema, PostgresDB, SchemaFromDatabaseOptions } from 'src/sql-tools/types';
@ -11,23 +12,16 @@ export const schemaFromDatabase = async (
postgres: Sql,
options: SchemaFromDatabaseOptions = {},
): Promise<DatabaseSchema> => {
const schema: DatabaseSchema = {
databaseName: 'immich',
schemaName: options.schemaName || 'public',
parameters: [],
functions: [],
enums: [],
extensions: [],
tables: [],
warnings: [],
};
const db = new Kysely<PostgresDB>({ dialect: new PostgresJSDialect({ postgres }) });
const ctx = new ReaderContext(options);
try {
for (const reader of readers) {
await reader(schema, db);
await reader(ctx, db);
}
return ctx.build();
} finally {
await db.destroy();
return schema;
}
};
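
Note: the database path mirrors the code path — readers fill a `ReaderContext` and `ctx.build()` produces the schema (and the old quirk of returning from inside `finally` is gone). A minimal usage sketch, assuming a postgres.js connection and this import path:

import postgres from 'postgres';
import { schemaFromDatabase } from 'src/sql-tools/schema-from-database';

const sql = postgres('postgres://immich@localhost/immich'); // connection string is illustrative

const schema = await schemaFromDatabase(sql, { schemaName: 'public' });
console.log(schema.tables.map((table) => table.name));

await sql.end();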

View File

@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformColumns } from 'src/sql-tools/transformers/column.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformColumns.name, () => {
describe('ColumnAdd', () => {
it('should work', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAdd',
column: {
name: 'column1',
@ -22,7 +25,7 @@ describe(transformColumns.name, () => {
it('should add a nullable column', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAdd',
column: {
name: 'column1',
@ -39,7 +42,7 @@ describe(transformColumns.name, () => {
it('should add a column with an enum type', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAdd',
column: {
name: 'column1',
@ -57,7 +60,7 @@ describe(transformColumns.name, () => {
it('should add a column that is an array type', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAdd',
column: {
name: 'column1',
@ -76,7 +79,7 @@ describe(transformColumns.name, () => {
describe('ColumnAlter', () => {
it('should make a column nullable', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAlter',
tableName: 'table1',
columnName: 'column1',
@ -88,7 +91,7 @@ describe(transformColumns.name, () => {
it('should make a column non-nullable', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAlter',
tableName: 'table1',
columnName: 'column1',
@ -100,7 +103,7 @@ describe(transformColumns.name, () => {
it('should update the default value', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAlter',
tableName: 'table1',
columnName: 'column1',
@ -114,7 +117,7 @@ describe(transformColumns.name, () => {
describe('ColumnDrop', () => {
it('should work', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnDrop',
tableName: 'table1',
columnName: 'column1',

View File

@ -1,8 +1,8 @@
import { asColumnComment, getColumnModifiers, getColumnType } from 'src/sql-tools/helpers';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { ColumnChanges, DatabaseColumn, SchemaDiff } from 'src/sql-tools/types';
import { ColumnChanges, DatabaseColumn } from 'src/sql-tools/types';
export const transformColumns: SqlTransformer = (item: SchemaDiff) => {
export const transformColumns: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'ColumnAdd': {
return asColumnAdd(item.column);

View File

@ -1,13 +1,16 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformConstraints } from 'src/sql-tools/transformers/constraint.transformer';
import { ConstraintType } from 'src/sql-tools/types';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformConstraints.name, () => {
describe('ConstraintAdd', () => {
describe('primary keys', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintAdd',
constraint: {
type: ConstraintType.PRIMARY_KEY,
@ -25,7 +28,7 @@ describe(transformConstraints.name, () => {
describe('foreign keys', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintAdd',
constraint: {
type: ConstraintType.FOREIGN_KEY,
@ -47,7 +50,7 @@ describe(transformConstraints.name, () => {
describe('unique', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintAdd',
constraint: {
type: ConstraintType.UNIQUE,
@ -65,7 +68,7 @@ describe(transformConstraints.name, () => {
describe('check', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintAdd',
constraint: {
type: ConstraintType.CHECK,
@ -84,7 +87,7 @@ describe(transformConstraints.name, () => {
describe('ConstraintDrop', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintDrop',
tableName: 'table1',
constraintName: 'PK_test',

View File

@ -1,8 +1,8 @@
import { asColumnList } from 'src/sql-tools/helpers';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { ActionType, ConstraintType, DatabaseConstraint, SchemaDiff } from 'src/sql-tools/types';
import { ActionType, ConstraintType, DatabaseConstraint } from 'src/sql-tools/types';
export const transformConstraints: SqlTransformer = (item: SchemaDiff) => {
export const transformConstraints: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'ConstraintAdd': {
return asConstraintAdd(item.constraint);

View File

@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseEnum, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseEnum } from 'src/sql-tools/types';
export const transformEnums: SqlTransformer = (item: SchemaDiff) => {
export const transformEnums: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'EnumCreate': {
return asEnumCreate(item.enum);

View File

@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformExtensions } from 'src/sql-tools/transformers/extension.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformExtensions.name, () => {
describe('ExtensionDrop', () => {
it('should work', () => {
expect(
transformExtensions({
transformExtensions(ctx, {
type: 'ExtensionDrop',
extensionName: 'cube',
reason: 'unknown',
@ -17,7 +20,7 @@ describe(transformExtensions.name, () => {
describe('ExtensionCreate', () => {
it('should work', () => {
expect(
transformExtensions({
transformExtensions(ctx, {
type: 'ExtensionCreate',
extension: {
name: 'cube',

View File

@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseExtension, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseExtension } from 'src/sql-tools/types';
export const transformExtensions: SqlTransformer = (item: SchemaDiff) => {
export const transformExtensions: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'ExtensionCreate': {
return asExtensionCreate(item.extension);

View File

@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformFunctions } from 'src/sql-tools/transformers/function.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformFunctions.name, () => {
describe('FunctionDrop', () => {
it('should work', () => {
expect(
transformFunctions({
transformFunctions(ctx, {
type: 'FunctionDrop',
functionName: 'test_func',
reason: 'unknown',

View File

@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseFunction, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseFunction } from 'src/sql-tools/types';
export const transformFunctions: SqlTransformer = (item: SchemaDiff) => {
export const transformFunctions: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'FunctionCreate': {
return asFunctionCreate(item.function);
@ -17,7 +17,7 @@ export const transformFunctions: SqlTransformer = (item: SchemaDiff) => {
}
};
const asFunctionCreate = (func: DatabaseFunction): string => {
export const asFunctionCreate = (func: DatabaseFunction): string => {
return func.expression;
};

View File

@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformIndexes } from 'src/sql-tools/transformers/index.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformIndexes.name, () => {
describe('IndexCreate', () => {
it('should work', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@ -21,7 +24,7 @@ describe(transformIndexes.name, () => {
it('should create an unique index', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@ -37,7 +40,7 @@ describe(transformIndexes.name, () => {
it('should create an index with a custom expression', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@ -53,7 +56,7 @@ describe(transformIndexes.name, () => {
it('should create an index with a where clause', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@ -70,7 +73,7 @@ describe(transformIndexes.name, () => {
it('should create an index with a custom expression', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@ -89,7 +92,7 @@ describe(transformIndexes.name, () => {
describe('IndexDrop', () => {
it('should work', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexDrop',
indexName: 'IDX_test',
reason: 'unknown',

View File

@ -1,8 +1,8 @@
import { asColumnList } from 'src/sql-tools/helpers';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseIndex, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseIndex } from 'src/sql-tools/types';
export const transformIndexes: SqlTransformer = (item: SchemaDiff) => {
export const transformIndexes: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'IndexCreate': {
return asIndexCreate(item.index);

View File

@ -4,6 +4,7 @@ import { transformEnums } from 'src/sql-tools/transformers/enum.transformer';
import { transformExtensions } from 'src/sql-tools/transformers/extension.transformer';
import { transformFunctions } from 'src/sql-tools/transformers/function.transformer';
import { transformIndexes } from 'src/sql-tools/transformers/index.transformer';
import { transformOverrides } from 'src/sql-tools/transformers/override.transformer';
import { transformParameters } from 'src/sql-tools/transformers/parameter.transformer';
import { transformTables } from 'src/sql-tools/transformers/table.transformer';
import { transformTriggers } from 'src/sql-tools/transformers/trigger.transformer';
@ -19,4 +20,5 @@ export const transformers: SqlTransformer[] = [
transformParameters,
transformTables,
transformTriggers,
transformOverrides,
];

View File

@ -0,0 +1,37 @@
import { asJsonString } from 'src/sql-tools/helpers';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseOverride } from 'src/sql-tools/types';
export const transformOverrides: SqlTransformer = (ctx, item) => {
const tableName = ctx.overrideTableName;
switch (item.type) {
case 'OverrideCreate': {
return asOverrideCreate(tableName, item.override);
}
case 'OverrideUpdate': {
return asOverrideUpdate(tableName, item.override);
}
case 'OverrideDrop': {
return asOverrideDrop(tableName, item.overrideName);
}
default: {
return false;
}
}
};
export const asOverrideCreate = (tableName: string, override: DatabaseOverride): string => {
return `INSERT INTO "${tableName}" ("name", "value") VALUES ('${override.name}', ${asJsonString(override.value)});`;
};
export const asOverrideUpdate = (tableName: string, override: DatabaseOverride): string => {
return `UPDATE "${tableName}" SET "value" = ${asJsonString(override.value)} WHERE "name" = '${override.name}';`;
};
export const asOverrideDrop = (tableName: string, overrideName: string): string => {
return `DELETE FROM "${tableName}" WHERE "name" = '${overrideName}';`;
};
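
Note: the override transformer emits plain DML against the tracking table. A quick sketch of what each helper above produces — the override values and table name are illustrative, and the exact JSON rendering of `asJsonString` is not shown in this diff:

import { DatabaseOverride } from 'src/sql-tools/types';
import {
  asOverrideCreate,
  asOverrideDrop,
  asOverrideUpdate,
} from 'src/sql-tools/transformers/override.transformer';

// illustrative override; the value shape matches DatabaseOverride in types.ts
const override: DatabaseOverride = {
  name: 'function_example_fn',
  value: { name: 'example_fn', type: 'function', sql: 'CREATE FUNCTION example_fn() ...' },
  synchronize: true,
};

asOverrideCreate('migration_overrides', override);
// INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_example_fn', <json>);

asOverrideUpdate('migration_overrides', override);
// UPDATE "migration_overrides" SET "value" = <json> WHERE "name" = 'function_example_fn';

asOverrideDrop('migration_overrides', 'function_example_fn');
// DELETE FROM "migration_overrides" WHERE "name" = 'function_example_fn';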

View File

@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseParameter, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseParameter } from 'src/sql-tools/types';
export const transformParameters: SqlTransformer = (item: SchemaDiff) => {
export const transformParameters: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'ParameterSet': {
return asParameterSet(item.parameter);

View File

@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformTables } from 'src/sql-tools/transformers/table.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformTables.name, () => {
describe('TableDrop', () => {
it('should work', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableDrop',
tableName: 'table1',
reason: 'unknown',
@ -17,7 +20,7 @@ describe(transformTables.name, () => {
describe('TableCreate', () => {
it('should work', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',
@ -43,7 +46,7 @@ describe(transformTables.name, () => {
it('should handle a non-nullable column', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',
@ -69,7 +72,7 @@ describe(transformTables.name, () => {
it('should handle a default value', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',
@ -96,7 +99,7 @@ describe(transformTables.name, () => {
it('should handle a string with a fixed length', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',
@ -123,7 +126,7 @@ describe(transformTables.name, () => {
it('should handle an array type', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',

View File

@ -1,9 +1,9 @@
import { asColumnComment, getColumnModifiers, getColumnType } from 'src/sql-tools/helpers';
import { asColumnAlter } from 'src/sql-tools/transformers/column.transformer';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseTable, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseTable } from 'src/sql-tools/types';
export const transformTables: SqlTransformer = (item: SchemaDiff) => {
export const transformTables: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'TableCreate': {
return asTableCreate(item.table);

View File

@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformTriggers } from 'src/sql-tools/transformers/trigger.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformTriggers.name, () => {
describe('TriggerCreate', () => {
it('should work', () => {
expect(
transformTriggers({
transformTriggers(ctx, {
type: 'TriggerCreate',
trigger: {
name: 'trigger1',
@ -28,7 +31,7 @@ describe(transformTriggers.name, () => {
it('should work with multiple actions', () => {
expect(
transformTriggers({
transformTriggers(ctx, {
type: 'TriggerCreate',
trigger: {
name: 'trigger1',
@ -51,7 +54,7 @@ describe(transformTriggers.name, () => {
it('should work with old/new reference table aliases', () => {
expect(
transformTriggers({
transformTriggers(ctx, {
type: 'TriggerCreate',
trigger: {
name: 'trigger1',
@ -79,7 +82,7 @@ describe(transformTriggers.name, () => {
describe('TriggerDrop', () => {
it('should work', () => {
expect(
transformTriggers({
transformTriggers(ctx, {
type: 'TriggerDrop',
tableName: 'table1',
triggerName: 'trigger1',

View File

@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseTrigger, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseTrigger } from 'src/sql-tools/types';
export const transformTriggers: SqlTransformer = (item: SchemaDiff) => {
export const transformTriggers: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'TriggerCreate': {
return asTriggerCreate(item.trigger);

View File

@ -1,3 +1,4 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { SchemaDiff } from 'src/sql-tools/types';
export type SqlTransformer = (item: SchemaDiff) => string | string[] | false;
export type SqlTransformer = (ctx: BaseContext, item: SchemaDiff) => string | string[] | false;
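
Note: every transformer now takes the context first and keeps the same return contract — SQL string(s) when the diff type is handled, `false` to fall through (and `asSql` throws if no transformer claims the item). A hypothetical transformer illustrating the contract; `transformExample` is invented, and the `ParameterReset` shape comes from the `SchemaDiff` union:

import { SqlTransformer } from 'src/sql-tools/transformers/types';

// hypothetical: transformParameters already handles this type; the point is
// only the (ctx, item) => string | string[] | false contract, where ctx
// exposes options such as overrideTableName
export const transformExample: SqlTransformer = (ctx, item) => {
  switch (item.type) {
    case 'ParameterReset': {
      return `ALTER DATABASE "${item.databaseName}" RESET "${item.parameterName}";`;
    }
    default: {
      return false; // let the next transformer in the list try
    }
  }
};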

View File

@ -1,38 +1,49 @@
import { Kysely, ColumnType as KyselyColumnType } from 'kysely';
import { ProcessorContext } from 'src/sql-tools/contexts/processor-context';
import { ReaderContext } from 'src/sql-tools/contexts/reader-context';
import { RegisterItem } from 'src/sql-tools/register-item';
import { SchemaBuilder } from 'src/sql-tools/schema-builder';
export type SchemaFromCodeOptions = {
/** automatically create indexes on foreign key columns */
createForeignKeyIndexes?: boolean;
export type BaseContextOptions = {
databaseName?: string;
schemaName?: string;
overrideTableName?: string;
};
export type SchemaFromCodeOptions = BaseContextOptions & {
/** automatically create indexes on foreign key columns */
createForeignKeyIndexes?: boolean;
reset?: boolean;
functions?: boolean;
extensions?: boolean;
parameters?: boolean;
overrides?: boolean;
};
export type SchemaFromDatabaseOptions = {
schemaName?: string;
};
export type SchemaFromDatabaseOptions = BaseContextOptions;
export type SchemaDiffToSqlOptions = {
export type SchemaDiffToSqlOptions = BaseContextOptions & {
comments?: boolean;
};
export type SchemaDiffOptions = {
export type SchemaDiffOptions = BaseContextOptions & {
tables?: IgnoreOptions;
functions?: IgnoreOptions;
enums?: IgnoreOptions;
extension?: IgnoreOptions;
extensions?: IgnoreOptions;
parameters?: IgnoreOptions;
overrides?: IgnoreOptions;
};
export type IgnoreOptions = {
export type IgnoreOptions =
| boolean
| {
ignoreExtra?: boolean;
ignoreMissing?: boolean;
};
export type Processor = (builder: SchemaBuilder, items: RegisterItem[], options: SchemaFromCodeOptions) => void;
export type DatabaseReader = (schema: DatabaseSchema, db: DatabaseClient) => Promise<void>;
export type Processor = (ctx: ProcessorContext, items: RegisterItem[]) => void;
export type Reader = (ctx: ReaderContext, db: DatabaseClient) => Promise<void>;
export type PostgresDB = {
pg_am: {
@ -319,6 +330,7 @@ export type DatabaseSchema = {
tables: DatabaseTable[];
extensions: DatabaseExtension[];
parameters: DatabaseParameter[];
overrides: DatabaseOverride[];
warnings: string[];
};
@ -332,6 +344,14 @@ export type DatabaseParameter = {
export type ParameterScope = 'database' | 'user';
export type DatabaseOverride = {
name: string;
value: { name: string; type: OverrideType; sql: string };
synchronize: boolean;
};
export type OverrideType = 'function' | 'index' | 'trigger';
export type DatabaseEnum = {
name: string;
values: string[];
@ -342,6 +362,7 @@ export type DatabaseFunction = {
name: string;
expression: string;
synchronize: boolean;
override?: DatabaseOverride;
};
export type DatabaseExtension = {
@ -438,6 +459,7 @@ export type DatabaseTrigger = {
referencingOldTableAs?: string;
when?: string;
functionName: string;
override?: DatabaseOverride;
synchronize: boolean;
};
export type TriggerTiming = 'before' | 'after' | 'instead of';
@ -453,6 +475,7 @@ export type DatabaseIndex = {
using?: string;
with?: string;
where?: string;
override?: DatabaseOverride;
synchronize: boolean;
};
@ -476,6 +499,9 @@ export type SchemaDiff = { reason: string } & (
| { type: 'ParameterReset'; databaseName: string; parameterName: string }
| { type: 'EnumCreate'; enum: DatabaseEnum }
| { type: 'EnumDrop'; enumName: string }
| { type: 'OverrideCreate'; override: DatabaseOverride }
| { type: 'OverrideUpdate'; override: DatabaseOverride }
| { type: 'OverrideDrop'; overrideName: string }
);
export type CompareFunction<T> = (source: T, target: T) => SchemaDiff[];
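
Note: the new optional `override` field on functions, indexes, and triggers ties each object to its tracked definition in the override table. A sketch of the shape, with illustrative values only:

import { DatabaseFunction, DatabaseOverride } from 'src/sql-tools/types';

const override: DatabaseOverride = {
  name: 'function_example_fn',
  value: { name: 'example_fn', type: 'function', sql: 'CREATE OR REPLACE FUNCTION example_fn() ...' },
  synchronize: true,
};

const fn: DatabaseFunction = {
  name: 'example_fn',
  expression: override.value.sql,
  synchronize: true,
  override, // new optional back-pointer added in this change
};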

View File

@ -15,6 +15,7 @@ export const schema: DatabaseSchema = {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',

View File

@ -15,6 +15,7 @@ export const schema: DatabaseSchema = {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',

View File

@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',

View File

@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',

View File

@ -16,6 +16,7 @@ export const schema: DatabaseSchema = {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',

View File

@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',

View File

@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',

Some files were not shown because too many files have changed in this diff.