fix: partner asset and exif sync backfill (#19224)

* fix: partner asset sync backfill

* fix: add partner asset exif backfill

* ci: output content of files that have changed
Zack Pollard authored on 2025-06-17 14:56:54 +01:00, committed by GitHub
parent db68d1af9b
commit 749f63e4a0
21 changed files with 607 additions and 37 deletions

@@ -722,6 +722,7 @@ jobs:
run: |
echo "ERROR: Generated SQL files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
+ git diff
exit 1
# mobile-integration-tests:

@@ -31,12 +31,15 @@ class SyncEntityType {
static const assetDeleteV1 = SyncEntityType._(r'AssetDeleteV1');
static const assetExifV1 = SyncEntityType._(r'AssetExifV1');
static const partnerAssetV1 = SyncEntityType._(r'PartnerAssetV1');
+ static const partnerAssetBackfillV1 = SyncEntityType._(r'PartnerAssetBackfillV1');
static const partnerAssetDeleteV1 = SyncEntityType._(r'PartnerAssetDeleteV1');
static const partnerAssetExifV1 = SyncEntityType._(r'PartnerAssetExifV1');
+ static const partnerAssetExifBackfillV1 = SyncEntityType._(r'PartnerAssetExifBackfillV1');
static const albumV1 = SyncEntityType._(r'AlbumV1');
static const albumDeleteV1 = SyncEntityType._(r'AlbumDeleteV1');
static const albumUserV1 = SyncEntityType._(r'AlbumUserV1');
static const albumUserDeleteV1 = SyncEntityType._(r'AlbumUserDeleteV1');
+ static const syncAckV1 = SyncEntityType._(r'SyncAckV1');
/// List of all possible values in this [enum][SyncEntityType].
static const values = <SyncEntityType>[
@@ -48,12 +51,15 @@ class SyncEntityType {
assetDeleteV1,
assetExifV1,
partnerAssetV1,
+ partnerAssetBackfillV1,
partnerAssetDeleteV1,
partnerAssetExifV1,
+ partnerAssetExifBackfillV1,
albumV1,
albumDeleteV1,
albumUserV1,
albumUserDeleteV1,
+ syncAckV1,
];
static SyncEntityType? fromJson(dynamic value) => SyncEntityTypeTypeTransformer().decode(value);
@@ -100,12 +106,15 @@ class SyncEntityTypeTypeTransformer {
case r'AssetDeleteV1': return SyncEntityType.assetDeleteV1;
case r'AssetExifV1': return SyncEntityType.assetExifV1;
case r'PartnerAssetV1': return SyncEntityType.partnerAssetV1;
+ case r'PartnerAssetBackfillV1': return SyncEntityType.partnerAssetBackfillV1;
case r'PartnerAssetDeleteV1': return SyncEntityType.partnerAssetDeleteV1;
case r'PartnerAssetExifV1': return SyncEntityType.partnerAssetExifV1;
+ case r'PartnerAssetExifBackfillV1': return SyncEntityType.partnerAssetExifBackfillV1;
case r'AlbumV1': return SyncEntityType.albumV1;
case r'AlbumDeleteV1': return SyncEntityType.albumDeleteV1;
case r'AlbumUserV1': return SyncEntityType.albumUserV1;
case r'AlbumUserDeleteV1': return SyncEntityType.albumUserDeleteV1;
+ case r'SyncAckV1': return SyncEntityType.syncAckV1;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');

@@ -13699,12 +13699,15 @@
"AssetDeleteV1",
"AssetExifV1",
"PartnerAssetV1",
+ "PartnerAssetBackfillV1",
"PartnerAssetDeleteV1",
"PartnerAssetExifV1",
+ "PartnerAssetExifBackfillV1",
"AlbumV1",
"AlbumDeleteV1",
"AlbumUserV1",
- "AlbumUserDeleteV1"
+ "AlbumUserDeleteV1",
+ "SyncAckV1"
],
"type": "string"
},

@@ -4052,12 +4052,15 @@ export enum SyncEntityType {
AssetDeleteV1 = "AssetDeleteV1",
AssetExifV1 = "AssetExifV1",
PartnerAssetV1 = "PartnerAssetV1",
+ PartnerAssetBackfillV1 = "PartnerAssetBackfillV1",
PartnerAssetDeleteV1 = "PartnerAssetDeleteV1",
PartnerAssetExifV1 = "PartnerAssetExifV1",
+ PartnerAssetExifBackfillV1 = "PartnerAssetExifBackfillV1",
AlbumV1 = "AlbumV1",
AlbumDeleteV1 = "AlbumDeleteV1",
AlbumUserV1 = "AlbumUserV1",
- AlbumUserDeleteV1 = "AlbumUserDeleteV1"
+ AlbumUserDeleteV1 = "AlbumUserDeleteV1",
+ SyncAckV1 = "SyncAckV1"
}
export enum SyncRequestType {
UsersV1 = "UsersV1",

@@ -209,6 +209,7 @@ export type Partner = {
sharedWithId: string;
sharedWith: User;
createdAt: Date;
+ createId: string;
updatedAt: Date;
updateId: string;
inTimeline: boolean;

server/src/db.d.ts (vendored)

@@ -332,6 +332,7 @@ export interface PartnersAudit {
export interface Partners {
createdAt: Generated<Timestamp>;
+ createId: Generated<string>;
inTimeline: Generated<boolean>;
sharedById: string;
sharedWithId: string;

@@ -14,6 +14,9 @@ const GeneratedUuidV7Column = (options: Omit<ColumnOptions, 'type' | 'default' |
export const UpdateIdColumn = (options: Omit<ColumnOptions, 'type' | 'default' | 'nullable'> = {}) =>
GeneratedUuidV7Column(options);
+ export const CreateIdColumn = (options: Omit<ColumnOptions, 'type' | 'default' | 'nullable'> = {}) =>
+ GeneratedUuidV7Column(options);
export const PrimaryGeneratedUuidV7Column = () => GeneratedUuidV7Column({ primary: true });
export const UpdatedAtTrigger = (name: string) =>

@@ -154,12 +154,15 @@ export type SyncItem = {
[SyncEntityType.AssetDeleteV1]: SyncAssetDeleteV1;
[SyncEntityType.AssetExifV1]: SyncAssetExifV1;
[SyncEntityType.PartnerAssetV1]: SyncAssetV1;
+ [SyncEntityType.PartnerAssetBackfillV1]: SyncAssetV1;
[SyncEntityType.PartnerAssetDeleteV1]: SyncAssetDeleteV1;
[SyncEntityType.PartnerAssetExifV1]: SyncAssetExifV1;
+ [SyncEntityType.PartnerAssetExifBackfillV1]: SyncAssetExifV1;
[SyncEntityType.AlbumV1]: SyncAlbumV1;
[SyncEntityType.AlbumDeleteV1]: SyncAlbumDeleteV1;
[SyncEntityType.AlbumUserV1]: SyncAlbumUserV1;
[SyncEntityType.AlbumUserDeleteV1]: SyncAlbumUserDeleteV1;
+ [SyncEntityType.SyncAckV1]: object;
};
const responseDtos = [

@@ -595,13 +595,17 @@ export enum SyncEntityType {
AssetExifV1 = 'AssetExifV1',
PartnerAssetV1 = 'PartnerAssetV1',
+ PartnerAssetBackfillV1 = 'PartnerAssetBackfillV1',
PartnerAssetDeleteV1 = 'PartnerAssetDeleteV1',
PartnerAssetExifV1 = 'PartnerAssetExifV1',
+ PartnerAssetExifBackfillV1 = 'PartnerAssetExifBackfillV1',
AlbumV1 = 'AlbumV1',
AlbumDeleteV1 = 'AlbumDeleteV1',
AlbumUserV1 = 'AlbumUserV1',
AlbumUserDeleteV1 = 'AlbumUserDeleteV1',
+ SyncAckV1 = 'SyncAckV1',
}
export enum NotificationLevel {

@@ -96,6 +96,45 @@ where
order by
"updateId" asc
-- SyncRepository.getPartnerBackfill
select
"sharedById",
"createId"
from
"partners"
where
"sharedWithId" = $1
and "createId" >= $2
and "createdAt" < now() - interval '1 millisecond'
order by
"partners"."createId" asc
-- SyncRepository.getPartnerAssetsBackfill
select
"id",
"ownerId",
"originalFileName",
"thumbhash",
"checksum",
"fileCreatedAt",
"fileModifiedAt",
"localDateTime",
"type",
"deletedAt",
"isFavorite",
"visibility",
"updateId",
"duration"
from
"assets"
where
"ownerId" = $1
and "updatedAt" < now() - interval '1 millisecond'
and "updateId" < $2
and "updateId" >= $3
order by
"updateId" asc
-- SyncRepository.getPartnerAssetsUpserts
select
"id",
@@ -201,6 +240,45 @@ where
order by
"updateId" asc
-- SyncRepository.getPartnerAssetExifsBackfill
select
"exif"."assetId",
"exif"."description",
"exif"."exifImageWidth",
"exif"."exifImageHeight",
"exif"."fileSizeInByte",
"exif"."orientation",
"exif"."dateTimeOriginal",
"exif"."modifyDate",
"exif"."timeZone",
"exif"."latitude",
"exif"."longitude",
"exif"."projectionType",
"exif"."city",
"exif"."state",
"exif"."country",
"exif"."make",
"exif"."model",
"exif"."lensModel",
"exif"."fNumber",
"exif"."focalLength",
"exif"."iso",
"exif"."exposureTime",
"exif"."profileDescription",
"exif"."rating",
"exif"."fps",
"exif"."updateId"
from
"exif"
inner join "assets" on "assets"."id" = "exif"."assetId"
where
"assets"."ownerId" = $1
and "exif"."updatedAt" < now() - interval '1 millisecond'
and "exif"."updateId" < $2
and "exif"."updateId" >= $3
order by
"exif"."updateId" asc
-- SyncRepository.getPartnerAssetExifsUpserts
select
"exif"."assetId",

@@ -92,6 +92,31 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
getPartnerBackfill(userId: string, afterCreateId?: string) {
return this.db
.selectFrom('partners')
.select(['sharedById', 'createId'])
.where('sharedWithId', '=', userId)
.$if(!!afterCreateId, (qb) => qb.where('createId', '>=', afterCreateId!))
.where('createdAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
.orderBy('partners.createId', 'asc')
.execute();
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID, DummyValue.UUID], stream: true })
getPartnerAssetsBackfill(partnerId: string, afterUpdateId: string | undefined, beforeUpdateId: string) {
return this.db
.selectFrom('assets')
.select(columns.syncAsset)
.where('ownerId', '=', partnerId)
.where('updatedAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
.where('updateId', '<', beforeUpdateId)
.$if(!!afterUpdateId, (eb) => eb.where('updateId', '>=', afterUpdateId!))
.orderBy('updateId', 'asc')
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerAssetsUpserts(userId: string, ack?: SyncAck) {
return this.db
@@ -136,6 +136,20 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID, DummyValue.UUID], stream: true })
getPartnerAssetExifsBackfill(partnerId: string, afterUpdateId: string | undefined, beforeUpdateId: string) {
return this.db
.selectFrom('exif')
.select(columns.syncAssetExif)
.innerJoin('assets', 'assets.id', 'exif.assetId')
.where('assets.ownerId', '=', partnerId)
.where('exif.updatedAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
.where('exif.updateId', '<', beforeUpdateId)
.$if(!!afterUpdateId, (eb) => eb.where('exif.updateId', '>=', afterUpdateId!))
.orderBy('exif.updateId', 'asc')
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerAssetExifsUpserts(userId: string, ack?: SyncAck) {
return this.db
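
Both backfill queries above page by "updateId" inside a half-open window: rows at or after the resume point (when one exists) and strictly before the already-acked PartnerAssetV1 / PartnerAssetExifV1 checkpoint, i.e. exactly the rows the regular upsert stream skips because they predate its checkpoint. A standalone TypeScript sketch of that window check (plain arrays and string ids stand in for the Kysely queries; inBackfillWindow is an invented helper, not part of this commit):

// Illustrative only: the same updateId window the backfill queries express in SQL.
// UUIDv7 values sort chronologically, which is what makes the string comparison meaningful.
type Row = { updateId: string };

const inBackfillWindow = (row: Row, beforeUpdateId: string, afterUpdateId?: string) =>
  row.updateId < beforeUpdateId && (afterUpdateId === undefined || row.updateId >= afterUpdateId);

const rows: Row[] = [{ updateId: '01' }, { updateId: '02' }, { updateId: '03' }, { updateId: '04' }];

// First pass for a newly shared partner: everything strictly before the upsert checkpoint '04'.
console.log(rows.filter((row) => inBackfillWindow(row, '04')).map((row) => row.updateId)); // ['01', '02', '03']

// Resumed pass: same upper bound, restarting at the last acked id '02' (the lower bound is inclusive).
console.log(rows.filter((row) => inBackfillWindow(row, '04', '02')).map((row) => row.updateId)); // ['02', '03']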

@@ -0,0 +1,10 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "partners" ADD "createId" uuid NOT NULL DEFAULT immich_uuid_v7();`.execute(db);
await sql`UPDATE "partners" SET "createId" = immich_uuid_v7("createdAt")`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "partners" DROP COLUMN "createId";`.execute(db);
}

@@ -1,4 +1,4 @@
- import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
+ import { CreateIdColumn, UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { partners_delete_audit } from 'src/schema/functions';
import { UserTable } from 'src/schema/tables/user.table';
import { AfterDeleteTrigger, Column, CreateDateColumn, ForeignKeyColumn, Table, UpdateDateColumn } from 'src/sql-tools';
@@ -27,6 +27,9 @@ export class PartnerTable {
@CreateDateColumn()
createdAt!: Date;
+ @CreateIdColumn()
+ createId!: string;
@UpdateDateColumn()
updatedAt!: Date;

@@ -20,7 +20,7 @@ import { SyncAck } from 'src/types';
import { getMyPartnerIds } from 'src/utils/asset.util';
import { hexOrBufferToBase64 } from 'src/utils/bytes';
import { setIsEqual } from 'src/utils/set';
- import { fromAck, serialize } from 'src/utils/sync';
+ import { fromAck, serialize, toAck } from 'src/utils/sync';
const FULL_SYNC = { needsFullSync: true, deleted: [], upserted: [] };
export const SYNC_TYPES_ORDER = [
@@ -98,12 +98,12 @@ export class SyncService extends BaseService {
case SyncRequestType.UsersV1: {
const deletes = this.syncRepository.getUserDeletes(checkpointMap[SyncEntityType.UserDeleteV1]);
for await (const { id, ...data } of deletes) {
- response.write(serialize({ type: SyncEntityType.UserDeleteV1, updateId: id, data }));
+ response.write(serialize({ type: SyncEntityType.UserDeleteV1, ids: [id], data }));
}
const upserts = this.syncRepository.getUserUpserts(checkpointMap[SyncEntityType.UserV1]);
for await (const { updateId, ...data } of upserts) {
- response.write(serialize({ type: SyncEntityType.UserV1, updateId, data }));
+ response.write(serialize({ type: SyncEntityType.UserV1, ids: [updateId], data }));
}
break;
@@ -115,12 +115,12 @@ export class SyncService extends BaseService {
checkpointMap[SyncEntityType.PartnerDeleteV1],
);
for await (const { id, ...data } of deletes) {
- response.write(serialize({ type: SyncEntityType.PartnerDeleteV1, updateId: id, data }));
+ response.write(serialize({ type: SyncEntityType.PartnerDeleteV1, ids: [id], data }));
}
const upserts = this.syncRepository.getPartnerUpserts(auth.user.id, checkpointMap[SyncEntityType.PartnerV1]);
for await (const { updateId, ...data } of upserts) {
- response.write(serialize({ type: SyncEntityType.PartnerV1, updateId, data }));
+ response.write(serialize({ type: SyncEntityType.PartnerV1, ids: [updateId], data }));
}
break;
@@ -132,7 +132,7 @@ export class SyncService extends BaseService {
checkpointMap[SyncEntityType.AssetDeleteV1],
);
for await (const { id, ...data } of deletes) {
- response.write(serialize({ type: SyncEntityType.AssetDeleteV1, updateId: id, data }));
+ response.write(serialize({ type: SyncEntityType.AssetDeleteV1, ids: [id], data }));
}
const upserts = this.syncRepository.getAssetUpserts(auth.user.id, checkpointMap[SyncEntityType.AssetV1]);
@@ -140,7 +140,7 @@ export class SyncService extends BaseService {
response.write(
serialize({
type: SyncEntityType.AssetV1,
- updateId,
+ ids: [updateId],
data: {
...data,
checksum: hexOrBufferToBase64(checksum),
@@ -159,7 +159,60 @@ export class SyncService extends BaseService {
checkpointMap[SyncEntityType.PartnerAssetDeleteV1],
);
for await (const { id, ...data } of deletes) {
- response.write(serialize({ type: SyncEntityType.PartnerAssetDeleteV1, updateId: id, data }));
+ response.write(serialize({ type: SyncEntityType.PartnerAssetDeleteV1, ids: [id], data }));
}
const checkpoint = checkpointMap[SyncEntityType.PartnerAssetBackfillV1];
const partnerAssetCheckpoint = checkpointMap[SyncEntityType.PartnerAssetV1];
const partners = await this.syncRepository.getPartnerBackfill(auth.user.id, checkpoint?.updateId);
if (partnerAssetCheckpoint) {
for (const partner of partners) {
if (partner.createId === checkpoint?.updateId && checkpoint.extraId === 'complete') {
continue;
}
const partnerCheckpoint = checkpoint?.updateId === partner.createId ? checkpoint?.extraId : undefined;
const backfill = this.syncRepository.getPartnerAssetsBackfill(
partner.sharedById,
partnerCheckpoint,
partnerAssetCheckpoint.updateId,
);
for await (const { updateId, checksum, thumbhash, ...data } of backfill) {
response.write(
serialize({
type: SyncEntityType.PartnerAssetBackfillV1,
ids: [updateId],
data: {
...data,
checksum: hexOrBufferToBase64(checksum),
thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
},
}),
);
}
response.write(
serialize({
type: SyncEntityType.SyncAckV1,
data: {},
ackType: SyncEntityType.PartnerAssetBackfillV1,
ids: [partner.sharedById, 'complete'],
}),
);
}
} else if (partners.length > 0) {
await this.syncRepository.upsertCheckpoints([
{
type: SyncEntityType.PartnerAssetBackfillV1,
sessionId,
ack: toAck({
type: SyncEntityType.PartnerAssetBackfillV1,
updateId: partners.at(-1)!.createId,
extraId: 'complete',
}),
},
]);
}
const upserts = this.syncRepository.getPartnerAssetsUpserts(
@@ -170,7 +223,7 @@ export class SyncService extends BaseService {
response.write(
serialize({
type: SyncEntityType.PartnerAssetV1,
- updateId,
+ ids: [updateId],
data: {
...data,
checksum: hexOrBufferToBase64(checksum),
@@ -189,19 +242,74 @@ export class SyncService extends BaseService {
checkpointMap[SyncEntityType.AssetExifV1],
);
for await (const { updateId, ...data } of upserts) {
- response.write(serialize({ type: SyncEntityType.AssetExifV1, updateId, data }));
+ response.write(serialize({ type: SyncEntityType.AssetExifV1, ids: [updateId], data }));
}
break;
}
case SyncRequestType.PartnerAssetExifsV1: {
const checkpoint = checkpointMap[SyncEntityType.PartnerAssetExifBackfillV1];
const partnerAssetCheckpoint = checkpointMap[SyncEntityType.PartnerAssetExifV1];
const partners = await this.syncRepository.getPartnerBackfill(auth.user.id, checkpoint?.updateId);
if (partnerAssetCheckpoint) {
for (const partner of partners) {
if (partner.createId === checkpoint?.updateId && checkpoint.extraId === 'complete') {
continue;
}
const partnerCheckpoint = checkpoint?.updateId === partner.createId ? checkpoint?.extraId : undefined;
const backfill = this.syncRepository.getPartnerAssetExifsBackfill(
partner.sharedById,
partnerCheckpoint,
partnerAssetCheckpoint.updateId,
);
for await (const { updateId, ...data } of backfill) {
response.write(
serialize({
type: SyncEntityType.PartnerAssetExifBackfillV1,
ids: [updateId],
data,
}),
);
}
response.write(
serialize({
type: SyncEntityType.SyncAckV1,
data: {},
ackType: SyncEntityType.PartnerAssetExifBackfillV1,
ids: [partner.sharedById, 'complete'],
}),
);
}
} else if (partners.length > 0) {
await this.syncRepository.upsertCheckpoints([
{
type: SyncEntityType.PartnerAssetExifBackfillV1,
sessionId,
ack: toAck({
type: SyncEntityType.PartnerAssetExifBackfillV1,
updateId: partners.at(-1)!.createId,
extraId: 'complete',
}),
},
]);
}
const upserts = this.syncRepository.getPartnerAssetExifsUpserts(
auth.user.id,
checkpointMap[SyncEntityType.PartnerAssetExifV1],
);
for await (const { updateId, ...data } of upserts) {
- response.write(serialize({ type: SyncEntityType.PartnerAssetExifV1, updateId, data }));
+ response.write(
+ serialize({
+ type: SyncEntityType.PartnerAssetExifV1,
+ ids: [updateId],
+ data,
+ }),
+ );
}
break;
@@ -213,12 +321,12 @@ export class SyncService extends BaseService {
checkpointMap[SyncEntityType.AlbumDeleteV1],
);
for await (const { id, ...data } of deletes) {
- response.write(serialize({ type: SyncEntityType.AlbumDeleteV1, updateId: id, data }));
+ response.write(serialize({ type: SyncEntityType.AlbumDeleteV1, ids: [id], data }));
}
const upserts = this.syncRepository.getAlbumUpserts(auth.user.id, checkpointMap[SyncEntityType.AlbumV1]);
for await (const { updateId, ...data } of upserts) {
- response.write(serialize({ type: SyncEntityType.AlbumV1, updateId, data }));
+ response.write(serialize({ type: SyncEntityType.AlbumV1, ids: [updateId], data }));
}
break;
@@ -230,7 +338,7 @@ export class SyncService extends BaseService {
checkpointMap[SyncEntityType.AlbumUserDeleteV1],
);
for await (const { id, ...data } of deletes) {
- response.write(serialize({ type: SyncEntityType.AlbumUserDeleteV1, updateId: id, data }));
+ response.write(serialize({ type: SyncEntityType.AlbumUserDeleteV1, ids: [id], data }));
}
const upserts = this.syncRepository.getAlbumUserUpserts(
@@ -238,7 +346,7 @@ export class SyncService extends BaseService {
checkpointMap[SyncEntityType.AlbumUserV1],
);
for await (const { updateId, ...data } of upserts) {
- response.write(serialize({ type: SyncEntityType.AlbumUserV1, updateId, data }));
+ response.write(serialize({ type: SyncEntityType.AlbumUserV1, ids: [updateId], data }));
}
break;
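
Both the PartnerAssetsV1 and PartnerAssetExifsV1 branches above apply the same checkpoint rules: a backfill ack ending in 'complete' marks a partner as already backfilled, while any other ack recorded for that partner supplies the point to resume from. The following self-contained TypeScript sketch (illustrative only, not code from this commit; planBackfill, the sample partners, and the checkpoint values are invented) mirrors those two conditions:

// Illustrative mirror of the skip/resume conditions used in the backfill loops above.
type Checkpoint = { updateId: string; extraId?: string };
type Partner = { sharedById: string; createId: string };

const planBackfill = (partners: Partner[], checkpoint?: Checkpoint): string[] =>
  partners.map((partner) => {
    // Skip: this partner's backfill was already acknowledged as complete.
    if (partner.createId === checkpoint?.updateId && checkpoint?.extraId === 'complete') {
      return `skip ${partner.sharedById}: backfill already complete`;
    }
    // Resume: a non-'complete' checkpoint for this partner records where the last stream stopped.
    const resumeFrom = checkpoint?.updateId === partner.createId ? checkpoint?.extraId : undefined;
    return resumeFrom
      ? `resume backfill for ${partner.sharedById} from ${resumeFrom}`
      : `backfill ${partner.sharedById} from the start, then ack '...|complete'`;
  });

// Example: one partner already completed, one newly added partner still pending.
const partners: Partner[] = [
  { sharedById: 'partner-user-a', createId: 'create-id-1' },
  { sharedById: 'partner-user-b', createId: 'create-id-2' },
];
console.log(planBackfill(partners, { updateId: 'create-id-1', extraId: 'complete' }));
// -> ['skip partner-user-a: backfill already complete',
//     "backfill partner-user-b from the start, then ack '...|complete'"]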

@@ -421,6 +421,7 @@ export interface IBulkAsset {
export type SyncAck = {
type: SyncEntityType;
updateId: string;
+ extraId?: string;
};
export type StorageAsset = {

@@ -9,20 +9,23 @@ type Impossible<K extends keyof any> = {
type Exact<T, U extends T = T> = U & Impossible<Exclude<keyof U, keyof T>>;
export const fromAck = (ack: string): SyncAck => {
- const [type, updateId] = ack.split('|');
- return { type: type as SyncEntityType, updateId };
+ const [type, updateId, extraId] = ack.split('|');
+ return { type: type as SyncEntityType, updateId, extraId };
};
- export const toAck = ({ type, updateId }: SyncAck) => [type, updateId].join('|');
+ export const toAck = ({ type, updateId, extraId }: SyncAck) =>
+ [type, updateId, extraId].filter((v) => v !== undefined).join('|');
export const mapJsonLine = (object: unknown) => JSON.stringify(object) + '\n';
export const serialize = <T extends keyof SyncItem, D extends SyncItem[T]>({
type,
- updateId,
data,
+ ids,
+ ackType,
}: {
type: T;
- updateId: string;
data: Exact<SyncItem[T], D>;
+ ids: [string] | [string, string];
+ ackType?: SyncEntityType;
- }) => mapJsonLine({ type, data, ack: toAck({ type, updateId }) });
+ }) => mapJsonLine({ type, data, ack: toAck({ type: ackType ?? type, updateId: ids[0], extraId: ids[1] }) });
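
For reference, the ack string these helpers produce is just the entity type, an id, and an optional extra segment joined with '|'. A standalone TypeScript snippet (adapted from the diff above; the SyncAck type is simplified to plain strings instead of the SyncEntityType enum, and the ids are made-up placeholders) shows the round trip:

// Round-trip demo of the extended ack format introduced above.
type SyncAck = { type: string; updateId: string; extraId?: string };

const fromAck = (ack: string): SyncAck => {
  const [type, updateId, extraId] = ack.split('|');
  return { type, updateId, extraId };
};

const toAck = ({ type, updateId, extraId }: SyncAck) =>
  [type, updateId, extraId].filter((v) => v !== undefined).join('|');

// A regular per-item ack has two segments:
console.log(toAck({ type: 'PartnerAssetV1', updateId: 'example-update-id' }));
// -> 'PartnerAssetV1|example-update-id'

// A backfill completion ack carries the extra 'complete' marker as a third segment:
const ack = toAck({ type: 'PartnerAssetBackfillV1', updateId: 'example-id', extraId: 'complete' });
console.log(ack); // -> 'PartnerAssetBackfillV1|example-id|complete'
console.log(fromAck(ack)); // -> { type: 'PartnerAssetBackfillV1', updateId: 'example-id', extraId: 'complete' }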

@@ -33,7 +33,7 @@ import { BaseService } from 'src/services/base.service';
import { SyncService } from 'src/services/sync.service';
import { RepositoryInterface } from 'src/types';
import { factory, newDate, newEmbedding, newUuid } from 'test/small.factory';
- import { automock, ServiceOverrides } from 'test/utils';
+ import { automock, ServiceOverrides, wait } from 'test/utils';
import { Mocked } from 'vitest';
const sha256 = (value: string) => createHash('sha256').update(value).digest('base64');
@@ -120,7 +120,7 @@ export const newSyncTest = (options: SyncTestOptions) => {
const testSync = async (auth: AuthDto, types: SyncRequestType[]) => {
const stream = mediumFactory.syncStream();
// Wait for 2ms to ensure all updates are available and account for setTimeout inaccuracy
- await new Promise((resolve) => setTimeout(resolve, 2));
+ await wait(2);
await sut.stream(auth, stream, { types });
return stream.getResponse();

@@ -3,7 +3,7 @@ import { DB } from 'src/db';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory';
import { factory } from 'test/small.factory';
- import { getKyselyDB } from 'test/utils';
+ import { getKyselyDB, wait } from 'test/utils';
let defaultDatabase: Kysely<DB>;
@@ -126,4 +126,154 @@ describe.concurrent(SyncRequestType.PartnerAssetExifsV1, () => {
await expect(testSync(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
});
it('should backfill partner asset exif when a partner shared their library with you', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
const user3 = mediumFactory.userInsert();
await userRepo.create(user2);
await userRepo.create(user3);
const assetRepo = getRepository('asset');
const assetUser3 = mediumFactory.assetInsert({ ownerId: user3.id });
const assetUser2 = mediumFactory.assetInsert({ ownerId: user2.id });
await assetRepo.create(assetUser3);
await assetRepo.upsertExif({ assetId: assetUser3.id, make: 'Canon' });
await wait(2);
await assetRepo.create(assetUser2);
await assetRepo.upsertExif({ assetId: assetUser2.id, make: 'Canon' });
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: expect.objectContaining({
assetId: assetUser2.id,
}),
type: SyncEntityType.PartnerAssetExifV1,
},
]),
);
const acks = response.map(({ ack }) => ack);
await sut.setAcks(auth, { acks });
await partnerRepo.create({ sharedById: user3.id, sharedWithId: auth.user.id });
const backfillResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(backfillResponse).toHaveLength(2);
expect(backfillResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: expect.objectContaining({
assetId: assetUser3.id,
}),
type: SyncEntityType.PartnerAssetExifBackfillV1,
},
{
ack: expect.any(String),
data: {},
type: SyncEntityType.SyncAckV1,
},
]),
);
const backfillAck = backfillResponse[1].ack;
await sut.setAcks(auth, { acks: [backfillAck] });
const finalResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
const finalAcks = finalResponse.map(({ ack }) => ack);
expect(finalAcks).toEqual([]);
});
it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
const user3 = mediumFactory.userInsert();
await userRepo.create(user2);
await userRepo.create(user3);
const assetRepo = getRepository('asset');
const assetUser3 = mediumFactory.assetInsert({ ownerId: user3.id });
const assetUser2 = mediumFactory.assetInsert({ ownerId: user2.id });
const asset2User3 = mediumFactory.assetInsert({ ownerId: user3.id });
await assetRepo.create(assetUser3);
await assetRepo.upsertExif({ assetId: assetUser3.id, make: 'Canon' });
await wait(2);
await assetRepo.create(assetUser2);
await assetRepo.upsertExif({ assetId: assetUser2.id, make: 'Canon' });
await wait(2);
await assetRepo.create(asset2User3);
await assetRepo.upsertExif({ assetId: asset2User3.id, make: 'Canon' });
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: expect.objectContaining({
assetId: assetUser2.id,
}),
type: SyncEntityType.PartnerAssetExifV1,
},
]),
);
const acks = response.map(({ ack }) => ack);
await sut.setAcks(auth, { acks });
await partnerRepo.create({ sharedById: user3.id, sharedWithId: auth.user.id });
const backfillResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(backfillResponse).toHaveLength(3);
expect(backfillResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: expect.objectContaining({
assetId: assetUser3.id,
}),
type: SyncEntityType.PartnerAssetExifBackfillV1,
},
{
ack: expect.stringContaining(SyncEntityType.PartnerAssetExifBackfillV1),
data: {},
type: SyncEntityType.SyncAckV1,
},
{
ack: expect.any(String),
data: expect.objectContaining({
assetId: asset2User3.id,
}),
type: SyncEntityType.PartnerAssetExifV1,
},
]),
);
const backfillAck = backfillResponse[1].ack;
const partnerAssetAck = backfillResponse[2].ack;
await sut.setAcks(auth, { acks: [backfillAck, partnerAssetAck] });
const finalResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
const finalAcks = finalResponse.map(({ ack }) => ack);
expect(finalAcks).toEqual([]);
});
});

@@ -3,7 +3,7 @@ import { DB } from 'src/db';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { mediumFactory, newSyncAuthUser, newSyncTest } from 'test/medium.factory';
import { factory } from 'test/small.factory';
- import { getKyselyDB } from 'test/utils';
+ import { getKyselyDB, wait } from 'test/utils';
let defaultDatabase: Kysely<DB>;
@@ -19,7 +19,7 @@ beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
- describe.concurrent(SyncRequestType.PartnerAssetsV1, () => {
+ describe(SyncRequestType.PartnerAssetsV1, () => {
it('should detect and sync the first partner asset', async () => {
const { auth, sut, getRepository, testSync } = await setup();
@@ -210,4 +210,149 @@ describe.concurrent(SyncRequestType.PartnerAssetsV1, () => {
await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
});
it('should backfill partner assets when a partner shared their library with you', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
const user3 = mediumFactory.userInsert();
await userRepo.create(user2);
await userRepo.create(user3);
const assetRepo = getRepository('asset');
const assetUser3 = mediumFactory.assetInsert({ ownerId: user3.id });
const assetUser2 = mediumFactory.assetInsert({ ownerId: user2.id });
await assetRepo.create(assetUser3);
await wait(2);
await assetRepo.create(assetUser2);
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: expect.objectContaining({
id: assetUser2.id,
}),
type: SyncEntityType.PartnerAssetV1,
},
]),
);
const acks = response.map(({ ack }) => ack);
await sut.setAcks(auth, { acks });
await partnerRepo.create({ sharedById: user3.id, sharedWithId: auth.user.id });
const backfillResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(backfillResponse).toHaveLength(2);
expect(backfillResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: expect.objectContaining({
id: assetUser3.id,
}),
type: SyncEntityType.PartnerAssetBackfillV1,
},
{
ack: expect.stringContaining(SyncEntityType.PartnerAssetBackfillV1),
data: {},
type: SyncEntityType.SyncAckV1,
},
]),
);
const backfillAck = backfillResponse[1].ack;
await sut.setAcks(auth, { acks: [backfillAck] });
const finalResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
const finalAcks = finalResponse.map(({ ack }) => ack);
expect(finalAcks).toEqual([]);
});
it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
const user3 = mediumFactory.userInsert();
await userRepo.create(user2);
await userRepo.create(user3);
const assetRepo = getRepository('asset');
const assetUser3 = mediumFactory.assetInsert({ ownerId: user3.id });
const assetUser2 = mediumFactory.assetInsert({ ownerId: user2.id });
const asset2User3 = mediumFactory.assetInsert({ ownerId: user3.id });
await assetRepo.create(assetUser3);
await wait(2);
await assetRepo.create(assetUser2);
await wait(2);
await assetRepo.create(asset2User3);
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: expect.objectContaining({
id: assetUser2.id,
}),
type: SyncEntityType.PartnerAssetV1,
},
]),
);
const acks = response.map(({ ack }) => ack);
await sut.setAcks(auth, { acks });
await partnerRepo.create({ sharedById: user3.id, sharedWithId: auth.user.id });
const backfillResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(backfillResponse).toHaveLength(3);
expect(backfillResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: expect.objectContaining({
id: assetUser3.id,
}),
type: SyncEntityType.PartnerAssetBackfillV1,
},
{
ack: expect.stringContaining(SyncEntityType.PartnerAssetBackfillV1),
data: {},
type: SyncEntityType.SyncAckV1,
},
{
ack: expect.any(String),
data: expect.objectContaining({
id: asset2User3.id,
}),
type: SyncEntityType.PartnerAssetV1,
},
]),
);
const backfillAck = backfillResponse[1].ack;
const partnerAssetAck = backfillResponse[2].ack;
await sut.setAcks(auth, { acks: [backfillAck, partnerAssetAck] });
const finalResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
const finalAcks = finalResponse.map(({ ack }) => ack);
expect(finalAcks).toEqual([]);
});
});

@@ -24,7 +24,7 @@ export const newUuids = () =>
.fill(0)
.map(() => newUuid());
export const newDate = () => new Date();
- export const newUpdateId = () => 'uuid-v7';
+ export const newUuidV7 = () => 'uuid-v7';
export const newSha1 = () => Buffer.from('this is a fake hash');
export const newEmbedding = () => {
const embedding = Array.from({ length: 512 })
@@ -110,9 +110,10 @@ const partnerFactory = (partner: Partial<Partner> = {}) => {
sharedBy,
sharedWithId: sharedWith.id,
sharedWith,
+ createId: newUuidV7(),
createdAt: newDate(),
updatedAt: newDate(),
- updateId: newUpdateId(),
+ updateId: newUuidV7(),
inTimeline: true,
...partner,
};
@@ -122,7 +123,7 @@ const sessionFactory = (session: Partial<Session> = {}) => ({
id: newUuid(),
createdAt: newDate(),
updatedAt: newDate(),
- updateId: newUpdateId(),
+ updateId: newUuidV7(),
deviceOS: 'android',
deviceType: 'mobile',
token: 'abc123',
@@ -201,7 +202,7 @@ const assetFactory = (asset: Partial<MapAsset> = {}) => ({
createdAt: newDate(),
updatedAt: newDate(),
deletedAt: null,
- updateId: newUpdateId(),
+ updateId: newUuidV7(),
status: AssetStatus.ACTIVE,
checksum: newSha1(),
deviceAssetId: '',
@@ -240,7 +241,7 @@ const activityFactory = (activity: Partial<Activity> = {}) => {
albumId: newUuid(),
createdAt: newDate(),
updatedAt: newDate(),
- updateId: newUpdateId(),
+ updateId: newUuidV7(),
...activity,
};
};
@@ -250,7 +251,7 @@ const apiKeyFactory = (apiKey: Partial<ApiKey> = {}) => ({
userId: newUuid(),
createdAt: newDate(),
updatedAt: newDate(),
- updateId: newUpdateId(),
+ updateId: newUuidV7(),
name: 'Api Key',
permissions: [Permission.ALL],
...apiKey,
@@ -260,7 +261,7 @@ const libraryFactory = (library: Partial<Library> = {}) => ({
id: newUuid(),
createdAt: newDate(),
updatedAt: newDate(),
- updateId: newUpdateId(),
+ updateId: newUuidV7(),
deletedAt: null,
refreshedAt: null,
name: 'Library',
@@ -275,7 +276,7 @@ const memoryFactory = (memory: Partial<Memory> = {}) => ({
id: newUuid(),
createdAt: newDate(),
updatedAt: newDate(),
- updateId: newUpdateId(),
+ updateId: newUuidV7(),
deletedAt: null,
ownerId: newUuid(),
type: MemoryType.ON_THIS_DAY,

@@ -438,3 +438,7 @@ export async function* makeStream<T>(items: T[] = []): AsyncIterableIterator<T>
yield item;
}
}
export const wait = (ms: number) => {
return new Promise((resolve) => setTimeout(resolve, ms));
};