feat: sql-tools overrides (#19796)
This commit is contained in: parent 1f9813a28e · commit df4a27e8a7
@@ -107,25 +107,21 @@ const compare = async () => {
   const { database } = configRepository.getEnv();
   const db = postgres(asPostgresConnectionConfig(database.config));

-  const source = schemaFromCode();
+  const source = schemaFromCode({ overrides: true });
   const target = await schemaFromDatabase(db, {});

   const sourceParams = new Set(source.parameters.map(({ name }) => name));
   target.parameters = target.parameters.filter(({ name }) => sourceParams.has(name));

   const sourceTables = new Set(source.tables.map(({ name }) => name));
   target.tables = target.tables.filter(({ name }) => sourceTables.has(name));

   console.log(source.warnings.join('\n'));

   const up = schemaDiff(source, target, {
     tables: { ignoreExtra: true },
     functions: { ignoreExtra: false },
     parameters: { ignoreExtra: true },
   });
   const down = schemaDiff(target, source, {
-    tables: { ignoreExtra: false },
+    tables: { ignoreExtra: false, ignoreMissing: true },
     functions: { ignoreExtra: false },
-    extension: { ignoreMissing: true },
+    extensions: { ignoreMissing: true },
     parameters: { ignoreMissing: true },
   });

   return { up, down };
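
For orientation, a minimal sketch of what the new `overrides: true` flag adds to the generated schema — the entry-point name matches this diff, but the import path and the logging are illustrative only:

import { schemaFromCode } from 'src/sql-tools';

// With overrides enabled, the code-first schema also carries one
// DatabaseOverride per tracked function/trigger/expression index,
// e.g. 'function_immich_uuid_v7' or 'trigger_users_updated_at'.
const source = schemaFromCode({ overrides: true });
for (const override of source.overrides) {
  console.log(override.name, override.value.type);
}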
@@ -20,7 +20,6 @@ export const immich_uuid_v7 = registerFunction({
       ),
     'hex')::uuid;
   `,
-  synchronize: false,
 });

 export const album_user_after_insert = registerFunction({
@@ -33,7 +32,6 @@ export const album_user_after_insert = registerFunction({
     WHERE "id" IN (SELECT DISTINCT "albumsId" FROM inserted_rows);
     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const updated_at = registerFunction({
@@ -48,7 +46,6 @@ export const updated_at = registerFunction({
     new."updateId" = immich_uuid_v7(clock_timestamp);
     return new;
   END;`,
-  synchronize: false,
 });

 export const f_concat_ws = registerFunction({
@@ -59,7 +56,6 @@ export const f_concat_ws = registerFunction({
   parallel: 'safe',
   behavior: 'immutable',
   body: `SELECT array_to_string($2, $1)`,
-  synchronize: false,
 });

 export const f_unaccent = registerFunction({
@@ -71,7 +67,6 @@ export const f_unaccent = registerFunction({
   strict: true,
   behavior: 'immutable',
   return: `unaccent('unaccent', $1)`,
-  synchronize: false,
 });

 export const ll_to_earth_public = registerFunction({
@@ -83,7 +78,6 @@ export const ll_to_earth_public = registerFunction({
   strict: true,
   behavior: 'immutable',
   body: `SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth`,
-  synchronize: false,
 });

 export const users_delete_audit = registerFunction({
@@ -97,7 +91,6 @@ export const users_delete_audit = registerFunction({
     FROM OLD;
     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const partners_delete_audit = registerFunction({
@@ -111,7 +104,6 @@ export const partners_delete_audit = registerFunction({
     FROM OLD;
     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const assets_delete_audit = registerFunction({
@@ -125,7 +117,6 @@ export const assets_delete_audit = registerFunction({
     FROM OLD;
     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const albums_delete_audit = registerFunction({
@@ -139,7 +130,6 @@ export const albums_delete_audit = registerFunction({
     FROM OLD;
     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const album_assets_delete_audit = registerFunction({
@@ -153,7 +143,6 @@ export const album_assets_delete_audit = registerFunction({
     WHERE "albumsId" IN (SELECT "id" FROM albums WHERE "id" IN (SELECT "albumsId" FROM OLD));
     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const album_users_delete_audit = registerFunction({
@@ -174,7 +163,6 @@ export const album_users_delete_audit = registerFunction({

     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const memories_delete_audit = registerFunction({
@@ -188,7 +176,6 @@ export const memories_delete_audit = registerFunction({
     FROM OLD;
     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const memory_assets_delete_audit = registerFunction({
@@ -202,7 +189,6 @@ export const memory_assets_delete_audit = registerFunction({
     WHERE "memoriesId" IN (SELECT "id" FROM memories WHERE "id" IN (SELECT "memoriesId" FROM OLD));
     RETURN NULL;
   END`,
-  synchronize: false,
 });

 export const stacks_delete_audit = registerFunction({
@@ -216,5 +202,4 @@ export const stacks_delete_audit = registerFunction({
     FROM OLD;
     RETURN NULL;
   END`,
-  synchronize: false,
 });
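
Every hunk above removes the same line: with overrides enabled, these functions are now synchronized, and their CREATE statements are captured in the migration_overrides table rather than being excluded from diffing. A hedged sketch of the resulting naming convention (the real logic lives in override.processor.ts, further down; this helper is hypothetical):

// Hypothetical helper for illustration only.
const asOverrideName = (type: 'function' | 'trigger' | 'index', name: string) => `${type}_${name}`;

asOverrideName('function', 'immich_uuid_v7'); // => 'function_immich_uuid_v7'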
server/src/schema/migrations/1751924596408-AddOverrides.ts (new file, 66 lines)
@@ -0,0 +1,66 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`CREATE TABLE "migration_overrides" ("name" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
  await sql`ALTER TABLE "migration_overrides" ADD CONSTRAINT "migration_overrides_pkey" PRIMARY KEY ("name");`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_immich_uuid_v7', '{"type":"function","name":"immich_uuid_v7","sql":"CREATE OR REPLACE FUNCTION immich_uuid_v7(p_timestamp timestamp with time zone default clock_timestamp())\\n  RETURNS uuid\\n  VOLATILE LANGUAGE SQL\\n  AS $$\\n    SELECT encode(\\n      set_bit(\\n        set_bit(\\n          overlay(uuid_send(gen_random_uuid())\\n            placing substring(int8send(floor(extract(epoch from p_timestamp) * 1000)::bigint) from 3)\\n            from 1 for 6\\n          ),\\n          52, 1\\n        ),\\n        53, 1\\n      ),\\n      ''hex'')::uuid;\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_user_after_insert', '{"type":"function","name":"album_user_after_insert","sql":"CREATE OR REPLACE FUNCTION album_user_after_insert()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      UPDATE albums SET \\"updatedAt\\" = clock_timestamp(), \\"updateId\\" = immich_uuid_v7(clock_timestamp())\\n      WHERE \\"id\\" IN (SELECT DISTINCT \\"albumsId\\" FROM inserted_rows);\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_updated_at', '{"type":"function","name":"updated_at","sql":"CREATE OR REPLACE FUNCTION updated_at()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    DECLARE\\n      clock_timestamp TIMESTAMP := clock_timestamp();\\n    BEGIN\\n      new.\\"updatedAt\\" = clock_timestamp;\\n      new.\\"updateId\\" = immich_uuid_v7(clock_timestamp);\\n      return new;\\n    END;\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_f_concat_ws', '{"type":"function","name":"f_concat_ws","sql":"CREATE OR REPLACE FUNCTION f_concat_ws(text, text[])\\n  RETURNS text\\n  PARALLEL SAFE IMMUTABLE LANGUAGE SQL\\n  AS $$SELECT array_to_string($2, $1)$$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_f_unaccent', '{"type":"function","name":"f_unaccent","sql":"CREATE OR REPLACE FUNCTION f_unaccent(text)\\n  RETURNS text\\n  PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL\\n  RETURN unaccent(''unaccent'', $1)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_ll_to_earth_public', '{"type":"function","name":"ll_to_earth_public","sql":"CREATE OR REPLACE FUNCTION ll_to_earth_public(latitude double precision, longitude double precision)\\n  RETURNS public.earth\\n  PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL\\n  AS $$SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth$$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_users_delete_audit', '{"type":"function","name":"users_delete_audit","sql":"CREATE OR REPLACE FUNCTION users_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO users_audit (\\"userId\\")\\n      SELECT \\"id\\"\\n      FROM OLD;\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_partners_delete_audit', '{"type":"function","name":"partners_delete_audit","sql":"CREATE OR REPLACE FUNCTION partners_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO partners_audit (\\"sharedById\\", \\"sharedWithId\\")\\n      SELECT \\"sharedById\\", \\"sharedWithId\\"\\n      FROM OLD;\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_assets_delete_audit', '{"type":"function","name":"assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION assets_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO assets_audit (\\"assetId\\", \\"ownerId\\")\\n      SELECT \\"id\\", \\"ownerId\\"\\n      FROM OLD;\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_albums_delete_audit', '{"type":"function","name":"albums_delete_audit","sql":"CREATE OR REPLACE FUNCTION albums_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO albums_audit (\\"albumId\\", \\"userId\\")\\n      SELECT \\"id\\", \\"ownerId\\"\\n      FROM OLD;\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_assets_delete_audit', '{"type":"function","name":"album_assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_assets_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO album_assets_audit (\\"albumId\\", \\"assetId\\")\\n      SELECT \\"albumsId\\", \\"assetsId\\" FROM OLD\\n      WHERE \\"albumsId\\" IN (SELECT \\"id\\" FROM albums WHERE \\"id\\" IN (SELECT \\"albumsId\\" FROM OLD));\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_users_delete_audit', '{"type":"function","name":"album_users_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_users_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO albums_audit (\\"albumId\\", \\"userId\\")\\n      SELECT \\"albumsId\\", \\"usersId\\"\\n      FROM OLD;\\n\\n      IF pg_trigger_depth() = 1 THEN\\n        INSERT INTO album_users_audit (\\"albumId\\", \\"userId\\")\\n        SELECT \\"albumsId\\", \\"usersId\\"\\n        FROM OLD;\\n      END IF;\\n\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_memories_delete_audit', '{"type":"function","name":"memories_delete_audit","sql":"CREATE OR REPLACE FUNCTION memories_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO memories_audit (\\"memoryId\\", \\"userId\\")\\n      SELECT \\"id\\", \\"ownerId\\"\\n      FROM OLD;\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_memory_assets_delete_audit', '{"type":"function","name":"memory_assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION memory_assets_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO memory_assets_audit (\\"memoryId\\", \\"assetId\\")\\n      SELECT \\"memoriesId\\", \\"assetsId\\" FROM OLD\\n      WHERE \\"memoriesId\\" IN (SELECT \\"id\\" FROM memories WHERE \\"id\\" IN (SELECT \\"memoriesId\\" FROM OLD));\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_stacks_delete_audit', '{"type":"function","name":"stacks_delete_audit","sql":"CREATE OR REPLACE FUNCTION stacks_delete_audit()\\n  RETURNS TRIGGER\\n  LANGUAGE PLPGSQL\\n  AS $$\\n    BEGIN\\n      INSERT INTO stacks_audit (\\"stackId\\", \\"userId\\")\\n      SELECT \\"id\\", \\"ownerId\\"\\n      FROM OLD;\\n      RETURN NULL;\\n    END\\n  $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_users_delete_audit', '{"type":"trigger","name":"users_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"users_delete_audit\\"\\n  AFTER DELETE ON \\"users\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() = 0)\\n  EXECUTE FUNCTION users_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_users_updated_at', '{"type":"trigger","name":"users_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"users_updated_at\\"\\n  BEFORE UPDATE ON \\"users\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_libraries_updated_at', '{"type":"trigger","name":"libraries_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"libraries_updated_at\\"\\n  BEFORE UPDATE ON \\"libraries\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_stacks_delete_audit', '{"type":"trigger","name":"stacks_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"stacks_delete_audit\\"\\n  AFTER DELETE ON \\"asset_stack\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() = 0)\\n  EXECUTE FUNCTION stacks_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_stacks_updated_at', '{"type":"trigger","name":"stacks_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"stacks_updated_at\\"\\n  BEFORE UPDATE ON \\"asset_stack\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_assets_delete_audit', '{"type":"trigger","name":"assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"assets_delete_audit\\"\\n  AFTER DELETE ON \\"assets\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() = 0)\\n  EXECUTE FUNCTION assets_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_assets_updated_at', '{"type":"trigger","name":"assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"assets_updated_at\\"\\n  BEFORE UPDATE ON \\"assets\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_originalfilename_trigram', '{"type":"index","name":"idx_originalfilename_trigram","sql":"CREATE INDEX \\"idx_originalfilename_trigram\\" ON \\"assets\\" USING gin (f_unaccent(\\"originalFileName\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_local_date_time_month', '{"type":"index","name":"idx_local_date_time_month","sql":"CREATE INDEX \\"idx_local_date_time_month\\" ON \\"assets\\" ((date_trunc(''MONTH''::text, (\\"localDateTime\\" AT TIME ZONE ''UTC''::text)) AT TIME ZONE ''UTC''::text))"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_local_date_time', '{"type":"index","name":"idx_local_date_time","sql":"CREATE INDEX \\"idx_local_date_time\\" ON \\"assets\\" (((\\"localDateTime\\" at time zone ''UTC'')::date))"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_UQ_assets_owner_library_checksum', '{"type":"index","name":"UQ_assets_owner_library_checksum","sql":"CREATE UNIQUE INDEX \\"UQ_assets_owner_library_checksum\\" ON \\"assets\\" (\\"ownerId\\", \\"libraryId\\", \\"checksum\\") WHERE (\\"libraryId\\" IS NOT NULL)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_UQ_assets_owner_checksum', '{"type":"index","name":"UQ_assets_owner_checksum","sql":"CREATE UNIQUE INDEX \\"UQ_assets_owner_checksum\\" ON \\"assets\\" (\\"ownerId\\", \\"checksum\\") WHERE (\\"libraryId\\" IS NULL)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_albums_delete_audit', '{"type":"trigger","name":"albums_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"albums_delete_audit\\"\\n  AFTER DELETE ON \\"albums\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() = 0)\\n  EXECUTE FUNCTION albums_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_albums_updated_at', '{"type":"trigger","name":"albums_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"albums_updated_at\\"\\n  BEFORE UPDATE ON \\"albums\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_activity_updated_at', '{"type":"trigger","name":"activity_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"activity_updated_at\\"\\n  BEFORE UPDATE ON \\"activity\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_IDX_activity_like', '{"type":"index","name":"IDX_activity_like","sql":"CREATE UNIQUE INDEX \\"IDX_activity_like\\" ON \\"activity\\" (\\"assetId\\", \\"userId\\", \\"albumId\\") WHERE (\\"isLiked\\" = true)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_assets_delete_audit', '{"type":"trigger","name":"album_assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"album_assets_delete_audit\\"\\n  AFTER DELETE ON \\"albums_assets_assets\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() <= 1)\\n  EXECUTE FUNCTION album_assets_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_assets_updated_at', '{"type":"trigger","name":"album_assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"album_assets_updated_at\\"\\n  BEFORE UPDATE ON \\"albums_assets_assets\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_users_delete_audit', '{"type":"trigger","name":"album_users_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"album_users_delete_audit\\"\\n  AFTER DELETE ON \\"albums_shared_users_users\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() <= 1)\\n  EXECUTE FUNCTION album_users_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_user_after_insert', '{"type":"trigger","name":"album_user_after_insert","sql":"CREATE OR REPLACE TRIGGER \\"album_user_after_insert\\"\\n  AFTER INSERT ON \\"albums_shared_users_users\\"\\n  REFERENCING NEW TABLE AS \\"inserted_rows\\"\\n  FOR EACH STATEMENT\\n  EXECUTE FUNCTION album_user_after_insert();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_users_updated_at', '{"type":"trigger","name":"album_users_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"album_users_updated_at\\"\\n  BEFORE UPDATE ON \\"albums_shared_users_users\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_api_keys_updated_at', '{"type":"trigger","name":"api_keys_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"api_keys_updated_at\\"\\n  BEFORE UPDATE ON \\"api_keys\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_person_updated_at', '{"type":"trigger","name":"person_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"person_updated_at\\"\\n  BEFORE UPDATE ON \\"person\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_files_updated_at', '{"type":"trigger","name":"asset_files_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"asset_files_updated_at\\"\\n  BEFORE UPDATE ON \\"asset_files\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_exif_updated_at', '{"type":"trigger","name":"asset_exif_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"asset_exif_updated_at\\"\\n  BEFORE UPDATE ON \\"exif\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_face_index', '{"type":"index","name":"face_index","sql":"CREATE INDEX \\"face_index\\" ON \\"face_search\\" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_IDX_geodata_gist_earthcoord', '{"type":"index","name":"IDX_geodata_gist_earthcoord","sql":"CREATE INDEX \\"IDX_geodata_gist_earthcoord\\" ON \\"geodata_places\\" (ll_to_earth_public(latitude, longitude))"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_name', '{"type":"index","name":"idx_geodata_places_name","sql":"CREATE INDEX \\"idx_geodata_places_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_admin2_name', '{"type":"index","name":"idx_geodata_places_admin2_name","sql":"CREATE INDEX \\"idx_geodata_places_admin2_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"admin2Name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_admin1_name', '{"type":"index","name":"idx_geodata_places_admin1_name","sql":"CREATE INDEX \\"idx_geodata_places_admin1_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"admin1Name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_alternate_names', '{"type":"index","name":"idx_geodata_places_alternate_names","sql":"CREATE INDEX \\"idx_geodata_places_alternate_names\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"alternateNames\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memories_delete_audit', '{"type":"trigger","name":"memories_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"memories_delete_audit\\"\\n  AFTER DELETE ON \\"memories\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() = 0)\\n  EXECUTE FUNCTION memories_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memories_updated_at', '{"type":"trigger","name":"memories_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"memories_updated_at\\"\\n  BEFORE UPDATE ON \\"memories\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memory_assets_delete_audit', '{"type":"trigger","name":"memory_assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"memory_assets_delete_audit\\"\\n  AFTER DELETE ON \\"memories_assets_assets\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() <= 1)\\n  EXECUTE FUNCTION memory_assets_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memory_assets_updated_at', '{"type":"trigger","name":"memory_assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"memory_assets_updated_at\\"\\n  BEFORE UPDATE ON \\"memories_assets_assets\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_notifications_updated_at', '{"type":"trigger","name":"notifications_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"notifications_updated_at\\"\\n  BEFORE UPDATE ON \\"notifications\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_partners_delete_audit', '{"type":"trigger","name":"partners_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"partners_delete_audit\\"\\n  AFTER DELETE ON \\"partners\\"\\n  REFERENCING OLD TABLE AS \\"old\\"\\n  FOR EACH STATEMENT\\n  WHEN (pg_trigger_depth() = 0)\\n  EXECUTE FUNCTION partners_delete_audit();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_partners_updated_at', '{"type":"trigger","name":"partners_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"partners_updated_at\\"\\n  BEFORE UPDATE ON \\"partners\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_sessions_updated_at', '{"type":"trigger","name":"sessions_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"sessions_updated_at\\"\\n  BEFORE UPDATE ON \\"sessions\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_session_sync_checkpoints_updated_at', '{"type":"trigger","name":"session_sync_checkpoints_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"session_sync_checkpoints_updated_at\\"\\n  BEFORE UPDATE ON \\"session_sync_checkpoints\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_tags_updated_at', '{"type":"trigger","name":"tags_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"tags_updated_at\\"\\n  BEFORE UPDATE ON \\"tags\\"\\n  FOR EACH ROW\\n  EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP TABLE "migration_overrides";`.execute(db);
}
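
Each seeded row stores the same JSON shape; a hedged reconstruction as a TypeScript type follows (the authoritative definition is DatabaseOverride in src/sql-tools/types, which is not shown in this diff — these type names are assumed):

// Assumed shape, inferred from the rows above.
type OverrideValue = {
  type: 'function' | 'trigger' | 'index';
  name: string; // database object name, e.g. 'immich_uuid_v7'
  sql: string; // full CREATE statement used to (re)create the object
};

type MigrationOverrideRow = {
  name: string; // primary key, e.g. 'function_immich_uuid_v7'
  value: OverrideValue; // stored as jsonb
};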
@@ -44,7 +44,6 @@ import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
 @Index({
   name: 'idx_local_date_time',
   expression: `(("localDateTime" at time zone 'UTC')::date)`,
-  synchronize: false,
 })
 @Index({
   name: 'idx_local_date_time_month',
@@ -56,7 +55,6 @@ import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
   name: 'idx_originalfilename_trigram',
   using: 'gin',
   expression: 'f_unaccent("originalFileName") gin_trgm_ops',
-  synchronize: false,
 })
 // For all assets, each originalpath must be unique per user and library
 export class AssetTable {
@@ -7,7 +7,6 @@ import { Column, ForeignKeyColumn, Index, Table } from 'src/sql-tools';
   using: 'hnsw',
   expression: `embedding vector_cosine_ops`,
   with: 'ef_construction = 300, m = 16',
-  synchronize: false,
 })
 export class FaceSearchTable {
   @ForeignKeyColumn(() => AssetFaceTable, {
@@ -1,34 +1,29 @@
 import { Column, Index, PrimaryColumn, Table, Timestamp } from 'src/sql-tools';

-@Table({ name: 'geodata_places', synchronize: false })
+@Table({ name: 'geodata_places' })
 @Index({
   name: 'idx_geodata_places_alternate_names',
   using: 'gin',
   expression: 'f_unaccent("alternateNames") gin_trgm_ops',
-  synchronize: false,
 })
 @Index({
   name: 'idx_geodata_places_admin1_name',
   using: 'gin',
   expression: 'f_unaccent("admin1Name") gin_trgm_ops',
-  synchronize: false,
 })
 @Index({
   name: 'idx_geodata_places_admin2_name',
   using: 'gin',
   expression: 'f_unaccent("admin2Name") gin_trgm_ops',
-  synchronize: false,
 })
 @Index({
   name: 'idx_geodata_places_name',
   using: 'gin',
   expression: 'f_unaccent("name") gin_trgm_ops',
-  synchronize: false,
 })
 @Index({
   name: 'IDX_geodata_gist_earthcoord',
   expression: 'll_to_earth_public(latitude, longitude)',
-  synchronize: false,
 })
 export class GeodataPlacesTable {
   @PrimaryColumn({ type: 'integer' })
server/src/sql-tools/comparers/override.comparer.spec.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
import { compareOverrides } from 'src/sql-tools/comparers/override.comparer';
import { DatabaseOverride, Reason } from 'src/sql-tools/types';
import { describe, expect, it } from 'vitest';

const testOverride: DatabaseOverride = {
  name: 'test',
  value: { type: 'function', name: 'test_func', sql: 'func implementation' },
  synchronize: true,
};

describe('compareOverrides', () => {
  describe('onExtra', () => {
    it('should work', () => {
      expect(compareOverrides.onExtra(testOverride)).toEqual([
        {
          type: 'OverrideDrop',
          overrideName: 'test',
          reason: Reason.MissingInSource,
        },
      ]);
    });
  });

  describe('onMissing', () => {
    it('should work', () => {
      expect(compareOverrides.onMissing(testOverride)).toEqual([
        {
          type: 'OverrideCreate',
          override: testOverride,
          reason: Reason.MissingInTarget,
        },
      ]);
    });
  });

  describe('onCompare', () => {
    it('should work', () => {
      expect(compareOverrides.onCompare(testOverride, testOverride)).toEqual([]);
    });

    it('should drop and recreate when the value changes', () => {
      const source: DatabaseOverride = {
        name: 'test',
        value: {
          type: 'function',
          name: 'test_func',
          sql: 'func implementation',
        },
        synchronize: true,
      };
      const target: DatabaseOverride = {
        name: 'test',
        value: {
          type: 'function',
          name: 'test_func',
          sql: 'func implementation2',
        },
        synchronize: true,
      };
      expect(compareOverrides.onCompare(source, target)).toEqual([
        {
          override: source,
          type: 'OverrideUpdate',
          reason: expect.stringContaining('value is different'),
        },
      ]);
    });
  });
});
server/src/sql-tools/comparers/override.comparer.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { Comparer, DatabaseOverride, Reason } from 'src/sql-tools/types';

export const compareOverrides: Comparer<DatabaseOverride> = {
  onMissing: (source) => [
    {
      type: 'OverrideCreate',
      override: source,
      reason: Reason.MissingInTarget,
    },
  ],
  onExtra: (target) => [
    {
      type: 'OverrideDrop',
      overrideName: target.name,
      reason: Reason.MissingInSource,
    },
  ],
  onCompare: (source, target) => {
    if (source.value.name !== target.value.name || source.value.sql !== target.value.sql) {
      const sourceValue = JSON.stringify(source.value);
      const targetValue = JSON.stringify(target.value);
      return [
        { type: 'OverrideUpdate', override: source, reason: `value is different (${sourceValue} vs ${targetValue})` },
      ];
    }

    return [];
  },
};
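
A small usage sketch of the comparer in isolation — the override literals here are invented for illustration:

import { compareOverrides } from 'src/sql-tools/comparers/override.comparer';
import { DatabaseOverride } from 'src/sql-tools/types';

const source: DatabaseOverride = {
  name: 'function_updated_at',
  value: { type: 'function', name: 'updated_at', sql: 'CREATE OR REPLACE FUNCTION updated_at() ...' },
  synchronize: true,
};
const target: DatabaseOverride = { ...source, value: { ...source.value, sql: 'CREATE OR REPLACE FUNCTION updated_at() -- stale' } };

// Differing `sql` yields a single OverrideUpdate describing the mismatch;
// identical values yield an empty array.
console.log(compareOverrides.onCompare(source, target));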
server/src/sql-tools/contexts/base-context.ts (new file, 74 lines)
@@ -0,0 +1,74 @@
import {
  BaseContextOptions,
  DatabaseEnum,
  DatabaseExtension,
  DatabaseFunction,
  DatabaseOverride,
  DatabaseParameter,
  DatabaseSchema,
  DatabaseTable,
} from 'src/sql-tools/types';

const asOverrideKey = (type: string, name: string) => `${type}:${name}`;

export class BaseContext {
  databaseName: string;
  schemaName: string;
  overrideTableName: string;

  tables: DatabaseTable[] = [];
  functions: DatabaseFunction[] = [];
  enums: DatabaseEnum[] = [];
  extensions: DatabaseExtension[] = [];
  parameters: DatabaseParameter[] = [];
  overrides: DatabaseOverride[] = [];
  warnings: string[] = [];

  constructor(options: BaseContextOptions) {
    this.databaseName = options.databaseName ?? 'postgres';
    this.schemaName = options.schemaName ?? 'public';
    this.overrideTableName = options.overrideTableName ?? 'migration_overrides';
  }

  getTableByName(name: string) {
    return this.tables.find((table) => table.name === name);
  }

  warn(context: string, message: string) {
    this.warnings.push(`[${context}] ${message}`);
  }

  build(): DatabaseSchema {
    const overrideMap = new Map<string, DatabaseOverride>();
    for (const override of this.overrides) {
      const { type, name } = override.value;
      overrideMap.set(asOverrideKey(type, name), override);
    }

    for (const func of this.functions) {
      func.override = overrideMap.get(asOverrideKey('function', func.name));
    }

    for (const { indexes, triggers } of this.tables) {
      for (const index of indexes) {
        index.override = overrideMap.get(asOverrideKey('index', index.name));
      }

      for (const trigger of triggers) {
        trigger.override = overrideMap.get(asOverrideKey('trigger', trigger.name));
      }
    }

    return {
      databaseName: this.databaseName,
      schemaName: this.schemaName,
      tables: this.tables,
      functions: this.functions,
      enums: this.enums,
      extensions: this.extensions,
      parameters: this.parameters,
      overrides: this.overrides,
      warnings: this.warnings,
    };
  }
}
@@ -1,45 +1,25 @@
 /* eslint-disable @typescript-eslint/no-unsafe-function-type */
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { ColumnOptions, TableOptions } from 'src/sql-tools/decorators';
 import { asKey } from 'src/sql-tools/helpers';
-import {
-  DatabaseColumn,
-  DatabaseEnum,
-  DatabaseExtension,
-  DatabaseFunction,
-  DatabaseParameter,
-  DatabaseSchema,
-  DatabaseTable,
-  SchemaFromCodeOptions,
-} from 'src/sql-tools/types';
+import { DatabaseColumn, DatabaseTable, SchemaFromCodeOptions } from 'src/sql-tools/types';

 type TableMetadata = { options: TableOptions; object: Function; methodToColumn: Map<string | symbol, DatabaseColumn> };

-export class SchemaBuilder {
-  databaseName: string;
-  schemaName: string;
-  tables: DatabaseTable[] = [];
-  functions: DatabaseFunction[] = [];
-  enums: DatabaseEnum[] = [];
-  extensions: DatabaseExtension[] = [];
-  parameters: DatabaseParameter[] = [];
-  warnings: string[] = [];
+export class ProcessorContext extends BaseContext {
+  constructor(public options: SchemaFromCodeOptions) {
+    options.createForeignKeyIndexes = options.createForeignKeyIndexes ?? true;
+    options.overrides = options.overrides ?? false;
+    super(options);
+  }

   classToTable: WeakMap<Function, DatabaseTable> = new WeakMap();
   tableToMetadata: WeakMap<DatabaseTable, TableMetadata> = new WeakMap();

-  constructor(options: SchemaFromCodeOptions) {
-    this.databaseName = options.databaseName ?? 'postgres';
-    this.schemaName = options.schemaName ?? 'public';
-  }
-
   getTableByObject(object: Function) {
     return this.classToTable.get(object);
   }

-  getTableByName(name: string) {
-    return this.tables.find((table) => table.name === name);
-  }
-
   getTableMetadata(table: DatabaseTable) {
     const metadata = this.tableToMetadata.get(table);
     if (!metadata) {
@@ -92,10 +72,6 @@ export class SchemaBuilder {
     return asKey('IDX_', table, items);
   }

-  warn(context: string, message: string) {
-    this.warnings.push(`[${context}] ${message}`);
-  }
-
   warnMissingTable(context: string, object: object, propertyName?: symbol | string) {
     const label = object.constructor.name + (propertyName ? '.' + String(propertyName) : '');
     this.warn(context, `Unable to find table (${label})`);
@@ -105,17 +81,4 @@ export class SchemaBuilder {
     const label = object.constructor.name + (propertyName ? '.' + String(propertyName) : '');
     this.warn(context, `Unable to find column (${label})`);
   }
-
-  build(): DatabaseSchema {
-    return {
-      databaseName: this.databaseName,
-      schemaName: this.schemaName,
-      tables: this.tables,
-      functions: this.functions,
-      enums: this.enums,
-      extensions: this.extensions,
-      parameters: this.parameters,
-      warnings: this.warnings,
-    };
-  }
 }
server/src/sql-tools/contexts/reader-context.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { SchemaFromDatabaseOptions } from 'src/sql-tools/types';

export class ReaderContext extends BaseContext {
  constructor(public options: SchemaFromDatabaseOptions) {
    super(options);
  }
}
@@ -1,6 +1,6 @@
 import { createHash } from 'node:crypto';
 import { ColumnValue } from 'src/sql-tools/decorators/column.decorator';
-import { Comparer, DatabaseColumn, IgnoreOptions, SchemaDiff } from 'src/sql-tools/types';
+import { Comparer, DatabaseColumn, DatabaseOverride, IgnoreOptions, SchemaDiff } from 'src/sql-tools/types';

 export const asMetadataKey = (name: string) => `sql-tools:${name}`;

@@ -56,6 +56,17 @@ export const haveEqualColumns = (sourceColumns?: string[], targetColumns?: strin
   return setIsEqual(new Set(sourceColumns ?? []), new Set(targetColumns ?? []));
 };

+export const haveEqualOverrides = <T extends { override?: DatabaseOverride }>(source: T, target: T) => {
+  if (!source.override || !target.override) {
+    return false;
+  }
+
+  const sourceValue = source.override.value;
+  const targetValue = target.override.value;
+
+  return sourceValue.name === targetValue.name && sourceValue.sql === targetValue.sql;
+};
+
 export const compare = <T extends { name: string; synchronize: boolean }>(
   sources: T[],
   targets: T[],
@@ -72,7 +83,7 @@ export const compare = <T extends { name: string; synchronize: boolean }>(
     const source = sourceMap[key];
     const target = targetMap[key];

-    if (isIgnored(source, target, options)) {
+    if (isIgnored(source, target, options ?? true)) {
       continue;
     }

@@ -85,6 +96,14 @@ export const compare = <T extends { name: string; synchronize: boolean }>(
     } else if (!source && target) {
       items.push(...comparer.onExtra(target));
     } else {
+      if (
+        haveEqualOverrides(
+          source as unknown as { override?: DatabaseOverride },
+          target as unknown as { override?: DatabaseOverride },
+        )
+      ) {
+        continue;
+      }
       items.push(...comparer.onCompare(source, target));
     }
   }
@@ -97,6 +116,9 @@ const isIgnored = (
   target: { synchronize?: boolean } | undefined,
   options: IgnoreOptions,
 ) => {
+  if (typeof options === 'boolean') {
+    return !options;
+  }
   return (options.ignoreExtra && !source) || (options.ignoreMissing && !target);
 };

@@ -165,3 +187,18 @@ export const asColumnComment = (tableName: string, columnName: string, comment:
 export const asColumnList = (columns: string[]) => columns.map((column) => `"${column}"`).join(', ');

 export const asForeignKeyConstraintName = (table: string, columns: string[]) => asKey('FK_', table, [...columns]);
+
+export const asJsonString = (value: unknown): string => {
+  return `'${escape(JSON.stringify(value))}'::jsonb`;
+};
+
+const escape = (value: string) => {
+  return value
+    .replaceAll("'", "''")
+    .replaceAll(/[\\]/g, '\\\\')
+    .replaceAll(/[\b]/g, String.raw`\b`)
+    .replaceAll(/[\f]/g, String.raw`\f`)
+    .replaceAll(/[\n]/g, String.raw`\n`)
+    .replaceAll(/[\r]/g, String.raw`\r`)
+    .replaceAll(/[\t]/g, String.raw`\t`);
+};
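
asJsonString is what lets override values round-trip into migration files like the AddOverrides migration above: JSON.stringify first, then SQL-level escaping of quotes, backslashes, and control characters. A quick sketch of the expected output, assuming the escape rules above:

asJsonString({ type: 'trigger', name: 'users_updated_at', sql: 'CREATE OR REPLACE TRIGGER "users_updated_at" ...' });
// => '{"type":"trigger","name":"users_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"users_updated_at\\" ..."}'::jsonb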
@@ -1,13 +1,13 @@
 import { asKey } from 'src/sql-tools/helpers';
 import { ConstraintType, Processor } from 'src/sql-tools/types';

-export const processCheckConstraints: Processor = (builder, items) => {
+export const processCheckConstraints: Processor = (ctx, items) => {
   for (const {
     item: { object, options },
   } of items.filter((item) => item.type === 'checkConstraint')) {
-    const table = builder.getTableByObject(object);
+    const table = ctx.getTableByObject(object);
     if (!table) {
-      builder.warnMissingTable('@Check', object);
+      ctx.warnMissingTable('@Check', object);
       continue;
     }

@@ -2,14 +2,14 @@ import { ColumnOptions } from 'src/sql-tools/decorators/column.decorator';
 import { fromColumnValue } from 'src/sql-tools/helpers';
 import { Processor } from 'src/sql-tools/types';

-export const processColumns: Processor = (builder, items) => {
+export const processColumns: Processor = (ctx, items) => {
   for (const {
     type,
     item: { object, propertyName, options },
   } of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
-    const table = builder.getTableByObject(object.constructor);
+    const table = ctx.getTableByObject(object.constructor);
     if (!table) {
-      builder.warnMissingTable(type === 'column' ? '@Column' : '@ForeignKeyColumn', object, propertyName);
+      ctx.warnMissingTable(type === 'column' ? '@Column' : '@ForeignKeyColumn', object, propertyName);
       continue;
     }

@@ -31,7 +31,7 @@ export const processColumns: Processor = (builder, items) => {

     const isEnum = !!(options as ColumnOptions).enum;

-    builder.addColumn(
+    ctx.addColumn(
       table,
       {
         name: columnName,
@@ -1,12 +1,12 @@
 import { fromColumnValue } from 'src/sql-tools/helpers';
 import { Processor } from 'src/sql-tools/types';

-export const processConfigurationParameters: Processor = (builder, items) => {
+export const processConfigurationParameters: Processor = (ctx, items) => {
   for (const {
     item: { options },
   } of items.filter((item) => item.type === 'configurationParameter')) {
-    builder.parameters.push({
-      databaseName: builder.databaseName,
+    ctx.parameters.push({
+      databaseName: ctx.databaseName,
       name: options.name,
       value: fromColumnValue(options.value),
       scope: options.scope,
@@ -1,10 +1,10 @@
 import { asSnakeCase } from 'src/sql-tools/helpers';
 import { Processor } from 'src/sql-tools/types';

-export const processDatabases: Processor = (builder, items) => {
+export const processDatabases: Processor = (ctx, items) => {
   for (const {
     item: { object, options },
   } of items.filter((item) => item.type === 'database')) {
-    builder.databaseName = options.name || asSnakeCase(object.name);
+    ctx.databaseName = options.name || asSnakeCase(object.name);
   }
 };
@@ -1,8 +1,8 @@
 import { Processor } from 'src/sql-tools/types';

-export const processEnums: Processor = (builder, items) => {
+export const processEnums: Processor = (ctx, items) => {
   for (const { item } of items.filter((item) => item.type === 'enum')) {
     // TODO log warnings if enum name is not unique
-    builder.enums.push(item);
+    ctx.enums.push(item);
   }
 };
@@ -1,10 +1,14 @@
 import { Processor } from 'src/sql-tools/types';

-export const processExtensions: Processor = (builder, items) => {
+export const processExtensions: Processor = (ctx, items) => {
+  if (ctx.options.extensions === false) {
+    return;
+  }
+
   for (const {
     item: { options },
   } of items.filter((item) => item.type === 'extension')) {
-    builder.extensions.push({
+    ctx.extensions.push({
       name: options.name,
       synchronize: options.synchronize ?? true,
     });
@@ -1,25 +1,25 @@
 import { asForeignKeyConstraintName, asKey } from 'src/sql-tools/helpers';
 import { ActionType, ConstraintType, Processor } from 'src/sql-tools/types';

-export const processForeignKeyColumns: Processor = (builder, items) => {
+export const processForeignKeyColumns: Processor = (ctx, items) => {
   for (const {
     item: { object, propertyName, options, target },
   } of items.filter((item) => item.type === 'foreignKeyColumn')) {
-    const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
+    const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
     if (!table) {
-      builder.warnMissingTable('@ForeignKeyColumn', object);
+      ctx.warnMissingTable('@ForeignKeyColumn', object);
       continue;
     }

     if (!column) {
       // should be impossible since they are pre-created in `column.processor.ts`
-      builder.warnMissingColumn('@ForeignKeyColumn', object, propertyName);
+      ctx.warnMissingColumn('@ForeignKeyColumn', object, propertyName);
       continue;
     }

-    const referenceTable = builder.getTableByObject(target());
+    const referenceTable = ctx.getTableByObject(target());
     if (!referenceTable) {
-      builder.warnMissingTable('@ForeignKeyColumn', object, propertyName);
+      ctx.warnMissingTable('@ForeignKeyColumn', object, propertyName);
       continue;
     }

@@ -1,20 +1,20 @@
 import { asForeignKeyConstraintName } from 'src/sql-tools/helpers';
 import { ActionType, ConstraintType, Processor } from 'src/sql-tools/types';

-export const processForeignKeyConstraints: Processor = (builder, items, config) => {
+export const processForeignKeyConstraints: Processor = (ctx, items) => {
   for (const {
     item: { object, options },
   } of items.filter((item) => item.type === 'foreignKeyConstraint')) {
-    const table = builder.getTableByObject(object);
+    const table = ctx.getTableByObject(object);
     if (!table) {
-      builder.warnMissingTable('@ForeignKeyConstraint', { name: 'referenceTable' });
+      ctx.warnMissingTable('@ForeignKeyConstraint', { name: 'referenceTable' });
       continue;
     }

-    const referenceTable = builder.getTableByObject(options.referenceTable());
+    const referenceTable = ctx.getTableByObject(options.referenceTable());
     if (!referenceTable) {
       const referenceTableName = options.referenceTable()?.name;
-      builder.warn(
+      ctx.warn(
         '@ForeignKeyConstraint.referenceTable',
         `Unable to find table` + (referenceTableName ? ` (${referenceTableName})` : ''),
       );
@@ -25,16 +25,16 @@ export const processForeignKeyConstraints: Processor = (builder, items, config)

     for (const columnName of options.columns) {
       if (!table.columns.some(({ name }) => name === columnName)) {
-        const metadata = builder.getTableMetadata(table);
-        builder.warn('@ForeignKeyConstraint.columns', `Unable to find column (${metadata.object.name}.${columnName})`);
+        const metadata = ctx.getTableMetadata(table);
+        ctx.warn('@ForeignKeyConstraint.columns', `Unable to find column (${metadata.object.name}.${columnName})`);
         missingColumn = true;
       }
     }

     for (const columnName of options.referenceColumns || []) {
       if (!referenceTable.columns.some(({ name }) => name === columnName)) {
-        const metadata = builder.getTableMetadata(referenceTable);
-        builder.warn(
+        const metadata = ctx.getTableMetadata(referenceTable);
+        ctx.warn(
           '@ForeignKeyConstraint.referenceColumns',
           `Unable to find column (${metadata.object.name}.${columnName})`,
         );
@@ -67,9 +67,9 @@ export const processForeignKeyConstraints: Processor = (builder, items, config)
       continue;
     }

-    if (options.index || options.indexName || config.createForeignKeyIndexes) {
+    if (options.index || options.indexName || ctx.options.createForeignKeyIndexes) {
       table.indexes.push({
-        name: options.indexName || builder.asIndexName(table.name, options.columns),
+        name: options.indexName || ctx.asIndexName(table.name, options.columns),
         tableName: table.name,
         columnNames: options.columns,
         unique: false,
@@ -1,8 +1,12 @@
 import { Processor } from 'src/sql-tools/types';

-export const processFunctions: Processor = (builder, items) => {
+export const processFunctions: Processor = (ctx, items) => {
+  if (ctx.options.functions === false) {
+    return;
+  }
+
   for (const { item } of items.filter((item) => item.type === 'function')) {
     // TODO log warnings if function name is not unique
-    builder.functions.push(item);
+    ctx.functions.push(item);
   }
 };
@@ -1,17 +1,17 @@
 import { Processor } from 'src/sql-tools/types';

-export const processIndexes: Processor = (builder, items, config) => {
+export const processIndexes: Processor = (ctx, items) => {
   for (const {
     item: { object, options },
   } of items.filter((item) => item.type === 'index')) {
-    const table = builder.getTableByObject(object);
+    const table = ctx.getTableByObject(object);
     if (!table) {
-      builder.warnMissingTable('@Check', object);
+      ctx.warnMissingTable('@Check', object);
       continue;
     }

     table.indexes.push({
-      name: options.name || builder.asIndexName(table.name, options.columns, options.where),
+      name: options.name || ctx.asIndexName(table.name, options.columns, options.where),
       tableName: table.name,
       unique: options.unique ?? false,
       expression: options.expression,
@@ -28,15 +28,15 @@ export const processIndexes: Processor = (builder, items, config) => {
     type,
     item: { object, propertyName, options },
   } of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
-    const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
+    const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
     if (!table) {
-      builder.warnMissingTable('@Column', object);
+      ctx.warnMissingTable('@Column', object);
       continue;
     }

     if (!column) {
       // should be impossible since they are created in `column.processor.ts`
-      builder.warnMissingColumn('@Column', object, propertyName);
+      ctx.warnMissingColumn('@Column', object, propertyName);
       continue;
     }

@@ -45,12 +45,12 @@ export const processIndexes: Processor = (builder, items, config) => {
     }

     const isIndexRequested =
-      options.indexName || options.index || (type === 'foreignKeyColumn' && config.createForeignKeyIndexes);
+      options.indexName || options.index || (type === 'foreignKeyColumn' && ctx.options.createForeignKeyIndexes);
     if (!isIndexRequested) {
       continue;
     }

-    const indexName = options.indexName || builder.asIndexName(table.name, [column.name]);
+    const indexName = options.indexName || ctx.asIndexName(table.name, [column.name]);

     const isIndexPresent = table.indexes.some((index) => index.name === indexName);
     if (isIndexPresent) {
@@ -8,6 +8,7 @@ import { processForeignKeyColumns } from 'src/sql-tools/processors/foreign-key-c
 import { processForeignKeyConstraints } from 'src/sql-tools/processors/foreign-key-constraint.processor';
 import { processFunctions } from 'src/sql-tools/processors/function.processor';
 import { processIndexes } from 'src/sql-tools/processors/index.processor';
+import { processOverrides } from 'src/sql-tools/processors/override.processor';
 import { processPrimaryKeyConstraints } from 'src/sql-tools/processors/primary-key-contraint.processor';
 import { processTables } from 'src/sql-tools/processors/table.processor';
 import { processTriggers } from 'src/sql-tools/processors/trigger.processor';
@@ -29,4 +30,5 @@ export const processors: Processor[] = [
   processPrimaryKeyConstraints,
   processIndexes,
   processTriggers,
+  processOverrides,
 ];
server/src/sql-tools/processors/override.processor.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import { asFunctionCreate } from 'src/sql-tools/transformers/function.transformer';
import { asIndexCreate } from 'src/sql-tools/transformers/index.transformer';
import { asTriggerCreate } from 'src/sql-tools/transformers/trigger.transformer';
import { Processor } from 'src/sql-tools/types';

export const processOverrides: Processor = (ctx) => {
  if (ctx.options.overrides === false) {
    return;
  }

  for (const func of ctx.functions) {
    if (!func.synchronize) {
      continue;
    }

    ctx.overrides.push({
      name: `function_${func.name}`,
      value: { type: 'function', name: func.name, sql: asFunctionCreate(func) },
      synchronize: true,
    });
  }

  for (const { triggers, indexes } of ctx.tables) {
    for (const trigger of triggers) {
      if (!trigger.synchronize) {
        continue;
      }

      ctx.overrides.push({
        name: `trigger_${trigger.name}`,
        value: { type: 'trigger', name: trigger.name, sql: asTriggerCreate(trigger) },
        synchronize: true,
      });
    }

    for (const index of indexes) {
      if (!index.synchronize) {
        continue;
      }

      if (index.expression || index.using || index.with || index.where) {
        ctx.overrides.push({
          name: `index_${index.name}`,
          value: { type: 'index', name: index.name, sql: asIndexCreate(index) },
          synchronize: true,
        });
      }
    }
  }
};
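
Note the extra guard for indexes: only those with an expression, using, with, or where clause are recorded as overrides; plain column indexes keep the ordinary synchronize path. A hedged illustration using the decorators from this diff — the table and index names below are hypothetical:

import { Index, PrimaryColumn, Table } from 'src/sql-tools';

// Hypothetical example: the expression index would produce an
// 'index_idx_example_lower_name' override; the plain column index would not.
@Table({ name: 'example' })
@Index({ name: 'idx_example_lower_name', expression: `lower("name")` })
@Index({ name: 'idx_example_name', columns: ['name'] })
export class ExampleTable {
  @PrimaryColumn({ type: 'integer' })
  id!: number;
}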
@@ -1,8 +1,8 @@
 import { asKey } from 'src/sql-tools/helpers';
 import { ConstraintType, Processor } from 'src/sql-tools/types';

-export const processPrimaryKeyConstraints: Processor = (builder) => {
-  for (const table of builder.tables) {
+export const processPrimaryKeyConstraints: Processor = (ctx) => {
+  for (const table of ctx.tables) {
     const columnNames: string[] = [];

     for (const column of table.columns) {
@@ -12,7 +12,7 @@ export const processPrimaryKeyConstraints: Processor = (builder) => {
     }

     if (columnNames.length > 0) {
-      const tableMetadata = builder.getTableMetadata(table);
+      const tableMetadata = ctx.getTableMetadata(table);
       table.constraints.push({
         type: ConstraintType.PRIMARY_KEY,
         name: tableMetadata.options.primaryConstraintName || asPrimaryKeyConstraintName(table.name, columnNames),
@ -1,18 +1,18 @@
|
||||
import { asSnakeCase } from 'src/sql-tools/helpers';
|
||||
import { Processor } from 'src/sql-tools/types';
|
||||
|
||||
export const processTables: Processor = (builder, items) => {
|
||||
export const processTables: Processor = (ctx, items) => {
|
||||
for (const {
|
||||
item: { options, object },
|
||||
} of items.filter((item) => item.type === 'table')) {
|
||||
const test = builder.getTableByObject(object);
|
||||
const test = ctx.getTableByObject(object);
|
||||
if (test) {
|
||||
throw new Error(
|
||||
`Table ${test.name} has already been registered. Does ${object.name} have two @Table() decorators?`,
|
||||
);
|
||||
}
|
||||
|
||||
builder.addTable(
|
||||
ctx.addTable(
|
||||
{
|
||||
name: options.name || asSnakeCase(object.name),
|
||||
columns: [],
|
||||
|
@ -2,13 +2,13 @@ import { TriggerOptions } from 'src/sql-tools/decorators/trigger.decorator';
|
||||
import { asKey } from 'src/sql-tools/helpers';
|
||||
import { Processor } from 'src/sql-tools/types';
|
||||
|
||||
export const processTriggers: Processor = (builder, items) => {
|
||||
export const processTriggers: Processor = (ctx, items) => {
|
||||
for (const {
|
||||
item: { object, options },
|
||||
} of items.filter((item) => item.type === 'trigger')) {
|
||||
const table = builder.getTableByObject(object);
|
||||
const table = ctx.getTableByObject(object);
|
||||
if (!table) {
|
||||
builder.warnMissingTable('@Trigger', object);
|
||||
ctx.warnMissingTable('@Trigger', object);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -1,13 +1,13 @@
|
||||
import { asKey } from 'src/sql-tools/helpers';
|
||||
import { ConstraintType, Processor } from 'src/sql-tools/types';
|
||||
|
||||
export const processUniqueConstraints: Processor = (builder, items) => {
|
||||
export const processUniqueConstraints: Processor = (ctx, items) => {
|
||||
for (const {
|
||||
item: { object, options },
|
||||
} of items.filter((item) => item.type === 'uniqueConstraint')) {
|
||||
const table = builder.getTableByObject(object);
|
||||
const table = ctx.getTableByObject(object);
|
||||
if (!table) {
|
||||
builder.warnMissingTable('@Unique', object);
|
||||
ctx.warnMissingTable('@Unique', object);
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -28,15 +28,15 @@ export const processUniqueConstraints: Processor = (builder, items) => {
|
||||
type,
|
||||
item: { object, propertyName, options },
|
||||
} of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
|
||||
const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
|
||||
const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
|
||||
if (!table) {
|
||||
builder.warnMissingTable('@Column', object);
|
||||
ctx.warnMissingTable('@Column', object);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!column) {
|
||||
// should be impossible since they are created in `column.processor.ts`
|
||||
builder.warnMissingColumn('@Column', object, propertyName);
|
||||
ctx.warnMissingColumn('@Column', object, propertyName);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
import { sql } from 'kysely';
|
||||
import { jsonArrayFrom } from 'kysely/helpers/postgres';
|
||||
import { ColumnType, DatabaseColumn, DatabaseReader } from 'src/sql-tools/types';
|
||||
import { ColumnType, DatabaseColumn, Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readColumns: DatabaseReader = async (schema, db) => {
|
||||
export const readColumns: Reader = async (ctx, db) => {
|
||||
const columns = await db
|
||||
.selectFrom('information_schema.columns as c')
|
||||
.leftJoin('information_schema.element_types as o', (join) =>
|
||||
@ -42,13 +42,13 @@ export const readColumns: DatabaseReader = async (schema, db) => {
|
||||
// data type for ARRAYs
|
||||
'o.data_type as array_type',
|
||||
])
|
||||
.where('table_schema', '=', schema.schemaName)
|
||||
.where('table_schema', '=', ctx.schemaName)
|
||||
.execute();
|
||||
|
||||
const enumRaw = await db
|
||||
.selectFrom('pg_type')
|
||||
.innerJoin('pg_namespace', (join) =>
|
||||
join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', schema.schemaName),
|
||||
join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', ctx.schemaName),
|
||||
)
|
||||
.where('typtype', '=', sql.lit('e'))
|
||||
.select((eb) => [
|
||||
@ -61,13 +61,13 @@ export const readColumns: DatabaseReader = async (schema, db) => {
|
||||
|
||||
const enums = enumRaw.map((item) => ({ name: item.name, values: item.values.map(({ value }) => value) }));
|
||||
for (const { name, values } of enums) {
|
||||
schema.enums.push({ name, values, synchronize: true });
|
||||
ctx.enums.push({ name, values, synchronize: true });
|
||||
}
|
||||
|
||||
const enumMap = Object.fromEntries(enums.map((e) => [e.name, e.values]));
|
||||
// add columns to tables
|
||||
for (const column of columns) {
|
||||
const table = schema.tables.find((table) => table.name === column.table_name);
|
||||
const table = ctx.getTableByName(column.table_name);
|
||||
if (!table) {
|
||||
continue;
|
||||
}
|
||||
@ -93,7 +93,7 @@ export const readColumns: DatabaseReader = async (schema, db) => {
|
||||
// array types
|
||||
case 'ARRAY': {
|
||||
if (!column.array_type) {
|
||||
schema.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
|
||||
ctx.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
|
||||
continue;
|
||||
}
|
||||
item.type = column.array_type as ColumnType;
|
||||
@ -103,7 +103,7 @@ export const readColumns: DatabaseReader = async (schema, db) => {
|
||||
// enum types
|
||||
case 'USER-DEFINED': {
|
||||
if (!enumMap[column.udt_name]) {
|
||||
schema.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
|
||||
ctx.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { DatabaseReader } from 'src/sql-tools/types';
|
||||
import { Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readComments: DatabaseReader = async (schema, db) => {
|
||||
export const readComments: Reader = async (ctx, db) => {
|
||||
const comments = await db
|
||||
.selectFrom('pg_description as d')
|
||||
.innerJoin('pg_class as c', 'd.objoid', 'c.oid')
|
||||
@ -20,7 +20,7 @@ export const readComments: DatabaseReader = async (schema, db) => {
|
||||
|
||||
for (const comment of comments) {
|
||||
if (comment.object_type === 'r') {
|
||||
const table = schema.tables.find((table) => table.name === comment.object_name);
|
||||
const table = ctx.getTableByName(comment.object_name);
|
||||
if (!table) {
|
||||
continue;
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { sql } from 'kysely';
|
||||
import { ActionType, ConstraintType, DatabaseReader } from 'src/sql-tools/types';
|
||||
import { ActionType, ConstraintType, Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readConstraints: DatabaseReader = async (schema, db) => {
|
||||
export const readConstraints: Reader = async (ctx, db) => {
|
||||
const constraints = await db
|
||||
.selectFrom('pg_constraint')
|
||||
.innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_constraint.connamespace') // namespace
|
||||
@ -40,11 +40,11 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
|
||||
.as('reference_column_names'),
|
||||
eb.fn<string>('pg_get_constraintdef', ['pg_constraint.oid']).as('expression'),
|
||||
])
|
||||
.where('pg_namespace.nspname', '=', schema.schemaName)
|
||||
.where('pg_namespace.nspname', '=', ctx.schemaName)
|
||||
.execute();
|
||||
|
||||
for (const constraint of constraints) {
|
||||
const table = schema.tables.find((table) => table.name === constraint.table_name);
|
||||
const table = ctx.getTableByName(constraint.table_name);
|
||||
if (!table) {
|
||||
continue;
|
||||
}
|
||||
@ -55,7 +55,7 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
|
||||
// primary key constraint
|
||||
case 'p': {
|
||||
if (!constraint.column_names) {
|
||||
schema.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
|
||||
ctx.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
|
||||
continue;
|
||||
}
|
||||
table.constraints.push({
|
||||
@ -71,7 +71,7 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
|
||||
// foreign key constraint
|
||||
case 'f': {
|
||||
if (!constraint.column_names || !constraint.reference_table_name || !constraint.reference_column_names) {
|
||||
schema.warnings.push(
|
||||
ctx.warnings.push(
|
||||
`Skipping CONSTRAINT "${constraintName}", missing either columns, referenced table, or referenced columns,`,
|
||||
);
|
||||
continue;
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { DatabaseReader } from 'src/sql-tools/types';
|
||||
import { Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readExtensions: DatabaseReader = async (schema, db) => {
|
||||
export const readExtensions: Reader = async (ctx, db) => {
|
||||
const extensions = await db
|
||||
.selectFrom('pg_catalog.pg_extension')
|
||||
// .innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_catalog.pg_extension.extnamespace')
|
||||
@ -9,6 +9,6 @@ export const readExtensions: DatabaseReader = async (schema, db) => {
|
||||
.execute();
|
||||
|
||||
for (const { name } of extensions) {
|
||||
schema.extensions.push({ name, synchronize: true });
|
||||
ctx.extensions.push({ name, synchronize: true });
|
||||
}
|
||||
};
|
||||
|
@ -1,14 +1,14 @@
|
||||
import { sql } from 'kysely';
|
||||
import { DatabaseReader } from 'src/sql-tools/types';
|
||||
import { Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readFunctions: DatabaseReader = async (schema, db) => {
|
||||
export const readFunctions: Reader = async (ctx, db) => {
|
||||
const routines = await db
|
||||
.selectFrom('pg_proc as p')
|
||||
.innerJoin('pg_namespace', 'pg_namespace.oid', 'p.pronamespace')
|
||||
.leftJoin('pg_depend as d', (join) => join.onRef('d.objid', '=', 'p.oid').on('d.deptype', '=', sql.lit('e')))
|
||||
.where('d.objid', 'is', sql.lit(null))
|
||||
.where('p.prokind', '=', sql.lit('f'))
|
||||
.where('pg_namespace.nspname', '=', schema.schemaName)
|
||||
.where('pg_namespace.nspname', '=', ctx.schemaName)
|
||||
.select((eb) => [
|
||||
'p.proname as name',
|
||||
eb.fn<string>('pg_get_function_identity_arguments', ['p.oid']).as('arguments'),
|
||||
@ -17,7 +17,7 @@ export const readFunctions: DatabaseReader = async (schema, db) => {
|
||||
.execute();
|
||||
|
||||
for (const { name, expression } of routines) {
|
||||
schema.functions.push({
|
||||
ctx.functions.push({
|
||||
name,
|
||||
// TODO read expression from the overrides table
|
||||
expression,
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { sql } from 'kysely';
|
||||
import { DatabaseReader } from 'src/sql-tools/types';
|
||||
import { Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readIndexes: DatabaseReader = async (schema, db) => {
|
||||
export const readIndexes: Reader = async (ctx, db) => {
|
||||
const indexes = await db
|
||||
.selectFrom('pg_index as ix')
|
||||
// matching index, which has column information
|
||||
@ -34,12 +34,12 @@ export const readIndexes: DatabaseReader = async (schema, db) => {
|
||||
.select((eb) => eb.fn<string[]>('json_agg', ['a.attname']).as('column_name'))
|
||||
.as('column_names'),
|
||||
])
|
||||
.where('pg_namespace.nspname', '=', schema.schemaName)
|
||||
.where('pg_namespace.nspname', '=', ctx.schemaName)
|
||||
.where('ix.indisprimary', '=', sql.lit(false))
|
||||
.execute();
|
||||
|
||||
for (const index of indexes) {
|
||||
const table = schema.tables.find((table) => table.name === index.table_name);
|
||||
const table = ctx.getTableByName(index.table_name);
|
||||
if (!table) {
|
||||
continue;
|
||||
}
|
||||
|
@ -5,13 +5,13 @@ import { readExtensions } from 'src/sql-tools/readers/extension.reader';
|
||||
import { readFunctions } from 'src/sql-tools/readers/function.reader';
|
||||
import { readIndexes } from 'src/sql-tools/readers/index.reader';
|
||||
import { readName } from 'src/sql-tools/readers/name.reader';
|
||||
import { readOverrides } from 'src/sql-tools/readers/override.reader';
|
||||
import { readParameters } from 'src/sql-tools/readers/parameter.reader';
|
||||
import { readTables } from 'src/sql-tools/readers/table.reader';
|
||||
import { readTriggers } from 'src/sql-tools/readers/trigger.reader';
|
||||
import { DatabaseReader } from 'src/sql-tools/types';
|
||||
import { Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readers: DatabaseReader[] = [
|
||||
//
|
||||
export const readers: Reader[] = [
|
||||
readName,
|
||||
readParameters,
|
||||
readExtensions,
|
||||
@ -22,4 +22,5 @@ export const readers: DatabaseReader[] = [
|
||||
readConstraints,
|
||||
readTriggers,
|
||||
readComments,
|
||||
readOverrides,
|
||||
];
|
||||
|
@ -1,8 +1,8 @@
|
||||
import { QueryResult, sql } from 'kysely';
|
||||
import { DatabaseReader } from 'src/sql-tools/types';
|
||||
import { Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readName: DatabaseReader = async (schema, db) => {
|
||||
export const readName: Reader = async (ctx, db) => {
|
||||
const result = (await sql`SELECT current_database() as name`.execute(db)) as QueryResult<{ name: string }>;
|
||||
|
||||
schema.databaseName = result.rows[0].name;
|
||||
ctx.databaseName = result.rows[0].name;
|
||||
};
|
||||
|
19
server/src/sql-tools/readers/override.reader.ts
Normal file
19
server/src/sql-tools/readers/override.reader.ts
Normal file
@ -0,0 +1,19 @@
|
||||
import { sql } from 'kysely';
|
||||
import { OverrideType, Reader } from 'src/sql-tools/types';
|
||||
|
||||
export const readOverrides: Reader = async (ctx, db) => {
|
||||
try {
|
||||
const result = await sql
|
||||
.raw<{
|
||||
name: string;
|
||||
value: { type: OverrideType; name: string; sql: string };
|
||||
}>(`SELECT name, value FROM "${ctx.overrideTableName}"`)
|
||||
.execute(db);
|
||||
|
||||
for (const { name, value } of result.rows) {
|
||||
ctx.overrides.push({ name, value, synchronize: true });
|
||||
}
|
||||
} catch (error) {
|
||||
ctx.warn('Overrides', `Error reading override table: ${error}`);
|
||||
}
|
||||
};
|
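The reader is the mirror image of the processor: it rehydrates previously persisted override rows into ctx.overrides so they can be compared against the code-defined ones. The try/catch matters on a fresh database, where the override table does not exist yet; the failed SELECT is downgraded to a warning instead of aborting the whole schema read. A hedged sketch of the round trip, with "my_overrides" standing in for ctx.overrideTableName:

  // rows come back with exactly the shape the processor wrote
  const { rows } = await sql
    .raw<{ name: string; value: { type: OverrideType; name: string; sql: string } }>(
      `SELECT name, value FROM "my_overrides"`,
    )
    .execute(db);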
@@ -1,7 +1,7 @@
 import { sql } from 'kysely';
-import { DatabaseReader, ParameterScope } from 'src/sql-tools/types';
+import { ParameterScope, Reader } from 'src/sql-tools/types';

-export const readParameters: DatabaseReader = async (schema, db) => {
+export const readParameters: Reader = async (ctx, db) => {
   const parameters = await db
     .selectFrom('pg_settings')
     .where('source', 'in', [sql.lit('database'), sql.lit('user')])
@@ -9,10 +9,10 @@ export const readParameters: DatabaseReader = async (schema, db) => {
     .execute();

   for (const parameter of parameters) {
-    schema.parameters.push({
+    ctx.parameters.push({
       name: parameter.name,
       value: parameter.value,
-      databaseName: schema.databaseName,
+      databaseName: ctx.databaseName,
       scope: parameter.scope as ParameterScope,
       synchronize: true,
     });

@@ -1,16 +1,16 @@
 import { sql } from 'kysely';
-import { DatabaseReader } from 'src/sql-tools/types';
+import { Reader } from 'src/sql-tools/types';

-export const readTables: DatabaseReader = async (schema, db) => {
+export const readTables: Reader = async (ctx, db) => {
   const tables = await db
     .selectFrom('information_schema.tables')
-    .where('table_schema', '=', schema.schemaName)
+    .where('table_schema', '=', ctx.schemaName)
     .where('table_type', '=', sql.lit('BASE TABLE'))
     .selectAll()
     .execute();

   for (const table of tables) {
-    schema.tables.push({
+    ctx.tables.push({
       name: table.table_name,
       columns: [],
       indexes: [],

@@ -1,6 +1,6 @@
-import { DatabaseReader, TriggerAction, TriggerScope, TriggerTiming } from 'src/sql-tools/types';
+import { Reader, TriggerAction, TriggerScope, TriggerTiming } from 'src/sql-tools/types';

-export const readTriggers: DatabaseReader = async (schema, db) => {
+export const readTriggers: Reader = async (ctx, db) => {
   const triggers = await db
     .selectFrom('pg_trigger as t')
     .innerJoin('pg_proc as p', 't.tgfoid', 'p.oid')
@@ -21,12 +21,12 @@ export const readTriggers: DatabaseReader = async (schema, db) => {
       'c.relname as table_name',
     ])
     .where('t.tgisinternal', '=', false) // Exclude internal system triggers
-    .where('n.nspname', '=', schema.schemaName)
+    .where('n.nspname', '=', ctx.schemaName)
     .execute();

   // add triggers to tables
   for (const trigger of triggers) {
-    const table = schema.tables.find((table) => table.name === trigger.table_name);
+    const table = ctx.getTableByName(trigger.table_name);
     if (!table) {
       continue;
     }

@@ -21,6 +21,7 @@ const fromColumn = (column: Partial<Omit<DatabaseColumn, 'tableName'>>): DatabaseSchema
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: tableName,
@@ -55,6 +56,7 @@ const fromConstraint = (constraint?: DatabaseConstraint): DatabaseSchema => {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: tableName,
@@ -88,6 +90,7 @@ const fromIndex = (index?: DatabaseIndex): DatabaseSchema => {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: tableName,
@@ -161,6 +164,7 @@ const newSchema = (schema: {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables,
   warnings: [],
 };

@@ -1,8 +1,10 @@
 import { compareEnums } from 'src/sql-tools/comparers/enum.comparer';
 import { compareExtensions } from 'src/sql-tools/comparers/extension.comparer';
 import { compareFunctions } from 'src/sql-tools/comparers/function.comparer';
+import { compareOverrides } from 'src/sql-tools/comparers/override.comparer';
 import { compareParameters } from 'src/sql-tools/comparers/parameter.comparer';
 import { compareTables } from 'src/sql-tools/comparers/table.comparer';
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { compare } from 'src/sql-tools/helpers';
 import { transformers } from 'src/sql-tools/transformers';
 import {
@@ -19,10 +21,11 @@
 export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, options: SchemaDiffOptions = {}) => {
   const items = [
     ...compare(source.parameters, target.parameters, options.parameters, compareParameters),
-    ...compare(source.extensions, target.extensions, options.extension, compareExtensions),
+    ...compare(source.extensions, target.extensions, options.extensions, compareExtensions),
     ...compare(source.functions, target.functions, options.functions, compareFunctions),
     ...compare(source.enums, target.enums, options.enums, compareEnums),
     ...compare(source.tables, target.tables, options.tables, compareTables),
+    ...compare(source.overrides, target.overrides, options.overrides, compareOverrides),
   ];

   type SchemaName = SchemaDiff['type'];
@@ -46,6 +49,9 @@ export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, options: SchemaDiffOptions = {}) => {
     TriggerDrop: [],
     ParameterSet: [],
     ParameterReset: [],
+    OverrideCreate: [],
+    OverrideUpdate: [],
+    OverrideDrop: [],
   };

   for (const item of items) {
@@ -76,6 +82,9 @@ export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, options: SchemaDiffOptions = {}) => {
     ...itemMap.TableDrop,
     ...itemMap.EnumDrop,
     ...itemMap.FunctionDrop,
+    ...itemMap.OverrideCreate,
+    ...itemMap.OverrideUpdate,
+    ...itemMap.OverrideDrop,
   ];

   return {
@@ -88,17 +97,18 @@
  * Convert schema diffs into SQL statements
  */
 export const schemaDiffToSql = (items: SchemaDiff[], options: SchemaDiffToSqlOptions = {}): string[] => {
-  return items.flatMap((item) => asSql(item).map((result) => result + withComments(options.comments, item)));
+  return items.flatMap((item) => asSql(item, options));
 };

-const asSql = (item: SchemaDiff): string[] => {
+const asSql = (item: SchemaDiff, options: SchemaDiffToSqlOptions): string[] => {
+  const ctx = new BaseContext(options);
   for (const transform of transformers) {
-    const result = transform(item);
+    const result = transform(ctx, item);
     if (!result) {
       continue;
     }

-    return asArray(result);
+    return asArray(result).map((result) => result + withComments(options.comments, item));
   }

   throw new Error(`Unhandled schema diff type: ${item.type}`);
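End to end the two calls compose as before; the visible changes are the `extension` → `extensions` rename, the new `overrides` comparer, and that schemaDiffToSql now builds a BaseContext from its options so transformers can resolve names like the override table. A sketch, assuming the object returned by schemaDiff exposes the ordered diff items as `items`:

  const diff = schemaDiff(source, target, {
    extensions: { ignoreExtra: true },  // renamed from `extension`
    overrides: { ignoreMissing: true }, // new comparer, same IgnoreOptions shape
  });
  const statements = schemaDiffToSql(diff.items, { comments: true });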
@@ -1,8 +1,16 @@
 import { readdirSync } from 'node:fs';
 import { join } from 'node:path';
 import { schemaFromCode } from 'src/sql-tools/schema-from-code';
+import { SchemaFromCodeOptions } from 'src/sql-tools/types';
 import { describe, expect, it } from 'vitest';

+const importModule = async (filePath: string) => {
+  const module = await import(filePath);
+  const options: SchemaFromCodeOptions = module.options;
+
+  return { module, options };
+};
+
 describe(schemaFromCode.name, () => {
   it('should work', () => {
     expect(schemaFromCode({ reset: true })).toEqual({
@@ -12,6 +20,7 @@ describe(schemaFromCode.name, () => {
       enums: [],
       extensions: [],
       parameters: [],
+      overrides: [],
       tables: [],
       warnings: [],
     });
@@ -22,9 +31,10 @@ describe(schemaFromCode.name, () => {
   for (const file of errorStubs) {
     const filePath = join(file.parentPath, file.name);
     it(filePath, async () => {
-      const module = await import(filePath);
+      const { module, options } = await importModule(filePath);

       expect(module.message).toBeDefined();
-      expect(() => schemaFromCode({ reset: true })).toThrowError(module.message);
+      expect(() => schemaFromCode({ ...options, reset: true })).toThrowError(module.message);
     });
   }

@@ -36,10 +46,11 @@ describe(schemaFromCode.name, () => {

     const filePath = join(file.parentPath, file.name);
     it(filePath, async () => {
-      const module = await import(filePath);
+      const { module, options } = await importModule(filePath);

       expect(module.description).toBeDefined();
       expect(module.schema).toBeDefined();
-      expect(schemaFromCode({ reset: true }), module.description).toEqual(module.schema);
+      expect(schemaFromCode({ ...options, reset: true }), module.description).toEqual(module.schema);
     });
   }
 });

@@ -1,26 +1,58 @@
+import { ProcessorContext } from 'src/sql-tools/contexts/processor-context';
 import { processors } from 'src/sql-tools/processors';
 import { getRegisteredItems, resetRegisteredItems } from 'src/sql-tools/register';
-import { SchemaBuilder } from 'src/sql-tools/schema-builder';
-import { SchemaFromCodeOptions } from 'src/sql-tools/types';
+import { ConstraintType, SchemaFromCodeOptions } from 'src/sql-tools/types';

 /**
  * Load schema from code (decorators, etc)
  */
 export const schemaFromCode = (options: SchemaFromCodeOptions = {}) => {
   try {
-    const globalOptions = {
-      createForeignKeyIndexes: options.createForeignKeyIndexes ?? true,
-    };
-
-    const builder = new SchemaBuilder(options);
+    const ctx = new ProcessorContext(options);
     const items = getRegisteredItems();

     for (const processor of processors) {
-      processor(builder, items, globalOptions);
+      processor(ctx, items);
     }

-    const newSchema = builder.build();
+    if (ctx.options.overrides) {
+      ctx.tables.push({
+        name: ctx.overrideTableName,
+        columns: [
+          {
+            primary: true,
+            name: 'name',
+            tableName: ctx.overrideTableName,
+            type: 'character varying',
+            nullable: false,
+            isArray: false,
+            synchronize: true,
+          },
+          {
+            name: 'value',
+            tableName: ctx.overrideTableName,
+            type: 'jsonb',
+            nullable: false,
+            isArray: false,
+            synchronize: true,
+          },
+        ],
+        indexes: [],
+        triggers: [],
+        constraints: [
+          {
+            type: ConstraintType.PRIMARY_KEY,
+            name: `${ctx.overrideTableName}_pkey`,
+            tableName: ctx.overrideTableName,
+            columnNames: ['name'],
+            synchronize: true,
+          },
+        ],
+        synchronize: true,
+      });
+    }

-    return newSchema;
+    return ctx.build();
   } finally {
     if (options.reset) {
       resetRegisteredItems();
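When overrides are enabled, the synthetic table pushed above is rendered by the ordinary TableCreate path; a sketch of the DDL it corresponds to, using ctx.overrideTableName for the name:

  const expectedDdl = `
    CREATE TABLE "${ctx.overrideTableName}" (
      "name" character varying NOT NULL,
      "value" jsonb NOT NULL,
      CONSTRAINT "${ctx.overrideTableName}_pkey" PRIMARY KEY ("name")
    );`;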
@@ -1,6 +1,7 @@
 import { Kysely } from 'kysely';
 import { PostgresJSDialect } from 'kysely-postgres-js';
 import { Sql } from 'postgres';
+import { ReaderContext } from 'src/sql-tools/contexts/reader-context';
 import { readers } from 'src/sql-tools/readers';
 import { DatabaseSchema, PostgresDB, SchemaFromDatabaseOptions } from 'src/sql-tools/types';

@@ -11,23 +12,16 @@ export const schemaFromDatabase = async (
   postgres: Sql,
   options: SchemaFromDatabaseOptions = {},
 ): Promise<DatabaseSchema> => {
-  const schema: DatabaseSchema = {
-    databaseName: 'immich',
-    schemaName: options.schemaName || 'public',
-    parameters: [],
-    functions: [],
-    enums: [],
-    extensions: [],
-    tables: [],
-    warnings: [],
-  };
-
   const db = new Kysely<PostgresDB>({ dialect: new PostgresJSDialect({ postgres }) });
-  for (const reader of readers) {
-    await reader(schema, db);
-  }
-
-  await db.destroy();
-
-  return schema;
+  const ctx = new ReaderContext(options);
+
+  try {
+    for (const reader of readers) {
+      await reader(ctx, db);
+    }
+
+    return ctx.build();
+  } finally {
+    await db.destroy();
+  }
 };
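The try/finally guarantees the Kysely client is destroyed even when a reader throws, which the old linear flow did not. The call site is unchanged; a sketch, with `pg` standing in for an existing postgres.js connection:

  const target = await schemaFromDatabase(pg, { schemaName: 'public' });
  console.log(target.warnings.join('\n'));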
@@ -1,11 +1,14 @@
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { transformColumns } from 'src/sql-tools/transformers/column.transformer';
 import { describe, expect, it } from 'vitest';

+const ctx = new BaseContext({});
+
 describe(transformColumns.name, () => {
   describe('ColumnAdd', () => {
     it('should work', () => {
       expect(
-        transformColumns({
+        transformColumns(ctx, {
           type: 'ColumnAdd',
           column: {
             name: 'column1',
@@ -22,7 +25,7 @@ describe(transformColumns.name, () => {

     it('should add a nullable column', () => {
       expect(
-        transformColumns({
+        transformColumns(ctx, {
           type: 'ColumnAdd',
           column: {
             name: 'column1',
@@ -39,7 +42,7 @@ describe(transformColumns.name, () => {

     it('should add a column with an enum type', () => {
       expect(
-        transformColumns({
+        transformColumns(ctx, {
           type: 'ColumnAdd',
           column: {
             name: 'column1',
@@ -57,7 +60,7 @@ describe(transformColumns.name, () => {

     it('should add a column that is an array type', () => {
       expect(
-        transformColumns({
+        transformColumns(ctx, {
           type: 'ColumnAdd',
           column: {
             name: 'column1',
@@ -76,7 +79,7 @@ describe(transformColumns.name, () => {
   describe('ColumnAlter', () => {
     it('should make a column nullable', () => {
       expect(
-        transformColumns({
+        transformColumns(ctx, {
           type: 'ColumnAlter',
           tableName: 'table1',
           columnName: 'column1',
@@ -88,7 +91,7 @@ describe(transformColumns.name, () => {

     it('should make a column non-nullable', () => {
       expect(
-        transformColumns({
+        transformColumns(ctx, {
           type: 'ColumnAlter',
           tableName: 'table1',
           columnName: 'column1',
@@ -100,7 +103,7 @@ describe(transformColumns.name, () => {

     it('should update the default value', () => {
       expect(
-        transformColumns({
+        transformColumns(ctx, {
           type: 'ColumnAlter',
           tableName: 'table1',
           columnName: 'column1',
@@ -114,7 +117,7 @@ describe(transformColumns.name, () => {
   describe('ColumnDrop', () => {
     it('should work', () => {
       expect(
-        transformColumns({
+        transformColumns(ctx, {
           type: 'ColumnDrop',
           tableName: 'table1',
           columnName: 'column1',

@@ -1,8 +1,8 @@
 import { asColumnComment, getColumnModifiers, getColumnType } from 'src/sql-tools/helpers';
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { ColumnChanges, DatabaseColumn, SchemaDiff } from 'src/sql-tools/types';
+import { ColumnChanges, DatabaseColumn } from 'src/sql-tools/types';

-export const transformColumns: SqlTransformer = (item: SchemaDiff) => {
+export const transformColumns: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'ColumnAdd': {
       return asColumnAdd(item.column);

@@ -1,13 +1,16 @@
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { transformConstraints } from 'src/sql-tools/transformers/constraint.transformer';
 import { ConstraintType } from 'src/sql-tools/types';
 import { describe, expect, it } from 'vitest';

+const ctx = new BaseContext({});
+
 describe(transformConstraints.name, () => {
   describe('ConstraintAdd', () => {
     describe('primary keys', () => {
       it('should work', () => {
         expect(
-          transformConstraints({
+          transformConstraints(ctx, {
             type: 'ConstraintAdd',
             constraint: {
               type: ConstraintType.PRIMARY_KEY,
@@ -25,7 +28,7 @@ describe(transformConstraints.name, () => {
     describe('foreign keys', () => {
       it('should work', () => {
         expect(
-          transformConstraints({
+          transformConstraints(ctx, {
             type: 'ConstraintAdd',
             constraint: {
               type: ConstraintType.FOREIGN_KEY,
@@ -47,7 +50,7 @@ describe(transformConstraints.name, () => {
     describe('unique', () => {
       it('should work', () => {
         expect(
-          transformConstraints({
+          transformConstraints(ctx, {
             type: 'ConstraintAdd',
             constraint: {
               type: ConstraintType.UNIQUE,
@@ -65,7 +68,7 @@ describe(transformConstraints.name, () => {
     describe('check', () => {
       it('should work', () => {
         expect(
-          transformConstraints({
+          transformConstraints(ctx, {
             type: 'ConstraintAdd',
             constraint: {
               type: ConstraintType.CHECK,
@@ -84,7 +87,7 @@ describe(transformConstraints.name, () => {
   describe('ConstraintDrop', () => {
     it('should work', () => {
       expect(
-        transformConstraints({
+        transformConstraints(ctx, {
           type: 'ConstraintDrop',
           tableName: 'table1',
           constraintName: 'PK_test',

@@ -1,8 +1,8 @@
 import { asColumnList } from 'src/sql-tools/helpers';
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { ActionType, ConstraintType, DatabaseConstraint, SchemaDiff } from 'src/sql-tools/types';
+import { ActionType, ConstraintType, DatabaseConstraint } from 'src/sql-tools/types';

-export const transformConstraints: SqlTransformer = (item: SchemaDiff) => {
+export const transformConstraints: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'ConstraintAdd': {
       return asConstraintAdd(item.constraint);

@@ -1,7 +1,7 @@
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { DatabaseEnum, SchemaDiff } from 'src/sql-tools/types';
+import { DatabaseEnum } from 'src/sql-tools/types';

-export const transformEnums: SqlTransformer = (item: SchemaDiff) => {
+export const transformEnums: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'EnumCreate': {
       return asEnumCreate(item.enum);

@@ -1,11 +1,14 @@
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { transformExtensions } from 'src/sql-tools/transformers/extension.transformer';
 import { describe, expect, it } from 'vitest';

+const ctx = new BaseContext({});
+
 describe(transformExtensions.name, () => {
   describe('ExtensionDrop', () => {
     it('should work', () => {
       expect(
-        transformExtensions({
+        transformExtensions(ctx, {
           type: 'ExtensionDrop',
           extensionName: 'cube',
           reason: 'unknown',
@@ -17,7 +20,7 @@ describe(transformExtensions.name, () => {
   describe('ExtensionCreate', () => {
     it('should work', () => {
       expect(
-        transformExtensions({
+        transformExtensions(ctx, {
           type: 'ExtensionCreate',
           extension: {
             name: 'cube',

@@ -1,7 +1,7 @@
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { DatabaseExtension, SchemaDiff } from 'src/sql-tools/types';
+import { DatabaseExtension } from 'src/sql-tools/types';

-export const transformExtensions: SqlTransformer = (item: SchemaDiff) => {
+export const transformExtensions: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'ExtensionCreate': {
       return asExtensionCreate(item.extension);

@@ -1,11 +1,14 @@
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { transformFunctions } from 'src/sql-tools/transformers/function.transformer';
 import { describe, expect, it } from 'vitest';

+const ctx = new BaseContext({});
+
 describe(transformFunctions.name, () => {
   describe('FunctionDrop', () => {
     it('should work', () => {
       expect(
-        transformFunctions({
+        transformFunctions(ctx, {
           type: 'FunctionDrop',
           functionName: 'test_func',
           reason: 'unknown',

@@ -1,7 +1,7 @@
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { DatabaseFunction, SchemaDiff } from 'src/sql-tools/types';
+import { DatabaseFunction } from 'src/sql-tools/types';

-export const transformFunctions: SqlTransformer = (item: SchemaDiff) => {
+export const transformFunctions: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'FunctionCreate': {
       return asFunctionCreate(item.function);
@@ -17,7 +17,7 @@ export const transformFunctions: SqlTransformer = (item: SchemaDiff) => {
   }
 };

-const asFunctionCreate = (func: DatabaseFunction): string => {
+export const asFunctionCreate = (func: DatabaseFunction): string => {
   return func.expression;
 };

@@ -1,11 +1,14 @@
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { transformIndexes } from 'src/sql-tools/transformers/index.transformer';
 import { describe, expect, it } from 'vitest';

+const ctx = new BaseContext({});
+
 describe(transformIndexes.name, () => {
   describe('IndexCreate', () => {
     it('should work', () => {
       expect(
-        transformIndexes({
+        transformIndexes(ctx, {
           type: 'IndexCreate',
           index: {
             name: 'IDX_test',
@@ -21,7 +24,7 @@ describe(transformIndexes.name, () => {

     it('should create an unique index', () => {
       expect(
-        transformIndexes({
+        transformIndexes(ctx, {
           type: 'IndexCreate',
           index: {
             name: 'IDX_test',
@@ -37,7 +40,7 @@ describe(transformIndexes.name, () => {

     it('should create an index with a custom expression', () => {
       expect(
-        transformIndexes({
+        transformIndexes(ctx, {
           type: 'IndexCreate',
           index: {
             name: 'IDX_test',
@@ -53,7 +56,7 @@ describe(transformIndexes.name, () => {

     it('should create an index with a where clause', () => {
       expect(
-        transformIndexes({
+        transformIndexes(ctx, {
           type: 'IndexCreate',
           index: {
             name: 'IDX_test',
@@ -70,7 +73,7 @@ describe(transformIndexes.name, () => {

     it('should create an index with a custom expression', () => {
       expect(
-        transformIndexes({
+        transformIndexes(ctx, {
           type: 'IndexCreate',
           index: {
             name: 'IDX_test',
@@ -89,7 +92,7 @@ describe(transformIndexes.name, () => {
   describe('IndexDrop', () => {
     it('should work', () => {
       expect(
-        transformIndexes({
+        transformIndexes(ctx, {
           type: 'IndexDrop',
           indexName: 'IDX_test',
           reason: 'unknown',

@@ -1,8 +1,8 @@
 import { asColumnList } from 'src/sql-tools/helpers';
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { DatabaseIndex, SchemaDiff } from 'src/sql-tools/types';
+import { DatabaseIndex } from 'src/sql-tools/types';

-export const transformIndexes: SqlTransformer = (item: SchemaDiff) => {
+export const transformIndexes: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'IndexCreate': {
       return asIndexCreate(item.index);

@@ -4,6 +4,7 @@ import { transformEnums } from 'src/sql-tools/transformers/enum.transformer';
 import { transformExtensions } from 'src/sql-tools/transformers/extension.transformer';
 import { transformFunctions } from 'src/sql-tools/transformers/function.transformer';
 import { transformIndexes } from 'src/sql-tools/transformers/index.transformer';
+import { transformOverrides } from 'src/sql-tools/transformers/override.transformer';
 import { transformParameters } from 'src/sql-tools/transformers/parameter.transformer';
 import { transformTables } from 'src/sql-tools/transformers/table.transformer';
 import { transformTriggers } from 'src/sql-tools/transformers/trigger.transformer';
@@ -19,4 +20,5 @@ export const transformers: SqlTransformer[] = [
   transformParameters,
   transformTables,
   transformTriggers,
+  transformOverrides,
 ];

37  server/src/sql-tools/transformers/override.transformer.ts  Normal file
@@ -0,0 +1,37 @@
+import { asJsonString } from 'src/sql-tools/helpers';
+import { SqlTransformer } from 'src/sql-tools/transformers/types';
+import { DatabaseOverride } from 'src/sql-tools/types';
+
+export const transformOverrides: SqlTransformer = (ctx, item) => {
+  const tableName = ctx.overrideTableName;
+
+  switch (item.type) {
+    case 'OverrideCreate': {
+      return asOverrideCreate(tableName, item.override);
+    }
+
+    case 'OverrideUpdate': {
+      return asOverrideUpdate(tableName, item.override);
+    }
+
+    case 'OverrideDrop': {
+      return asOverrideDrop(tableName, item.overrideName);
+    }
+
+    default: {
+      return false;
+    }
+  }
+};
+
+export const asOverrideCreate = (tableName: string, override: DatabaseOverride): string => {
+  return `INSERT INTO "${tableName}" ("name", "value") VALUES ('${override.name}', ${asJsonString(override.value)});`;
+};
+
+export const asOverrideUpdate = (tableName: string, override: DatabaseOverride): string => {
+  return `UPDATE "${tableName}" SET "value" = ${asJsonString(override.value)} WHERE "name" = '${override.name}';`;
+};
+
+export const asOverrideDrop = (tableName: string, overrideName: string): string => {
+  return `DELETE FROM "${tableName}" WHERE "name" = '${overrideName}';`;
+};
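These three helpers emit plain DML against the override table, so applying a diff keeps the stored SQL snapshots in step with the schema itself. A sketch of the output for the hypothetical function override from earlier, with "my_overrides" standing in for the table name (exact JSON escaping is asJsonString's concern):

  const statement = asOverrideCreate('my_overrides', override);
  // INSERT INTO "my_overrides" ("name", "value") VALUES ('function_my_func', <json value>);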
@@ -1,7 +1,7 @@
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { DatabaseParameter, SchemaDiff } from 'src/sql-tools/types';
+import { DatabaseParameter } from 'src/sql-tools/types';

-export const transformParameters: SqlTransformer = (item: SchemaDiff) => {
+export const transformParameters: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'ParameterSet': {
       return asParameterSet(item.parameter);

@@ -1,11 +1,14 @@
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { transformTables } from 'src/sql-tools/transformers/table.transformer';
 import { describe, expect, it } from 'vitest';

+const ctx = new BaseContext({});
+
 describe(transformTables.name, () => {
   describe('TableDrop', () => {
     it('should work', () => {
       expect(
-        transformTables({
+        transformTables(ctx, {
           type: 'TableDrop',
           tableName: 'table1',
           reason: 'unknown',
@@ -17,7 +20,7 @@ describe(transformTables.name, () => {
   describe('TableCreate', () => {
     it('should work', () => {
       expect(
-        transformTables({
+        transformTables(ctx, {
           type: 'TableCreate',
           table: {
             name: 'table1',
@@ -43,7 +46,7 @@ describe(transformTables.name, () => {

     it('should handle a non-nullable column', () => {
       expect(
-        transformTables({
+        transformTables(ctx, {
           type: 'TableCreate',
           table: {
             name: 'table1',
@@ -69,7 +72,7 @@ describe(transformTables.name, () => {

     it('should handle a default value', () => {
       expect(
-        transformTables({
+        transformTables(ctx, {
           type: 'TableCreate',
           table: {
             name: 'table1',
@@ -96,7 +99,7 @@ describe(transformTables.name, () => {

     it('should handle a string with a fixed length', () => {
       expect(
-        transformTables({
+        transformTables(ctx, {
           type: 'TableCreate',
           table: {
             name: 'table1',
@@ -123,7 +126,7 @@ describe(transformTables.name, () => {

     it('should handle an array type', () => {
       expect(
-        transformTables({
+        transformTables(ctx, {
           type: 'TableCreate',
           table: {
             name: 'table1',

@@ -1,9 +1,9 @@
 import { asColumnComment, getColumnModifiers, getColumnType } from 'src/sql-tools/helpers';
 import { asColumnAlter } from 'src/sql-tools/transformers/column.transformer';
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { DatabaseTable, SchemaDiff } from 'src/sql-tools/types';
+import { DatabaseTable } from 'src/sql-tools/types';

-export const transformTables: SqlTransformer = (item: SchemaDiff) => {
+export const transformTables: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'TableCreate': {
       return asTableCreate(item.table);

@@ -1,11 +1,14 @@
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { transformTriggers } from 'src/sql-tools/transformers/trigger.transformer';
 import { describe, expect, it } from 'vitest';

+const ctx = new BaseContext({});
+
 describe(transformTriggers.name, () => {
   describe('TriggerCreate', () => {
     it('should work', () => {
       expect(
-        transformTriggers({
+        transformTriggers(ctx, {
           type: 'TriggerCreate',
           trigger: {
             name: 'trigger1',
@@ -28,7 +31,7 @@ describe(transformTriggers.name, () => {

     it('should work with multiple actions', () => {
       expect(
-        transformTriggers({
+        transformTriggers(ctx, {
           type: 'TriggerCreate',
           trigger: {
             name: 'trigger1',
@@ -51,7 +54,7 @@ describe(transformTriggers.name, () => {

     it('should work with old/new reference table aliases', () => {
       expect(
-        transformTriggers({
+        transformTriggers(ctx, {
           type: 'TriggerCreate',
           trigger: {
             name: 'trigger1',
@@ -79,7 +82,7 @@ describe(transformTriggers.name, () => {
   describe('TriggerDrop', () => {
     it('should work', () => {
       expect(
-        transformTriggers({
+        transformTriggers(ctx, {
           type: 'TriggerDrop',
           tableName: 'table1',
           triggerName: 'trigger1',

@@ -1,7 +1,7 @@
 import { SqlTransformer } from 'src/sql-tools/transformers/types';
-import { DatabaseTrigger, SchemaDiff } from 'src/sql-tools/types';
+import { DatabaseTrigger } from 'src/sql-tools/types';

-export const transformTriggers: SqlTransformer = (item: SchemaDiff) => {
+export const transformTriggers: SqlTransformer = (ctx, item) => {
   switch (item.type) {
     case 'TriggerCreate': {
       return asTriggerCreate(item.trigger);

@@ -1,3 +1,4 @@
+import { BaseContext } from 'src/sql-tools/contexts/base-context';
 import { SchemaDiff } from 'src/sql-tools/types';

-export type SqlTransformer = (item: SchemaDiff) => string | string[] | false;
+export type SqlTransformer = (ctx: BaseContext, item: SchemaDiff) => string | string[] | false;
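Every transformer now takes the shared context first, even those that never touch it, which keeps the dispatch loop in asSql uniform. A minimal custom transformer under the new signature (hypothetical, for illustration only):

  import { SqlTransformer } from 'src/sql-tools/transformers/types';

  const transformTableDropOnly: SqlTransformer = (ctx, item) => {
    if (item.type !== 'TableDrop') {
      return false; // let the next transformer in the chain handle it
    }

    return `DROP TABLE "${item.tableName}";`;
  };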
@@ -1,38 +1,49 @@
 import { Kysely, ColumnType as KyselyColumnType } from 'kysely';
+import { ProcessorContext } from 'src/sql-tools/contexts/processor-context';
+import { ReaderContext } from 'src/sql-tools/contexts/reader-context';
 import { RegisterItem } from 'src/sql-tools/register-item';
-import { SchemaBuilder } from 'src/sql-tools/schema-builder';

-export type SchemaFromCodeOptions = {
-  /** automatically create indexes on foreign key columns */
-  createForeignKeyIndexes?: boolean;
+export type BaseContextOptions = {
+  databaseName?: string;
+  schemaName?: string;
+  overrideTableName?: string;
+};
+
+export type SchemaFromCodeOptions = BaseContextOptions & {
+  /** automatically create indexes on foreign key columns */
+  createForeignKeyIndexes?: boolean;
   reset?: boolean;
+
+  functions?: boolean;
+  extensions?: boolean;
+  parameters?: boolean;
+  overrides?: boolean;
 };

-export type SchemaFromDatabaseOptions = {
-  schemaName?: string;
-};
+export type SchemaFromDatabaseOptions = BaseContextOptions;

-export type SchemaDiffToSqlOptions = {
+export type SchemaDiffToSqlOptions = BaseContextOptions & {
   comments?: boolean;
 };

-export type SchemaDiffOptions = {
+export type SchemaDiffOptions = BaseContextOptions & {
   tables?: IgnoreOptions;
   functions?: IgnoreOptions;
   enums?: IgnoreOptions;
-  extension?: IgnoreOptions;
+  extensions?: IgnoreOptions;
   parameters?: IgnoreOptions;
+  overrides?: IgnoreOptions;
 };

-export type IgnoreOptions = {
-  ignoreExtra?: boolean;
-  ignoreMissing?: boolean;
-};
+export type IgnoreOptions =
+  | boolean
+  | {
+      ignoreExtra?: boolean;
+      ignoreMissing?: boolean;
+    };

-export type Processor = (builder: SchemaBuilder, items: RegisterItem[], options: SchemaFromCodeOptions) => void;
-export type DatabaseReader = (schema: DatabaseSchema, db: DatabaseClient) => Promise<void>;
+export type Processor = (ctx: ProcessorContext, items: RegisterItem[]) => void;
+export type Reader = (ctx: ReaderContext, db: DatabaseClient) => Promise<void>;

 export type PostgresDB = {
   pg_am: {
@@ -319,6 +330,7 @@ export type DatabaseSchema = {
   tables: DatabaseTable[];
   extensions: DatabaseExtension[];
   parameters: DatabaseParameter[];
+  overrides: DatabaseOverride[];
   warnings: string[];
 };

@@ -332,6 +344,14 @@ export type DatabaseParameter = {

 export type ParameterScope = 'database' | 'user';

+export type DatabaseOverride = {
+  name: string;
+  value: { name: string; type: OverrideType; sql: string };
+  synchronize: boolean;
+};
+
+export type OverrideType = 'function' | 'index' | 'trigger';
+
 export type DatabaseEnum = {
   name: string;
   values: string[];
@@ -342,6 +362,7 @@ export type DatabaseFunction = {
   name: string;
   expression: string;
   synchronize: boolean;
+  override?: DatabaseOverride;
 };

 export type DatabaseExtension = {
@@ -438,6 +459,7 @@ export type DatabaseTrigger = {
   referencingOldTableAs?: string;
   when?: string;
   functionName: string;
+  override?: DatabaseOverride;
   synchronize: boolean;
 };
 export type TriggerTiming = 'before' | 'after' | 'instead of';
@@ -453,6 +475,7 @@ export type DatabaseIndex = {
   using?: string;
   with?: string;
   where?: string;
+  override?: DatabaseOverride;
   synchronize: boolean;
 };

@@ -476,6 +499,9 @@ export type SchemaDiff = { reason: string } & (
   | { type: 'ParameterReset'; databaseName: string; parameterName: string }
   | { type: 'EnumCreate'; enum: DatabaseEnum }
   | { type: 'EnumDrop'; enumName: string }
+  | { type: 'OverrideCreate'; override: DatabaseOverride }
+  | { type: 'OverrideUpdate'; override: DatabaseOverride }
+  | { type: 'OverrideDrop'; overrideName: string }
 );

 export type CompareFunction<T> = (source: T, target: T) => SchemaDiff[];
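The DatabaseOverride record is what the processor writes and the reader rehydrates; the value payload is self-describing, so a row can be re-applied without consulting the code. For example (hypothetical trigger name):

  const override: DatabaseOverride = {
    name: 'trigger_my_trigger',
    value: { name: 'my_trigger', type: 'trigger', sql: 'CREATE TRIGGER my_trigger ...' },
    synchronize: true,
  };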
@@ -15,6 +15,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -15,6 +15,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -16,6 +16,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -27,6 +27,7 @@ export const schema: DatabaseSchema = {
   ],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -14,6 +14,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -31,6 +31,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -21,6 +21,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -21,6 +21,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -20,6 +20,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -27,6 +27,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -21,6 +21,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -25,6 +25,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -21,6 +21,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -20,6 +20,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -20,6 +20,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',

@@ -15,6 +15,7 @@ export const schema: DatabaseSchema = {
   enums: [],
   extensions: [],
   parameters: [],
+  overrides: [],
   tables: [
     {
       name: 'table1',
Some files were not shown because too many files have changed in this diff.