Mirror of https://github.com/immich-app/immich.git (synced 2025-07-08 18:54:18 -04:00)
refactor: sql-tools readers (#19672)
This commit is contained in:
parent 15da0d5a71
commit c435bdb5d3
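
Summary: the monolithic schemaFromDatabase function, which ran a dozen queries and stitched the results together inline, is split into per-object readers. Each reader receives the shared DatabaseSchema and the Kysely client, queries one kind of database object, and pushes its results onto the schema; warnings that previously went through a local warn() closure now accumulate on schema.warnings. The core of the new orchestrator, as it appears in the diff below:

  const db = new Kysely<PostgresDB>({ dialect: new PostgresJSDialect({ postgres }) });
  for (const reader of readers) {
    await reader(schema, db);
  }

  await db.destroy();

  return schema;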
@@ -1,576 +1,54 @@
-import { Kysely, QueryResult, sql } from 'kysely';
+import { Kysely } from 'kysely';
 import { PostgresJSDialect } from 'kysely-postgres-js';
-import { jsonArrayFrom } from 'kysely/helpers/postgres';
 import { Sql } from 'postgres';
-import { parseTriggerType } from 'src/sql-tools/helpers';
-import {
-  ColumnType,
-  DatabaseActionType,
-  DatabaseClient,
-  DatabaseColumn,
-  DatabaseConstraintType,
-  DatabaseEnum,
-  DatabaseExtension,
-  DatabaseFunction,
-  DatabaseParameter,
-  DatabaseSchema,
-  DatabaseTable,
-  LoadSchemaOptions,
-  ParameterScope,
-  PostgresDB,
-} from 'src/sql-tools/types';
+import { readColumns } from 'src/sql-tools/from-database/readers/column.reader';
+import { readComments } from 'src/sql-tools/from-database/readers/comment.reader';
+import { readConstraints } from 'src/sql-tools/from-database/readers/constraint.reader';
+import { readExtensions } from 'src/sql-tools/from-database/readers/extension.reader';
+import { readFunctions } from 'src/sql-tools/from-database/readers/function.reader';
+import { readIndexes } from 'src/sql-tools/from-database/readers/index.reader';
+import { readName } from 'src/sql-tools/from-database/readers/name.reader';
+import { readParameters } from 'src/sql-tools/from-database/readers/parameter.reader';
+import { readTables } from 'src/sql-tools/from-database/readers/table.reader';
+import { readTriggers } from 'src/sql-tools/from-database/readers/trigger.reader';
+import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';
+import { DatabaseSchema, LoadSchemaOptions, PostgresDB } from 'src/sql-tools/types';

+const readers: DatabaseReader[] = [
+  //
+  readName,
+  readParameters,
+  readExtensions,
+  readFunctions,
+  readTables,
+  readColumns,
+  readIndexes,
+  readConstraints,
+  readTriggers,
+  readComments,
+];
+
 /**
  * Load the database schema from the database
  */
 export const schemaFromDatabase = async (postgres: Sql, options: LoadSchemaOptions = {}): Promise<DatabaseSchema> => {
-  const db = createDatabaseClient(postgres);
-
-  const warnings: string[] = [];
-  const warn = (message: string) => {
-    warnings.push(message);
-  };
+  const schema: DatabaseSchema = {
+    name: 'immich',
+    schemaName: options.schemaName || 'public',
+    parameters: [],
+    functions: [],
+    enums: [],
+    extensions: [],
+    tables: [],
+    warnings: [],
+  };

-  const schemaName = options.schemaName || 'public';
-  const tablesMap: Record<string, DatabaseTable> = {};
-
-  const [
-    databaseName,
-    tables,
-    columns,
-    indexes,
-    constraints,
-    enums,
-    routines,
-    extensions,
-    triggers,
-    parameters,
-    comments,
-  ] = await Promise.all([
-    getDatabaseName(db),
-    getTables(db, schemaName),
-    getTableColumns(db, schemaName),
-    getTableIndexes(db, schemaName),
-    getTableConstraints(db, schemaName),
-    getUserDefinedEnums(db, schemaName),
-    getRoutines(db, schemaName),
-    getExtensions(db),
-    getTriggers(db, schemaName),
-    getParameters(db),
-    getObjectComments(db),
-  ]);
-
-  const schemaEnums: DatabaseEnum[] = [];
-  const schemaFunctions: DatabaseFunction[] = [];
-  const schemaExtensions: DatabaseExtension[] = [];
-  const schemaParameters: DatabaseParameter[] = [];
-
-  const enumMap = Object.fromEntries(enums.map((e) => [e.name, e.values]));
-
-  for (const { name } of extensions) {
-    schemaExtensions.push({ name, synchronize: true });
-  }
-
-  for (const { name, values } of enums) {
-    schemaEnums.push({ name, values, synchronize: true });
-  }
-
-  for (const parameter of parameters) {
-    schemaParameters.push({
-      name: parameter.name,
-      value: parameter.value,
-      databaseName,
-      scope: parameter.scope as ParameterScope,
-      synchronize: true,
-    });
-  }
-
-  for (const { name, expression } of routines) {
-    schemaFunctions.push({
-      name,
-      // TODO read expression from the overrides table
-      expression,
-      synchronize: true,
-    });
-  }
-
-  // add tables
-  for (const table of tables) {
-    const tableName = table.table_name;
-    if (tablesMap[tableName]) {
-      continue;
-    }
-
-    tablesMap[table.table_name] = {
-      name: table.table_name,
-      columns: [],
-      indexes: [],
-      triggers: [],
-      constraints: [],
-      synchronize: true,
-    };
-  }
-
-  // add columns to tables
-  for (const column of columns) {
-    const table = tablesMap[column.table_name];
-    if (!table) {
-      continue;
-    }
-
-    const columnName = column.column_name;
-
-    const item: DatabaseColumn = {
-      type: column.data_type as ColumnType,
-      name: columnName,
-      tableName: column.table_name,
-      nullable: column.is_nullable === 'YES',
-      isArray: column.array_type !== null,
-      numericPrecision: column.numeric_precision ?? undefined,
-      numericScale: column.numeric_scale ?? undefined,
-      length: column.character_maximum_length ?? undefined,
-      default: column.column_default ?? undefined,
-      synchronize: true,
-    };
-
-    const columnLabel = `${table.name}.${columnName}`;
-
-    switch (column.data_type) {
-      // array types
-      case 'ARRAY': {
-        if (!column.array_type) {
-          warn(`Unable to find type for ${columnLabel} (ARRAY)`);
-          continue;
-        }
-        item.type = column.array_type as ColumnType;
-        break;
-      }
-
-      // enum types
-      case 'USER-DEFINED': {
-        if (!enumMap[column.udt_name]) {
-          warn(`Unable to find type for ${columnLabel} (ENUM)`);
-          continue;
-        }
-
-        item.type = 'enum';
-        item.enumName = column.udt_name;
-        break;
-      }
-    }
-
-    table.columns.push(item);
-  }
-
-  // add table indexes
-  for (const index of indexes) {
-    const table = tablesMap[index.table_name];
-    if (!table) {
-      continue;
-    }
-
-    const indexName = index.index_name;
-
-    table.indexes.push({
-      name: indexName,
-      tableName: index.table_name,
-      columnNames: index.column_names ?? undefined,
-      expression: index.expression ?? undefined,
-      using: index.using,
-      where: index.where ?? undefined,
-      unique: index.unique,
-      synchronize: true,
-    });
-  }
-
-  // add table constraints
-  for (const constraint of constraints) {
-    const table = tablesMap[constraint.table_name];
-    if (!table) {
-      continue;
-    }
-
-    const constraintName = constraint.constraint_name;
-
-    switch (constraint.constraint_type) {
-      // primary key constraint
-      case 'p': {
-        if (!constraint.column_names) {
-          warn(`Skipping CONSTRAINT "${constraintName}", no columns found`);
-          continue;
-        }
-        table.constraints.push({
-          type: DatabaseConstraintType.PRIMARY_KEY,
-          name: constraintName,
-          tableName: constraint.table_name,
-          columnNames: constraint.column_names,
-          synchronize: true,
-        });
-        break;
-      }
-
-      // foreign key constraint
-      case 'f': {
-        if (!constraint.column_names || !constraint.reference_table_name || !constraint.reference_column_names) {
-          warn(
-            `Skipping CONSTRAINT "${constraintName}", missing either columns, referenced table, or referenced columns,`,
-          );
-          continue;
-        }
-
-        table.constraints.push({
-          type: DatabaseConstraintType.FOREIGN_KEY,
-          name: constraintName,
-          tableName: constraint.table_name,
-          columnNames: constraint.column_names,
-          referenceTableName: constraint.reference_table_name,
-          referenceColumnNames: constraint.reference_column_names,
-          onUpdate: asDatabaseAction(constraint.update_action),
-          onDelete: asDatabaseAction(constraint.delete_action),
-          synchronize: true,
-        });
-        break;
-      }
-
-      // unique constraint
-      case 'u': {
-        table.constraints.push({
-          type: DatabaseConstraintType.UNIQUE,
-          name: constraintName,
-          tableName: constraint.table_name,
-          columnNames: constraint.column_names as string[],
-          synchronize: true,
-        });
-        break;
-      }
-
-      // check constraint
-      case 'c': {
-        table.constraints.push({
-          type: DatabaseConstraintType.CHECK,
-          name: constraint.constraint_name,
-          tableName: constraint.table_name,
-          expression: constraint.expression.replace('CHECK ', ''),
-          synchronize: true,
-        });
-        break;
-      }
-    }
-  }
-
-  // add triggers to tables
-  for (const trigger of triggers) {
-    const table = tablesMap[trigger.table_name];
-    if (!table) {
-      continue;
-    }
-
-    table.triggers.push({
-      name: trigger.name,
-      tableName: trigger.table_name,
-      functionName: trigger.function_name,
-      referencingNewTableAs: trigger.referencing_new_table_as ?? undefined,
-      referencingOldTableAs: trigger.referencing_old_table_as ?? undefined,
-      when: trigger.when_expression,
-      synchronize: true,
-      ...parseTriggerType(trigger.type),
-    });
-  }
-
-  for (const comment of comments) {
-    if (comment.object_type === 'r') {
-      const table = tablesMap[comment.object_name];
-      if (!table) {
-        continue;
-      }
-
-      if (comment.column_name) {
-        const column = table.columns.find(({ name }) => name === comment.column_name);
-        if (column) {
-          column.comment = comment.value;
-        }
-      }
-    }
-  }
+  const db = new Kysely<PostgresDB>({ dialect: new PostgresJSDialect({ postgres }) });
+  for (const reader of readers) {
+    await reader(schema, db);
+  }

   await db.destroy();

-  return {
-    name: databaseName,
-    schemaName,
-    parameters: schemaParameters,
-    functions: schemaFunctions,
-    enums: schemaEnums,
-    extensions: schemaExtensions,
-    tables: Object.values(tablesMap),
-    warnings,
-  };
-};
-
-const createDatabaseClient = (postgres: Sql): DatabaseClient =>
-  new Kysely<PostgresDB>({ dialect: new PostgresJSDialect({ postgres }) });
-
-const asDatabaseAction = (action: string) => {
-  switch (action) {
-    case 'a': {
-      return DatabaseActionType.NO_ACTION;
-    }
-    case 'c': {
-      return DatabaseActionType.CASCADE;
-    }
-    case 'r': {
-      return DatabaseActionType.RESTRICT;
-    }
-    case 'n': {
-      return DatabaseActionType.SET_NULL;
-    }
-    case 'd': {
-      return DatabaseActionType.SET_DEFAULT;
-    }
-
-    default: {
-      return DatabaseActionType.NO_ACTION;
-    }
-  }
-};
-
-const getDatabaseName = async (db: DatabaseClient) => {
-  const result = (await sql`SELECT current_database() as name`.execute(db)) as QueryResult<{ name: string }>;
-  return result.rows[0].name;
-};
-
-const getTables = (db: DatabaseClient, schemaName: string) => {
-  return db
-    .selectFrom('information_schema.tables')
-    .where('table_schema', '=', schemaName)
-    .where('table_type', '=', sql.lit('BASE TABLE'))
-    .selectAll()
-    .execute();
-};
-
-const getTableColumns = (db: DatabaseClient, schemaName: string) => {
-  return db
-    .selectFrom('information_schema.columns as c')
-    .leftJoin('information_schema.element_types as o', (join) =>
-      join
-        .onRef('c.table_catalog', '=', 'o.object_catalog')
-        .onRef('c.table_schema', '=', 'o.object_schema')
-        .onRef('c.table_name', '=', 'o.object_name')
-        .on('o.object_type', '=', sql.lit('TABLE'))
-        .onRef('c.dtd_identifier', '=', 'o.collection_type_identifier'),
-    )
-    .leftJoin('pg_type as t', (join) =>
-      join.onRef('t.typname', '=', 'c.udt_name').on('c.data_type', '=', sql.lit('USER-DEFINED')),
-    )
-    .leftJoin('pg_enum as e', (join) => join.onRef('e.enumtypid', '=', 't.oid'))
-    .select([
-      'c.table_name',
-      'c.column_name',
-
-      // is ARRAY, USER-DEFINED, or data type
-      'c.data_type',
-      'c.column_default',
-      'c.is_nullable',
-      'c.character_maximum_length',
-
-      // number types
-      'c.numeric_precision',
-      'c.numeric_scale',
-
-      // date types
-      'c.datetime_precision',
-
-      // user defined type
-      'c.udt_catalog',
-      'c.udt_schema',
-      'c.udt_name',
-
-      // data type for ARRAYs
-      'o.data_type as array_type',
-    ])
-    .where('table_schema', '=', schemaName)
-    .execute();
-};
-
-const getTableIndexes = (db: DatabaseClient, schemaName: string) => {
-  return (
-    db
-      .selectFrom('pg_index as ix')
-      // matching index, which has column information
-      .innerJoin('pg_class as i', 'ix.indexrelid', 'i.oid')
-      .innerJoin('pg_am as a', 'i.relam', 'a.oid')
-      // matching table
-      .innerJoin('pg_class as t', 'ix.indrelid', 't.oid')
-      // namespace
-      .innerJoin('pg_namespace', 'pg_namespace.oid', 'i.relnamespace')
-      // PK and UQ constraints automatically have indexes, so we can ignore those
-      .leftJoin('pg_constraint', (join) =>
-        join
-          .onRef('pg_constraint.conindid', '=', 'i.oid')
-          .on('pg_constraint.contype', 'in', [sql.lit('p'), sql.lit('u')]),
-      )
-      .where('pg_constraint.oid', 'is', null)
-      .select((eb) => [
-        'i.relname as index_name',
-        't.relname as table_name',
-        'ix.indisunique as unique',
-        'a.amname as using',
-        eb.fn<string>('pg_get_expr', ['ix.indexprs', 'ix.indrelid']).as('expression'),
-        eb.fn<string>('pg_get_expr', ['ix.indpred', 'ix.indrelid']).as('where'),
-        eb
-          .selectFrom('pg_attribute as a')
-          .where('t.relkind', '=', sql.lit('r'))
-          .whereRef('a.attrelid', '=', 't.oid')
-          // list of columns numbers in the index
-          .whereRef('a.attnum', '=', sql`any("ix"."indkey")`)
-          .select((eb) => eb.fn<string[]>('json_agg', ['a.attname']).as('column_name'))
-          .as('column_names'),
-      ])
-      .where('pg_namespace.nspname', '=', schemaName)
-      .where('ix.indisprimary', '=', sql.lit(false))
-      .execute()
-  );
-};
-
-const getTableConstraints = (db: DatabaseClient, schemaName: string) => {
-  return db
-    .selectFrom('pg_constraint')
-    .innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_constraint.connamespace') // namespace
-    .innerJoin('pg_class as source_table', (join) =>
-      join.onRef('source_table.oid', '=', 'pg_constraint.conrelid').on('source_table.relkind', 'in', [
-        // ordinary table
-        sql.lit('r'),
-        // partitioned table
-        sql.lit('p'),
-        // foreign table
-        sql.lit('f'),
-      ]),
-    ) // table
-    .leftJoin('pg_class as reference_table', 'reference_table.oid', 'pg_constraint.confrelid') // reference table
-    .select((eb) => [
-      'pg_constraint.contype as constraint_type',
-      'pg_constraint.conname as constraint_name',
-      'source_table.relname as table_name',
-      'reference_table.relname as reference_table_name',
-      'pg_constraint.confupdtype as update_action',
-      'pg_constraint.confdeltype as delete_action',
-      // 'pg_constraint.oid as constraint_id',
-      eb
-        .selectFrom('pg_attribute')
-        // matching table for PK, FK, and UQ
-        .whereRef('pg_attribute.attrelid', '=', 'pg_constraint.conrelid')
-        .whereRef('pg_attribute.attnum', '=', sql`any("pg_constraint"."conkey")`)
-        .select((eb) => eb.fn<string[]>('json_agg', ['pg_attribute.attname']).as('column_name'))
-        .as('column_names'),
-      eb
-        .selectFrom('pg_attribute')
-        // matching foreign table for FK
-        .whereRef('pg_attribute.attrelid', '=', 'pg_constraint.confrelid')
-        .whereRef('pg_attribute.attnum', '=', sql`any("pg_constraint"."confkey")`)
-        .select((eb) => eb.fn<string[]>('json_agg', ['pg_attribute.attname']).as('column_name'))
-        .as('reference_column_names'),
-      eb.fn<string>('pg_get_constraintdef', ['pg_constraint.oid']).as('expression'),
-    ])
-    .where('pg_namespace.nspname', '=', schemaName)
-    .execute();
-};
-
-const getUserDefinedEnums = async (db: DatabaseClient, schemaName: string) => {
-  const items = await db
-    .selectFrom('pg_type')
-    .innerJoin('pg_namespace', (join) =>
-      join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', schemaName),
-    )
-    .where('typtype', '=', sql.lit('e'))
-    .select((eb) => [
-      'pg_type.typname as name',
-      jsonArrayFrom(
-        eb.selectFrom('pg_enum as e').select(['e.enumlabel as value']).whereRef('e.enumtypid', '=', 'pg_type.oid'),
-      ).as('values'),
-    ])
-    .execute();
-
-  return items.map((item) => ({
-    name: item.name,
-    values: item.values.map(({ value }) => value),
-  }));
-};
-
-const getRoutines = async (db: DatabaseClient, schemaName: string) => {
-  return db
-    .selectFrom('pg_proc as p')
-    .innerJoin('pg_namespace', 'pg_namespace.oid', 'p.pronamespace')
-    .leftJoin('pg_depend as d', (join) => join.onRef('d.objid', '=', 'p.oid').on('d.deptype', '=', sql.lit('e')))
-    .where('d.objid', 'is', sql.lit(null))
-    .where('p.prokind', '=', sql.lit('f'))
-    .where('pg_namespace.nspname', '=', schemaName)
-    .select((eb) => [
-      'p.proname as name',
-      eb.fn<string>('pg_get_function_identity_arguments', ['p.oid']).as('arguments'),
-      eb.fn<string>('pg_get_functiondef', ['p.oid']).as('expression'),
-    ])
-    .execute();
-};
-
-const getExtensions = async (db: DatabaseClient) => {
-  return (
-    db
-      .selectFrom('pg_catalog.pg_extension')
-      // .innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_catalog.pg_extension.extnamespace')
-      // .where('pg_namespace.nspname', '=', schemaName)
-      .select(['extname as name', 'extversion as version'])
-      .execute()
-  );
-};
-
-const getTriggers = async (db: Kysely<PostgresDB>, schemaName: string) => {
-  return db
-    .selectFrom('pg_trigger as t')
-    .innerJoin('pg_proc as p', 't.tgfoid', 'p.oid')
-    .innerJoin('pg_namespace as n', 'p.pronamespace', 'n.oid')
-    .innerJoin('pg_class as c', 't.tgrelid', 'c.oid')
-    .select((eb) => [
-      't.tgname as name',
-      't.tgenabled as enabled',
-      't.tgtype as type',
-      't.tgconstraint as _constraint',
-      't.tgdeferrable as is_deferrable',
-      't.tginitdeferred as is_initially_deferred',
-      't.tgargs as arguments',
-      't.tgoldtable as referencing_old_table_as',
-      't.tgnewtable as referencing_new_table_as',
-      eb.fn<string>('pg_get_expr', ['t.tgqual', 't.tgrelid']).as('when_expression'),
-      'p.proname as function_name',
-      'c.relname as table_name',
-    ])
-    .where('t.tgisinternal', '=', false) // Exclude internal system triggers
-    .where('n.nspname', '=', schemaName)
-    .execute();
-};
-
-const getParameters = async (db: Kysely<PostgresDB>) => {
-  return db
-    .selectFrom('pg_settings')
-    .where('source', 'in', [sql.lit('database'), sql.lit('user')])
-    .select(['name', 'setting as value', 'source as scope'])
-    .execute();
-};
-
-const getObjectComments = async (db: Kysely<PostgresDB>) => {
-  return db
-    .selectFrom('pg_description as d')
-    .innerJoin('pg_class as c', 'd.objoid', 'c.oid')
-    .leftJoin('pg_attribute as a', (join) =>
-      join.onRef('a.attrelid', '=', 'c.oid').onRef('a.attnum', '=', 'd.objsubid'),
-    )
-    .select([
-      'c.relname as object_name',
-      'c.relkind as object_type',
-      'd.description as value',
-      'a.attname as column_name',
-    ])
-    .where('d.description', 'is not', null)
-    .orderBy('object_type')
-    .orderBy('object_name')
-    .execute();
+  return schema;
 };
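With that loop in place, supporting a new kind of object is one new file plus one entry in the readers array. A hypothetical sketch (view.reader.ts and readViews are illustrative only, not part of this commit):

// src/sql-tools/from-database/readers/view.reader.ts (hypothetical)
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';

export const readViews: DatabaseReader = async (schema, db) => {
  const views = await db
    .selectFrom('information_schema.tables')
    .where('table_schema', '=', schema.schemaName)
    .where('table_type', '=', sql.lit('VIEW'))
    .selectAll()
    .execute();

  // push the results onto the shared schema object, as the readers below do
};

It would then be registered in the readers array in the orchestrator above, ordered after any readers whose output it needs.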
server/src/sql-tools/from-database/readers/column.reader.ts (new file)
@@ -0,0 +1,119 @@
import { sql } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';
import { ColumnType, DatabaseColumn } from 'src/sql-tools/types';

export const readColumns: DatabaseReader = async (schema, db) => {
  const columns = await db
    .selectFrom('information_schema.columns as c')
    .leftJoin('information_schema.element_types as o', (join) =>
      join
        .onRef('c.table_catalog', '=', 'o.object_catalog')
        .onRef('c.table_schema', '=', 'o.object_schema')
        .onRef('c.table_name', '=', 'o.object_name')
        .on('o.object_type', '=', sql.lit('TABLE'))
        .onRef('c.dtd_identifier', '=', 'o.collection_type_identifier'),
    )
    .leftJoin('pg_type as t', (join) =>
      join.onRef('t.typname', '=', 'c.udt_name').on('c.data_type', '=', sql.lit('USER-DEFINED')),
    )
    .leftJoin('pg_enum as e', (join) => join.onRef('e.enumtypid', '=', 't.oid'))
    .select([
      'c.table_name',
      'c.column_name',

      // is ARRAY, USER-DEFINED, or data type
      'c.data_type',
      'c.column_default',
      'c.is_nullable',
      'c.character_maximum_length',

      // number types
      'c.numeric_precision',
      'c.numeric_scale',

      // date types
      'c.datetime_precision',

      // user defined type
      'c.udt_catalog',
      'c.udt_schema',
      'c.udt_name',

      // data type for ARRAYs
      'o.data_type as array_type',
    ])
    .where('table_schema', '=', schema.schemaName)
    .execute();

  const enumRaw = await db
    .selectFrom('pg_type')
    .innerJoin('pg_namespace', (join) =>
      join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', schema.schemaName),
    )
    .where('typtype', '=', sql.lit('e'))
    .select((eb) => [
      'pg_type.typname as name',
      jsonArrayFrom(
        eb.selectFrom('pg_enum as e').select(['e.enumlabel as value']).whereRef('e.enumtypid', '=', 'pg_type.oid'),
      ).as('values'),
    ])
    .execute();

  const enums = enumRaw.map((item) => ({ name: item.name, values: item.values.map(({ value }) => value) }));
  for (const { name, values } of enums) {
    schema.enums.push({ name, values, synchronize: true });
  }

  const enumMap = Object.fromEntries(enums.map((e) => [e.name, e.values]));
  // add columns to tables
  for (const column of columns) {
    const table = schema.tables.find((table) => table.name === column.table_name);
    if (!table) {
      continue;
    }

    const columnName = column.column_name;

    const item: DatabaseColumn = {
      type: column.data_type as ColumnType,
      name: columnName,
      tableName: column.table_name,
      nullable: column.is_nullable === 'YES',
      isArray: column.array_type !== null,
      numericPrecision: column.numeric_precision ?? undefined,
      numericScale: column.numeric_scale ?? undefined,
      length: column.character_maximum_length ?? undefined,
      default: column.column_default ?? undefined,
      synchronize: true,
    };

    const columnLabel = `${table.name}.${columnName}`;

    switch (column.data_type) {
      // array types
      case 'ARRAY': {
        if (!column.array_type) {
          schema.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
          continue;
        }
        item.type = column.array_type as ColumnType;
        break;
      }

      // enum types
      case 'USER-DEFINED': {
        if (!enumMap[column.udt_name]) {
          schema.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
          continue;
        }

        item.type = 'enum';
        item.enumName = column.udt_name;
        break;
      }
    }

    table.columns.push(item);
  }
};
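For context: information_schema reports an array column with data_type = 'ARRAY' (its element type comes from the element_types join) and an enum column with data_type = 'USER-DEFINED' (its type name comes from udt_name), which is what the switch above resolves. Illustrative outcomes (example column names, not from this commit):

// "tags" text[]         -> { name: 'tags', type: 'text', isArray: true, ... }
// "status" asset_status -> { name: 'status', type: 'enum', enumName: 'asset_status', ... }
// an ARRAY or USER-DEFINED column whose type cannot be resolved is skipped,
// with a message pushed onto schema.warnings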
server/src/sql-tools/from-database/readers/comment.reader.ts (new file)
@@ -0,0 +1,36 @@
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';

export const readComments: DatabaseReader = async (schema, db) => {
  const comments = await db
    .selectFrom('pg_description as d')
    .innerJoin('pg_class as c', 'd.objoid', 'c.oid')
    .leftJoin('pg_attribute as a', (join) =>
      join.onRef('a.attrelid', '=', 'c.oid').onRef('a.attnum', '=', 'd.objsubid'),
    )
    .select([
      'c.relname as object_name',
      'c.relkind as object_type',
      'd.description as value',
      'a.attname as column_name',
    ])
    .where('d.description', 'is not', null)
    .orderBy('object_type')
    .orderBy('object_name')
    .execute();

  for (const comment of comments) {
    if (comment.object_type === 'r') {
      const table = schema.tables.find((table) => table.name === comment.object_name);
      if (!table) {
        continue;
      }

      if (comment.column_name) {
        const column = table.columns.find(({ name }) => name === comment.column_name);
        if (column) {
          column.comment = comment.value;
        }
      }
    }
  }
};
server/src/sql-tools/from-database/readers/constraint.reader.ts (new file)
@@ -0,0 +1,144 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';
import { DatabaseActionType, DatabaseConstraintType } from 'src/sql-tools/types';

export const readConstraints: DatabaseReader = async (schema, db) => {
  const constraints = await db
    .selectFrom('pg_constraint')
    .innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_constraint.connamespace') // namespace
    .innerJoin('pg_class as source_table', (join) =>
      join.onRef('source_table.oid', '=', 'pg_constraint.conrelid').on('source_table.relkind', 'in', [
        // ordinary table
        sql.lit('r'),
        // partitioned table
        sql.lit('p'),
        // foreign table
        sql.lit('f'),
      ]),
    ) // table
    .leftJoin('pg_class as reference_table', 'reference_table.oid', 'pg_constraint.confrelid') // reference table
    .select((eb) => [
      'pg_constraint.contype as constraint_type',
      'pg_constraint.conname as constraint_name',
      'source_table.relname as table_name',
      'reference_table.relname as reference_table_name',
      'pg_constraint.confupdtype as update_action',
      'pg_constraint.confdeltype as delete_action',
      // 'pg_constraint.oid as constraint_id',
      eb
        .selectFrom('pg_attribute')
        // matching table for PK, FK, and UQ
        .whereRef('pg_attribute.attrelid', '=', 'pg_constraint.conrelid')
        .whereRef('pg_attribute.attnum', '=', sql`any("pg_constraint"."conkey")`)
        .select((eb) => eb.fn<string[]>('json_agg', ['pg_attribute.attname']).as('column_name'))
        .as('column_names'),
      eb
        .selectFrom('pg_attribute')
        // matching foreign table for FK
        .whereRef('pg_attribute.attrelid', '=', 'pg_constraint.confrelid')
        .whereRef('pg_attribute.attnum', '=', sql`any("pg_constraint"."confkey")`)
        .select((eb) => eb.fn<string[]>('json_agg', ['pg_attribute.attname']).as('column_name'))
        .as('reference_column_names'),
      eb.fn<string>('pg_get_constraintdef', ['pg_constraint.oid']).as('expression'),
    ])
    .where('pg_namespace.nspname', '=', schema.schemaName)
    .execute();

  for (const constraint of constraints) {
    const table = schema.tables.find((table) => table.name === constraint.table_name);
    if (!table) {
      continue;
    }

    const constraintName = constraint.constraint_name;

    switch (constraint.constraint_type) {
      // primary key constraint
      case 'p': {
        if (!constraint.column_names) {
          schema.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
          continue;
        }
        table.constraints.push({
          type: DatabaseConstraintType.PRIMARY_KEY,
          name: constraintName,
          tableName: constraint.table_name,
          columnNames: constraint.column_names,
          synchronize: true,
        });
        break;
      }

      // foreign key constraint
      case 'f': {
        if (!constraint.column_names || !constraint.reference_table_name || !constraint.reference_column_names) {
          schema.warnings.push(
            `Skipping CONSTRAINT "${constraintName}", missing either columns, referenced table, or referenced columns,`,
          );
          continue;
        }

        table.constraints.push({
          type: DatabaseConstraintType.FOREIGN_KEY,
          name: constraintName,
          tableName: constraint.table_name,
          columnNames: constraint.column_names,
          referenceTableName: constraint.reference_table_name,
          referenceColumnNames: constraint.reference_column_names,
          onUpdate: asDatabaseAction(constraint.update_action),
          onDelete: asDatabaseAction(constraint.delete_action),
          synchronize: true,
        });
        break;
      }

      // unique constraint
      case 'u': {
        table.constraints.push({
          type: DatabaseConstraintType.UNIQUE,
          name: constraintName,
          tableName: constraint.table_name,
          columnNames: constraint.column_names as string[],
          synchronize: true,
        });
        break;
      }

      // check constraint
      case 'c': {
        table.constraints.push({
          type: DatabaseConstraintType.CHECK,
          name: constraint.constraint_name,
          tableName: constraint.table_name,
          expression: constraint.expression.replace('CHECK ', ''),
          synchronize: true,
        });
        break;
      }
    }
  }
};

const asDatabaseAction = (action: string) => {
  switch (action) {
    case 'a': {
      return DatabaseActionType.NO_ACTION;
    }
    case 'c': {
      return DatabaseActionType.CASCADE;
    }
    case 'r': {
      return DatabaseActionType.RESTRICT;
    }
    case 'n': {
      return DatabaseActionType.SET_NULL;
    }
    case 'd': {
      return DatabaseActionType.SET_DEFAULT;
    }

    default: {
      return DatabaseActionType.NO_ACTION;
    }
  }
};
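The single-letter codes are pg_catalog conventions: pg_constraint.contype is 'p' (primary key), 'f' (foreign key), 'u' (unique), or 'c' (check); confupdtype and confdeltype use 'a' (no action), 'r' (restrict), 'c' (cascade), 'n' (set null), and 'd' (set default), which is the mapping asDatabaseAction implements. For example:

asDatabaseAction('c'); // DatabaseActionType.CASCADE
asDatabaseAction('n'); // DatabaseActionType.SET_NULL
asDatabaseAction('x'); // any unknown code falls through to DatabaseActionType.NO_ACTION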
server/src/sql-tools/from-database/readers/extension.reader.ts (new file)
@@ -0,0 +1,14 @@
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';

export const readExtensions: DatabaseReader = async (schema, db) => {
  const extensions = await db
    .selectFrom('pg_catalog.pg_extension')
    // .innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_catalog.pg_extension.extnamespace')
    // .where('pg_namespace.nspname', '=', schemaName)
    .select(['extname as name', 'extversion as version'])
    .execute();

  for (const { name } of extensions) {
    schema.extensions.push({ name, synchronize: true });
  }
};
server/src/sql-tools/from-database/readers/function.reader.ts (new file)
@@ -0,0 +1,27 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';

export const readFunctions: DatabaseReader = async (schema, db) => {
  const routines = await db
    .selectFrom('pg_proc as p')
    .innerJoin('pg_namespace', 'pg_namespace.oid', 'p.pronamespace')
    .leftJoin('pg_depend as d', (join) => join.onRef('d.objid', '=', 'p.oid').on('d.deptype', '=', sql.lit('e')))
    .where('d.objid', 'is', sql.lit(null))
    .where('p.prokind', '=', sql.lit('f'))
    .where('pg_namespace.nspname', '=', schema.schemaName)
    .select((eb) => [
      'p.proname as name',
      eb.fn<string>('pg_get_function_identity_arguments', ['p.oid']).as('arguments'),
      eb.fn<string>('pg_get_functiondef', ['p.oid']).as('expression'),
    ])
    .execute();

  for (const { name, expression } of routines) {
    schema.functions.push({
      name,
      // TODO read expression from the overrides table
      expression,
      synchronize: true,
    });
  }
};
server/src/sql-tools/from-database/readers/index.reader.ts (new file)
@@ -0,0 +1,58 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';

export const readIndexes: DatabaseReader = async (schema, db) => {
  const indexes = await db
    .selectFrom('pg_index as ix')
    // matching index, which has column information
    .innerJoin('pg_class as i', 'ix.indexrelid', 'i.oid')
    .innerJoin('pg_am as a', 'i.relam', 'a.oid')
    // matching table
    .innerJoin('pg_class as t', 'ix.indrelid', 't.oid')
    // namespace
    .innerJoin('pg_namespace', 'pg_namespace.oid', 'i.relnamespace')
    // PK and UQ constraints automatically have indexes, so we can ignore those
    .leftJoin('pg_constraint', (join) =>
      join
        .onRef('pg_constraint.conindid', '=', 'i.oid')
        .on('pg_constraint.contype', 'in', [sql.lit('p'), sql.lit('u')]),
    )
    .where('pg_constraint.oid', 'is', null)
    .select((eb) => [
      'i.relname as index_name',
      't.relname as table_name',
      'ix.indisunique as unique',
      'a.amname as using',
      eb.fn<string>('pg_get_expr', ['ix.indexprs', 'ix.indrelid']).as('expression'),
      eb.fn<string>('pg_get_expr', ['ix.indpred', 'ix.indrelid']).as('where'),
      eb
        .selectFrom('pg_attribute as a')
        .where('t.relkind', '=', sql.lit('r'))
        .whereRef('a.attrelid', '=', 't.oid')
        // list of columns numbers in the index
        .whereRef('a.attnum', '=', sql`any("ix"."indkey")`)
        .select((eb) => eb.fn<string[]>('json_agg', ['a.attname']).as('column_name'))
        .as('column_names'),
    ])
    .where('pg_namespace.nspname', '=', schema.schemaName)
    .where('ix.indisprimary', '=', sql.lit(false))
    .execute();

  for (const index of indexes) {
    const table = schema.tables.find((table) => table.name === index.table_name);
    if (!table) {
      continue;
    }

    table.indexes.push({
      name: index.index_name,
      tableName: index.table_name,
      columnNames: index.column_names ?? undefined,
      expression: index.expression ?? undefined,
      using: index.using,
      where: index.where ?? undefined,
      unique: index.unique,
      synchronize: true,
    });
  }
};
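pg_get_expr renders the stored expression trees back into SQL: ix.indexprs holds the expressions of an expression index and ix.indpred the predicate of a partial index, both resolved against the table in ix.indrelid. A rough illustration of the round-trip (hypothetical index, not from this commit):

// CREATE UNIQUE INDEX idx_users_email_lower ON users (lower(email)) WHERE deleted_at IS NULL;
// would be read back approximately as:
// { name: 'idx_users_email_lower', tableName: 'users', unique: true, using: 'btree',
//   expression: 'lower(email)', where: '(deleted_at IS NULL)', synchronize: true }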
server/src/sql-tools/from-database/readers/name.reader.ts (new file)
@@ -0,0 +1,8 @@
import { QueryResult, sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';

export const readName: DatabaseReader = async (schema, db) => {
  const result = (await sql`SELECT current_database() as name`.execute(db)) as QueryResult<{ name: string }>;

  schema.name = result.rows[0].name;
};
server/src/sql-tools/from-database/readers/parameter.reader.ts (new file)
@@ -0,0 +1,21 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';
import { ParameterScope } from 'src/sql-tools/types';

export const readParameters: DatabaseReader = async (schema, db) => {
  const parameters = await db
    .selectFrom('pg_settings')
    .where('source', 'in', [sql.lit('database'), sql.lit('user')])
    .select(['name', 'setting as value', 'source as scope'])
    .execute();

  for (const parameter of parameters) {
    schema.parameters.push({
      name: parameter.name,
      value: parameter.value,
      databaseName: schema.name,
      scope: parameter.scope as ParameterScope,
      synchronize: true,
    });
  }
};
server/src/sql-tools/from-database/readers/table.reader.ts (new file)
@@ -0,0 +1,22 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';

export const readTables: DatabaseReader = async (schema, db) => {
  const tables = await db
    .selectFrom('information_schema.tables')
    .where('table_schema', '=', schema.schemaName)
    .where('table_type', '=', sql.lit('BASE TABLE'))
    .selectAll()
    .execute();

  for (const table of tables) {
    schema.tables.push({
      name: table.table_name,
      columns: [],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    });
  }
};
server/src/sql-tools/from-database/readers/trigger.reader.ts (new file)
@@ -0,0 +1,46 @@
import { DatabaseReader } from 'src/sql-tools/from-database/readers/type';
import { parseTriggerType } from 'src/sql-tools/helpers';

export const readTriggers: DatabaseReader = async (schema, db) => {
  const triggers = await db
    .selectFrom('pg_trigger as t')
    .innerJoin('pg_proc as p', 't.tgfoid', 'p.oid')
    .innerJoin('pg_namespace as n', 'p.pronamespace', 'n.oid')
    .innerJoin('pg_class as c', 't.tgrelid', 'c.oid')
    .select((eb) => [
      't.tgname as name',
      't.tgenabled as enabled',
      't.tgtype as type',
      't.tgconstraint as _constraint',
      't.tgdeferrable as is_deferrable',
      't.tginitdeferred as is_initially_deferred',
      't.tgargs as arguments',
      't.tgoldtable as referencing_old_table_as',
      't.tgnewtable as referencing_new_table_as',
      eb.fn<string>('pg_get_expr', ['t.tgqual', 't.tgrelid']).as('when_expression'),
      'p.proname as function_name',
      'c.relname as table_name',
    ])
    .where('t.tgisinternal', '=', false) // Exclude internal system triggers
    .where('n.nspname', '=', schema.schemaName)
    .execute();

  // add triggers to tables
  for (const trigger of triggers) {
    const table = schema.tables.find((table) => table.name === trigger.table_name);
    if (!table) {
      continue;
    }

    table.triggers.push({
      name: trigger.name,
      tableName: trigger.table_name,
      functionName: trigger.function_name,
      referencingNewTableAs: trigger.referencing_new_table_as ?? undefined,
      referencingOldTableAs: trigger.referencing_old_table_as ?? undefined,
      when: trigger.when_expression,
      synchronize: true,
      ...parseTriggerType(trigger.type),
    });
  }
};
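pg_trigger.tgtype is a small-integer bitmask packing the trigger level, timing, and events; parseTriggerType (from src/sql-tools/helpers, not shown in this diff) unpacks it into the named fields spread into each trigger. The bit layout, per PostgreSQL's trigger.h:

// 1 << 0  FOR EACH ROW (otherwise FOR EACH STATEMENT)
// 1 << 1  BEFORE (otherwise AFTER)
// 1 << 2  INSERT
// 1 << 3  DELETE
// 1 << 4  UPDATE
// 1 << 5  TRUNCATE
// 1 << 6  INSTEAD OF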
server/src/sql-tools/from-database/readers/type.ts (new file)
@@ -0,0 +1,3 @@
import { DatabaseClient, DatabaseSchema } from 'src/sql-tools/types';

export type DatabaseReader = (schema: DatabaseSchema, db: DatabaseClient) => Promise<void>;
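This three-line contract is the whole plugin surface: a reader is an async function that mutates the shared schema. Readers run sequentially in array order, so later readers may rely on the output of earlier ones; for instance:

// readTables must run before readColumns, readIndexes, readConstraints,
// readTriggers, and readComments, since they all resolve tables by name:
// schema.tables.find((table) => table.name === row.table_name)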