Mirror of https://github.com/zoriya/Kyoo.git (synced 2025-11-26 08:15:07 -05:00)
Batch images task insertion and add priority
parent f59cb5d671
commit 019aceb8d9
@@ -5,7 +5,7 @@
 "": {
 "name": "api",
 "dependencies": {
-"@elysiajs/opentelemetry": "^1.4.7",
+"@elysiajs/opentelemetry": "^1.4.8",
 "@elysiajs/swagger": "zoriya/elysia-swagger#build",
 "@kubiks/otel-drizzle": "zoriya/drizzle-otel#build",
 "@types/bun": "^1.3.1",
@@ -49,7 +49,7 @@

 "@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="],

-"@elysiajs/opentelemetry": ["@elysiajs/opentelemetry@1.4.7", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/instrumentation": "^0.200.0", "@opentelemetry/sdk-node": "^0.200.0" }, "peerDependencies": { "elysia": ">= 1.4.0" } }, "sha512-biJfj3bCHf7aYPB8EygvN90sEKR/qgPn8Cziq2ebJSGyY8cpmskTTP6zbUMkMk6R6rfpoP7ECZbXlTZz+7BfJA=="],
+"@elysiajs/opentelemetry": ["@elysiajs/opentelemetry@1.4.8", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/instrumentation": "^0.200.0", "@opentelemetry/sdk-node": "^0.200.0" }, "peerDependencies": { "elysia": ">= 1.4.0" } }, "sha512-c9unbcdXfehExCv1GsiTCfos5SyIAyDwP7apcMeXmUMBaJZiAYMfiEH8RFFFIfIHJHC/xlNJzUPodkcUaaoJJQ=="],

 "@elysiajs/swagger": ["@elysiajs/swagger@github:zoriya/elysia-swagger#f88fbc7", { "dependencies": { "@scalar/themes": "^0.9.81", "@scalar/types": "^0.1.3", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "zoriya-elysia-swagger-f88fbc7"],

api/drizzle/0023_mqueue-priority.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
+ALTER TABLE "kyoo"."history" ALTER COLUMN "time" SET DEFAULT 0;--> statement-breakpoint
+ALTER TABLE "kyoo"."history" ALTER COLUMN "time" SET NOT NULL;--> statement-breakpoint
+ALTER TABLE "kyoo"."mqueue" ADD COLUMN "priority" integer DEFAULT 0 NOT NULL;
api/drizzle/meta/0023_snapshot.json (new file, 2036 lines; diff suppressed because it is too large)
@@ -162,6 +162,13 @@
 "when": 1752446736231,
 "tag": "0022_seasons-count",
 "breakpoints": true
 },
+{
+"idx": 23,
+"version": "7",
+"when": 1763924097229,
+"tag": "0023_mqueue-priority",
+"breakpoints": true
+}
 ]
 }
@@ -9,7 +9,7 @@
 "format": "biome check --write ."
 },
 "dependencies": {
-"@elysiajs/opentelemetry": "^1.4.7",
+"@elysiajs/opentelemetry": "^1.4.8",
 "@elysiajs/swagger": "zoriya/elysia-swagger#build",
 "@kubiks/otel-drizzle": "zoriya/drizzle-otel#build",
 "@types/bun": "^1.3.1",
@@ -15,7 +15,7 @@ import { getFile } from "~/utils";
 export const imageDir = process.env.IMAGES_PATH ?? "/images";
 export const defaultBlurhash = "000000";

-type ImageTask = {
+export type ImageTask = {
 id: string;
 url: string;
 table: string;
@@ -25,12 +25,12 @@ type ImageTask = {
 // this will only push a task to the image downloader service and not download it instantly.
 // this is both done to prevent too many requests to be sent at once and to make sure POST
 // requests are not blocked by image downloading or blurhash calculation
-export const enqueueOptImage = async (
-tx: Transaction,
+export const enqueueOptImage = (
+imgQueue: ImageTask[],
 img:
 | { url: string | null; column: PgColumn }
 | { url: string | null; table: PgTable; column: SQL },
-): Promise<Image | null> => {
+): Image | null => {
 if (!img.url) return null;

 const hasher = new Bun.CryptoHasher("sha256");
@@ -66,11 +66,8 @@ export const enqueueOptImage = async (
 table: db.dialect.sqlToQuery(sql`${img.column.table}`).sql,
 column: sql.identifier(img.column.name).value,
 };
-await tx.insert(mqueue).values({
-kind: "image",
-message,
-});
-await tx.execute(sql`notify kyoo_image`);
+
+imgQueue.push(message);

 return {
 id,
@@ -79,6 +76,20 @@ export const enqueueOptImage = async (
 };
 };

+export const flushImageQueue = async (
+tx: Transaction,
+imgQueue: ImageTask[],
+priority: number,
+) => {
+if (!imgQueue.length) return;
+record("enqueue images", async () => {
+await tx
+.insert(mqueue)
+.values(imgQueue.map((x) => ({ kind: "image", message: x, priority })));
+await tx.execute(sql`notify kyoo_image`);
+});
+};
+
 export const processImages = async () => {
 return record("download images", async () => {
 let running = false;
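A minimal sketch of how the new pair fits together, modeled on the seeding code further down (the URL and priority are placeholders; the import path assumes a module next to the seed controllers):

import { db } from "~/db";
import { showTranslations } from "~/db/schema";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "./images";

await db.transaction(async (tx) => {
  const imgQueue: ImageTask[] = [];
  // Now synchronous: hashes the url, records the task locally, returns the Image metadata.
  const poster = enqueueOptImage(imgQueue, {
    url: "https://example.org/poster.jpg", // placeholder
    column: showTranslations.poster,
  });
  // ...insert the rows that reference `poster`...
  // One batched insert into mqueue plus a single `notify kyoo_image`.
  await flushImageQueue(tx, imgQueue, 100);
});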
@@ -114,7 +125,7 @@ async function processOne() {
 .from(mqueue)
 .for("update", { skipLocked: true })
 .where(and(eq(mqueue.kind, "image"), lt(mqueue.attempt, 5)))
-.orderBy(mqueue.attempt, mqueue.createdAt)
+.orderBy(mqueue.priority, mqueue.attempt, mqueue.createdAt)
 .limit(1);

 if (!item) return false;
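Roughly the SQL shape of the picker query after this change (the exact statement drizzle emits differs, and created_at may be cased differently depending on the casing configuration):

SELECT *
FROM "kyoo"."mqueue"
WHERE kind = 'image' AND attempt < 5
ORDER BY priority, attempt, created_at -- ascending, drizzle's default direction
LIMIT 1
FOR UPDATE SKIP LOCKED;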
@@ -5,7 +5,7 @@ import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedCollection } from "~/models/collections";
 import type { SeedMovie } from "~/models/movie";
 import type { SeedSerie } from "~/models/serie";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";

 type ShowTrans = typeof showTranslations.$inferInsert;

@@ -19,6 +19,7 @@ export const insertCollection = async (
 const { translations, ...col } = collection;

 return await db.transaction(async (tx) => {
+const imgQueue: ImageTask[] = [];
 const [ret] = await tx
 .insert(shows)
 .values({
@@ -48,29 +49,30 @@ export const insertCollection = async (
 })
 .returning({ pk: shows.pk, id: shows.id, slug: shows.slug });

-const trans: ShowTrans[] = await Promise.all(
-Object.entries(translations).map(async ([lang, tr]) => ({
+const trans: ShowTrans[] = Object.entries(translations).map(
+([lang, tr]) => ({
 pk: ret.pk,
 language: lang,
 ...tr,
-poster: await enqueueOptImage(tx, {
+poster: enqueueOptImage(imgQueue, {
 url: tr.poster,
 column: showTranslations.poster,
 }),
-thumbnail: await enqueueOptImage(tx, {
+thumbnail: enqueueOptImage(imgQueue, {
 url: tr.thumbnail,
 column: showTranslations.thumbnail,
 }),
-logo: await enqueueOptImage(tx, {
+logo: enqueueOptImage(imgQueue, {
 url: tr.logo,
 column: showTranslations.logo,
 }),
-banner: await enqueueOptImage(tx, {
+banner: enqueueOptImage(imgQueue, {
 url: tr.banner,
 column: showTranslations.banner,
 }),
-})),
+}),
 );
+await flushImageQueue(tx, imgQueue, 100);
 await tx
 .insert(showTranslations)
 .values(trans)
@@ -8,7 +8,7 @@ import {
 } from "~/db/schema";
 import { conflictUpdateAllExcept, values } from "~/db/utils";
 import type { SeedEntry as SEntry, SeedExtra as SExtra } from "~/models/entry";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 import { guessNextRefresh } from "../refresh";
 import { updateAvailableCount, updateAvailableSince } from "./shows";

@@ -50,30 +50,29 @@ export const insertEntries = async (
 if (!items.length) return [];

 const retEntries = await db.transaction(async (tx) => {
-const vals: EntryI[] = await Promise.all(
-items.map(async (seed) => {
-const { translations, videos, video, ...entry } = seed;
-return {
-...entry,
-showPk: show.pk,
-slug: generateSlug(show.slug, seed),
-thumbnail: await enqueueOptImage(tx, {
-url: seed.thumbnail,
-column: entries.thumbnail,
-}),
-nextRefresh:
-entry.kind !== "extra"
-? guessNextRefresh(entry.airDate ?? new Date())
-: guessNextRefresh(new Date()),
-episodeNumber:
-entry.kind === "episode"
-? entry.episodeNumber
-: entry.kind === "special"
-? entry.number
-: undefined,
-};
-}),
-);
+const imgQueue: ImageTask[] = [];
+const vals: EntryI[] = items.map((seed) => {
+const { translations, videos, video, ...entry } = seed;
+return {
+...entry,
+showPk: show.pk,
+slug: generateSlug(show.slug, seed),
+thumbnail: enqueueOptImage(imgQueue, {
+url: seed.thumbnail,
+column: entries.thumbnail,
+}),
+nextRefresh:
+entry.kind !== "extra"
+? guessNextRefresh(entry.airDate ?? new Date())
+: guessNextRefresh(new Date()),
+episodeNumber:
+entry.kind === "episode"
+? entry.episodeNumber
+: entry.kind === "special"
+? entry.number
+: undefined,
+};
+});
 const ret = await tx
 .insert(entries)
 .values(vals)
@@ -89,41 +88,36 @@ export const insertEntries = async (
 })
 .returning({ pk: entries.pk, id: entries.id, slug: entries.slug });

-const trans: EntryTransI[] = (
-await Promise.all(
-items.map(async (seed, i) => {
-if (seed.kind === "extra") {
-return [
-{
-pk: ret[i].pk,
-// yeah we hardcode the language to extra because if we want to support
-// translations one day it won't be awkward
-language: "extra",
-name: seed.name,
-description: null,
-poster: undefined,
-},
-];
-}
+const trans: EntryTransI[] = items.flatMap((seed, i) => {
+if (seed.kind === "extra") {
+return [
+{
+pk: ret[i].pk,
+// yeah we hardcode the language to extra because if we want to support
+// translations one day it won't be awkward
+language: "extra",
+name: seed.name,
+description: null,
+poster: undefined,
+},
+];
+}

-return await Promise.all(
-Object.entries(seed.translations).map(async ([lang, tr]) => ({
-// assumes ret is ordered like items.
-pk: ret[i].pk,
-language: lang,
-...tr,
-poster:
-seed.kind === "movie"
-? await enqueueOptImage(tx, {
-url: (tr as any).poster,
-column: entryTranslations.poster,
-})
-: undefined,
-})),
-);
-}),
-)
-).flat();
+return Object.entries(seed.translations).map(([lang, tr]) => ({
+// assumes ret is ordered like items.
+pk: ret[i].pk,
+language: lang,
+...tr,
+poster:
+seed.kind === "movie"
+? enqueueOptImage(imgQueue, {
+url: (tr as any).poster,
+column: entryTranslations.poster,
+})
+: undefined,
+}));
+});
+await flushImageQueue(tx, imgQueue, 0);
 await tx
 .insert(entryTranslations)
 .values(trans)
@@ -2,7 +2,7 @@ import { db } from "~/db";
 import { seasons, seasonTranslations } from "~/db/schema";
 import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedSeason } from "~/models/season";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 import { guessNextRefresh } from "../refresh";

 type SeasonI = typeof seasons.$inferInsert;
@@ -15,6 +15,7 @@ export const insertSeasons = async (
 if (!items.length) return [];

 return db.transaction(async (tx) => {
+const imgQueue: ImageTask[] = [];
 const vals: SeasonI[] = items.map((x) => {
 const { translations, ...season } = x;
 return {
@@ -42,33 +43,27 @@ export const insertSeasons = async (
 })
 .returning({ pk: seasons.pk, id: seasons.id, slug: seasons.slug });

-const trans: SeasonTransI[] = (
-await Promise.all(
-items.map(
-async (seed, i) =>
-await Promise.all(
-Object.entries(seed.translations).map(async ([lang, tr]) => ({
-// assumes ret is ordered like items.
-pk: ret[i].pk,
-language: lang,
-...tr,
-poster: await enqueueOptImage(tx, {
-url: tr.poster,
-column: seasonTranslations.poster,
-}),
-thumbnail: await enqueueOptImage(tx, {
-url: tr.thumbnail,
-column: seasonTranslations.thumbnail,
-}),
-banner: await enqueueOptImage(tx, {
-url: tr.banner,
-column: seasonTranslations.banner,
-}),
-})),
-),
-),
-)
-).flat();
+const trans: SeasonTransI[] = items.flatMap((seed, i) =>
+Object.entries(seed.translations).map(([lang, tr]) => ({
+// assumes ret is ordered like items.
+pk: ret[i].pk,
+language: lang,
+...tr,
+poster: enqueueOptImage(imgQueue, {
+url: tr.poster,
+column: seasonTranslations.poster,
+}),
+thumbnail: enqueueOptImage(imgQueue, {
+url: tr.thumbnail,
+column: seasonTranslations.thumbnail,
+}),
+banner: enqueueOptImage(imgQueue, {
+url: tr.banner,
+column: seasonTranslations.banner,
+}),
+})),
+);
+await flushImageQueue(tx, imgQueue, -10);
 await tx
 .insert(seasonTranslations)
 .values(trans)
@@ -22,7 +22,7 @@ import type { SeedMovie } from "~/models/movie";
 import type { SeedSerie } from "~/models/serie";
 import type { Original } from "~/models/utils";
 import { getYear } from "~/utils";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";

 type Show = typeof shows.$inferInsert;
 type ShowTrans = typeof showTranslations.$inferInsert;
@@ -41,24 +41,25 @@ export const insertShow = async (
 | SeedCollection["translations"],
 ) => {
 return await db.transaction(async (tx) => {
+const imgQueue: ImageTask[] = [];
 const orig = {
 ...original,
-poster: await enqueueOptImage(tx, {
+poster: enqueueOptImage(imgQueue, {
 url: original.poster,
 table: shows,
 column: sql`${shows.original}['poster']`,
 }),
-thumbnail: await enqueueOptImage(tx, {
+thumbnail: enqueueOptImage(imgQueue, {
 url: original.thumbnail,
 table: shows,
 column: sql`${shows.original}['thumbnail']`,
 }),
-banner: await enqueueOptImage(tx, {
+banner: enqueueOptImage(imgQueue, {
 url: original.banner,
 table: shows,
 column: sql`${shows.original}['banner']`,
 }),
-logo: await enqueueOptImage(tx, {
+logo: enqueueOptImage(imgQueue, {
 url: original.logo,
 table: shows,
 column: sql`${shows.original}['logo']`,
@@ -67,30 +68,31 @@ export const insertShow = async (
 const ret = await insertBaseShow(tx, { ...show, original: orig });
 if ("status" in ret) return ret;

-const trans: ShowTrans[] = await Promise.all(
-Object.entries(translations).map(async ([lang, tr]) => ({
+const trans: ShowTrans[] = Object.entries(translations).map(
+([lang, tr]) => ({
 pk: ret.pk,
 language: lang,
 ...tr,
 latinName: tr.latinName ?? null,
-poster: await enqueueOptImage(tx, {
+poster: enqueueOptImage(imgQueue, {
 url: tr.poster,
 column: showTranslations.poster,
 }),
-thumbnail: await enqueueOptImage(tx, {
+thumbnail: enqueueOptImage(imgQueue, {
 url: tr.thumbnail,
 column: showTranslations.thumbnail,
 }),
-logo: await enqueueOptImage(tx, {
+logo: enqueueOptImage(imgQueue, {
 url: tr.logo,
 column: showTranslations.logo,
 }),
-banner: await enqueueOptImage(tx, {
+banner: enqueueOptImage(imgQueue, {
 url: tr.banner,
 column: showTranslations.banner,
 }),
-})),
+}),
 );
+await flushImageQueue(tx, imgQueue, 200);
 await tx
 .insert(showTranslations)
 .values(trans)
@@ -3,7 +3,7 @@ import { db } from "~/db";
 import { roles, staff } from "~/db/schema";
 import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedStaff } from "~/models/staff";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";

 export const insertStaff = async (
 seed: SeedStaff[] | undefined,
@@ -12,15 +12,14 @@ export const insertStaff = async (
 if (!seed?.length) return [];

 return await db.transaction(async (tx) => {
-const people = await Promise.all(
-seed.map(async (x) => ({
-...x.staff,
-image: await enqueueOptImage(tx, {
-url: x.staff.image,
-column: staff.image,
-}),
-})),
-);
+const imgQueue: ImageTask[] = [];
+const people = seed.map((x) => ({
+...x.staff,
+image: enqueueOptImage(imgQueue, {
+url: x.staff.image,
+column: staff.image,
+}),
+}));
 const ret = await tx
 .insert(staff)
 .values(people)
@@ -30,22 +29,22 @@ export const insertStaff = async (
 })
 .returning({ pk: staff.pk, id: staff.id, slug: staff.slug });

-const rval = await Promise.all(
-seed.map(async (x, i) => ({
-showPk,
-staffPk: ret[i].pk,
-kind: x.kind,
-order: i,
-character: {
-...x.character,
-image: await enqueueOptImage(tx, {
-url: x.character.image,
-table: roles,
-column: sql`${roles.character}['image']`,
-}),
-},
-})),
-);
+const rval = seed.map((x, i) => ({
+showPk,
+staffPk: ret[i].pk,
+kind: x.kind,
+order: i,
+character: {
+...x.character,
+image: enqueueOptImage(imgQueue, {
+url: x.character.image,
+table: roles,
+column: sql`${roles.character}['image']`,
+}),
+},
+}));
+
+await flushImageQueue(tx, imgQueue, -200);

 // always replace all roles. this is because:
 // - we want `order` to stay in sync (& without duplicates)
@@ -2,7 +2,7 @@ import { db } from "~/db";
 import { showStudioJoin, studios, studioTranslations } from "~/db/schema";
 import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedStudio } from "~/models/studio";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, ImageTask } from "../images";

 type StudioI = typeof studios.$inferInsert;
 type StudioTransI = typeof studioTranslations.$inferInsert;
@@ -33,24 +33,19 @@ export const insertStudios = async (
 })
 .returning({ pk: studios.pk, id: studios.id, slug: studios.slug });

-const trans: StudioTransI[] = (
-await Promise.all(
-seed.map(
-async (x, i) =>
-await Promise.all(
-Object.entries(x.translations).map(async ([lang, tr]) => ({
-pk: ret[i].pk,
-language: lang,
-name: tr.name,
-logo: await enqueueOptImage(tx, {
-url: tr.logo,
-column: studioTranslations.logo,
-}),
-})),
-),
-),
-)
-).flat();
+const imgQueue: ImageTask[] = [];
+const trans: StudioTransI[] = seed.flatMap((x, i) =>
+Object.entries(x.translations).map(([lang, tr]) => ({
+pk: ret[i].pk,
+language: lang,
+name: tr.name,
+logo: enqueueOptImage(imgQueue, {
+url: tr.logo,
+column: studioTranslations.logo,
+}),
+})),
+);
+await flushImageQueue(tx, imgQueue, -100);
 await tx
 .insert(studioTranslations)
 .values(trans)
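For reference, the priority each seeder above passes to flushImageQueue, gathered into one purely illustrative map (not part of the diff itself):

// Illustrative summary of the values used in this commit.
const seedImagePriorities = {
  show: 200,
  collection: 100,
  entry: 0,
  season: -10,
  studio: -100,
  staff: -200,
};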
@@ -8,18 +8,18 @@ import { migrate as migrateDb } from "drizzle-orm/node-postgres/migrator";
 import type { PoolConfig } from "pg";
 import * as schema from "./schema";

-async function getPostgresConfig(): Promise<PoolConfig> {
-const config: PoolConfig = {
-connectionString: process.env.POSTGRES_URL,
-host: process.env.PGHOST ?? "postgres",
-port: Number(process.env.PGPORT) || 5432,
-database: process.env.PGDATABASE ?? "kyoo",
-user: process.env.PGUSER ?? "kyoo",
-password: process.env.PGPASSWORD ?? "password",
-options: process.env.PGOPTIONS,
-application_name: process.env.PGAPPNAME ?? "kyoo",
-};
+const config: PoolConfig = {
+connectionString: process.env.POSTGRES_URL,
+host: process.env.PGHOST ?? "postgres",
+port: Number(process.env.PGPORT) || 5432,
+database: process.env.PGDATABASE ?? "kyoo",
+user: process.env.PGUSER ?? "kyoo",
+password: process.env.PGPASSWORD ?? "password",
+options: process.env.PGOPTIONS,
+application_name: process.env.PGAPPNAME ?? "kyoo",
+};

+async function parseSslConfig(): Promise<PoolConfig> {
 // Due to an upstream bug, if `ssl` is not falsey, an SSL connection will always be attempted. This means
 // that non-SSL connection options under `ssl` (which is incorrectly named) cannot be set unless SSL is enabled.
 if (!process.env.PGSSLMODE || process.env.PGSSLMODE === "disable")
@@ -108,7 +108,9 @@ async function getPostgresConfig(): Promise<PoolConfig> {
 return config;
 }

-const postgresConfig = await getPostgresConfig();
+const postgresConfig = await parseSslConfig();
+// use this when using drizzle-kit since it can't parse await statements
+// const postgresConfig = config;

 export const db = drizzle({
 schema,
@@ -12,9 +12,8 @@ import {
 uuid,
 varchar,
 } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { shows } from "./shows";
-import { image, language, schema } from "./utils";
+import { image, language, schema, timestamp } from "./utils";
 import { entryVideoJoin } from "./videos";

 export const entryType = schema.enum("entry_type", [
@@ -1,9 +1,8 @@
 import { sql } from "drizzle-orm";
 import { check, index, integer } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { entries } from "./entries";
 import { profiles } from "./profiles";
-import { schema } from "./utils";
+import { schema, timestamp } from "./utils";
 import { videos } from "./videos";

 export const history = schema.table(
@@ -1,7 +1,6 @@
 import { sql } from "drizzle-orm";
 import { index, integer, jsonb, uuid, varchar } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
-import { schema } from "./utils";
+import { schema, timestamp } from "./utils";

 export const mqueue = schema.table(
 "mqueue",
@@ -9,6 +8,7 @@ export const mqueue = schema.table(
 id: uuid().notNull().primaryKey().defaultRandom(),
 kind: varchar({ length: 255 }).notNull(),
 message: jsonb().notNull(),
+priority: integer().notNull().default(0),
 attempt: integer().notNull().default(0),
 createdAt: timestamp({ withTimezone: true, mode: "iso" })
 .notNull()
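A hedged sketch of what the new column allows any producer to do (import paths and the priority value are illustrative; the image pipeline itself goes through flushImageQueue above):

import { sql } from "drizzle-orm";
import { db } from "~/db";
import { mqueue } from "~/db/schema";

// Illustrative only: enqueue one task with an explicit priority, then wake the worker.
await db.insert(mqueue).values({
  kind: "image",
  message: { id: "...", url: "...", table: "shows", column: "poster" }, // ImageTask-shaped payload
  priority: 50, // arbitrary example value
});
await db.execute(sql`notify kyoo_image`);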
@@ -10,9 +10,8 @@ import {
 uuid,
 varchar,
 } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { shows } from "./shows";
-import { image, language, schema } from "./utils";
+import { image, language, schema, timestamp } from "./utils";

 export const season_extid = () =>
 jsonb()
@@ -13,12 +13,11 @@ import {
 varchar,
 } from "drizzle-orm/pg-core";
 import type { Image, Original } from "~/models/utils";
-import { timestamp } from "../utils";
 import { entries } from "./entries";
 import { seasons } from "./seasons";
 import { roles } from "./staff";
 import { showStudioJoin } from "./studios";
-import { externalid, image, language, schema } from "./utils";
+import { externalid, image, language, schema, timestamp } from "./utils";

 export const showKind = schema.enum("show_kind", [
 "serie",
@@ -8,9 +8,8 @@ import {
 varchar,
 } from "drizzle-orm/pg-core";
 import type { Character } from "~/models/staff";
-import { timestamp } from "../utils";
 import { shows } from "./shows";
-import { externalid, image, schema } from "./utils";
+import { externalid, image, schema, timestamp } from "./utils";

 export const roleKind = schema.enum("role_kind", [
 "actor",
@@ -7,9 +7,8 @@ import {
 uuid,
 varchar,
 } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { shows } from "./shows";
-import { externalid, image, language, schema } from "./utils";
+import { externalid, image, language, schema, timestamp } from "./utils";

 export const studios = schema.table("studios", {
 pk: integer().primaryKey().generatedAlwaysAsIdentity(),
@@ -1,4 +1,4 @@
-import { jsonb, pgSchema, varchar } from "drizzle-orm/pg-core";
+import { customType, jsonb, pgSchema, varchar } from "drizzle-orm/pg-core";
 import type { Image } from "~/models/utils";

 export const schema = pgSchema("kyoo");
@@ -20,3 +20,19 @@ export const externalid = () =>
 >()
 .notNull()
 .default({});
+
+export const timestamp = customType<{
+data: string;
+driverData: string;
+config: { withTimezone: boolean; precision?: number; mode: "iso" };
+}>({
+dataType(config) {
+const precision = config?.precision ? ` (${config.precision})` : "";
+return `timestamp${precision}${config?.withTimezone ? " with time zone" : ""}`;
+},
+fromDriver(value: string): string {
+// postgres format: 2025-06-22 16:13:37.489301+00
+// what we want: 2025-06-22T16:13:37Z
+return `${value.substring(0, 10)}T${value.substring(11, 19)}Z`;
+},
+});
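Column definitions keep using the helper exactly as before the move; a small sketch from the point of view of a sibling schema file (table and column names are illustrative):

import { schema, timestamp } from "./utils";

// Renders as `timestamp with time zone` in DDL; fromDriver() turns the driver's
// "2025-06-22 16:13:37.489301+00" into "2025-06-22T16:13:37Z".
export const example = schema.table("example", {
  createdAt: timestamp({ withTimezone: true, mode: "iso" }).notNull(),
});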
@@ -10,9 +10,8 @@ import {
 varchar,
 } from "drizzle-orm/pg-core";
 import type { Guess } from "~/models/video";
-import { timestamp } from "../utils";
 import { entries } from "./entries";
-import { schema } from "./utils";
+import { schema, timestamp } from "./utils";

 export const videos = schema.table(
 "videos",
@@ -1,10 +1,9 @@
 import { sql } from "drizzle-orm";
 import { check, integer, primaryKey } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { entries } from "./entries";
 import { profiles } from "./profiles";
 import { shows } from "./shows";
-import { schema } from "./utils";
+import { schema, timestamp } from "./utils";

 export const watchlistStatus = schema.enum("watchlist_status", [
 "watching",
@@ -13,11 +13,7 @@ import {
 } from "drizzle-orm";
 import type { CasingCache } from "drizzle-orm/casing";
 import type { AnyMySqlSelect } from "drizzle-orm/mysql-core";
-import {
-type AnyPgSelect,
-customType,
-type SelectedFieldsFlat,
-} from "drizzle-orm/pg-core";
+import type { AnyPgSelect, SelectedFieldsFlat } from "drizzle-orm/pg-core";
 import type { AnySQLiteSelect } from "drizzle-orm/sqlite-core";
 import type { WithSubquery } from "drizzle-orm/subquery";
 import { db } from "./index";
@@ -157,19 +153,3 @@ export const isUniqueConstraint = (e: unknown): boolean => {
 cause.code === "23505"
 );
 };
-
-export const timestamp = customType<{
-data: string;
-driverData: string;
-config: { withTimezone: boolean; precision?: number; mode: "iso" };
-}>({
-dataType(config) {
-const precision = config?.precision ? ` (${config.precision})` : "";
-return `timestamp${precision}${config?.withTimezone ? " with time zone" : ""}`;
-},
-fromDriver(value: string): string {
-// postgres format: 2025-06-22 16:13:37.489301+00
-// what we want: 2025-06-22T16:13:37Z
-return `${value.substring(0, 10)}T${value.substring(11, 19)}Z`;
-},
-});