Batch images task insertion and add priority
commit 019aceb8d9
parent f59cb5d671
@@ -5,7 +5,7 @@
     "": {
       "name": "api",
       "dependencies": {
-        "@elysiajs/opentelemetry": "^1.4.7",
+        "@elysiajs/opentelemetry": "^1.4.8",
         "@elysiajs/swagger": "zoriya/elysia-swagger#build",
         "@kubiks/otel-drizzle": "zoriya/drizzle-otel#build",
         "@types/bun": "^1.3.1",
@@ -49,7 +49,7 @@
 
     "@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="],
 
-    "@elysiajs/opentelemetry": ["@elysiajs/opentelemetry@1.4.7", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/instrumentation": "^0.200.0", "@opentelemetry/sdk-node": "^0.200.0" }, "peerDependencies": { "elysia": ">= 1.4.0" } }, "sha512-biJfj3bCHf7aYPB8EygvN90sEKR/qgPn8Cziq2ebJSGyY8cpmskTTP6zbUMkMk6R6rfpoP7ECZbXlTZz+7BfJA=="],
+    "@elysiajs/opentelemetry": ["@elysiajs/opentelemetry@1.4.8", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/instrumentation": "^0.200.0", "@opentelemetry/sdk-node": "^0.200.0" }, "peerDependencies": { "elysia": ">= 1.4.0" } }, "sha512-c9unbcdXfehExCv1GsiTCfos5SyIAyDwP7apcMeXmUMBaJZiAYMfiEH8RFFFIfIHJHC/xlNJzUPodkcUaaoJJQ=="],
 
     "@elysiajs/swagger": ["@elysiajs/swagger@github:zoriya/elysia-swagger#f88fbc7", { "dependencies": { "@scalar/themes": "^0.9.81", "@scalar/types": "^0.1.3", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "zoriya-elysia-swagger-f88fbc7"],
 
api/drizzle/0023_mqueue-priority.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
+ALTER TABLE "kyoo"."history" ALTER COLUMN "time" SET DEFAULT 0;--> statement-breakpoint
+ALTER TABLE "kyoo"."history" ALTER COLUMN "time" SET NOT NULL;--> statement-breakpoint
+ALTER TABLE "kyoo"."mqueue" ADD COLUMN "priority" integer DEFAULT 0 NOT NULL;
api/drizzle/meta/0023_snapshot.json (new file, 2036 lines)
File diff suppressed because it is too large
@@ -162,6 +162,13 @@
       "when": 1752446736231,
       "tag": "0022_seasons-count",
       "breakpoints": true
+    },
+    {
+      "idx": 23,
+      "version": "7",
+      "when": 1763924097229,
+      "tag": "0023_mqueue-priority",
+      "breakpoints": true
     }
   ]
 }
@@ -9,7 +9,7 @@
     "format": "biome check --write ."
   },
   "dependencies": {
-    "@elysiajs/opentelemetry": "^1.4.7",
+    "@elysiajs/opentelemetry": "^1.4.8",
     "@elysiajs/swagger": "zoriya/elysia-swagger#build",
     "@kubiks/otel-drizzle": "zoriya/drizzle-otel#build",
     "@types/bun": "^1.3.1",
@@ -15,7 +15,7 @@ import { getFile } from "~/utils";
 export const imageDir = process.env.IMAGES_PATH ?? "/images";
 export const defaultBlurhash = "000000";
 
-type ImageTask = {
+export type ImageTask = {
   id: string;
   url: string;
   table: string;
@@ -25,12 +25,12 @@ type ImageTask = {
 // this will only push a task to the image downloader service and not download it instantly.
 // this is both done to prevent too many requests to be sent at once and to make sure POST
 // requests are not blocked by image downloading or blurhash calculation
-export const enqueueOptImage = async (
-  tx: Transaction,
+export const enqueueOptImage = (
+  imgQueue: ImageTask[],
   img:
     | { url: string | null; column: PgColumn }
     | { url: string | null; table: PgTable; column: SQL },
-): Promise<Image | null> => {
+): Image | null => {
   if (!img.url) return null;
 
   const hasher = new Bun.CryptoHasher("sha256");
@@ -66,11 +66,8 @@ export const enqueueOptImage = async (
     table: db.dialect.sqlToQuery(sql`${img.column.table}`).sql,
     column: sql.identifier(img.column.name).value,
   };
-  await tx.insert(mqueue).values({
-    kind: "image",
-    message,
-  });
-  await tx.execute(sql`notify kyoo_image`);
+  imgQueue.push(message);
 
   return {
     id,
@@ -79,6 +76,20 @@ export const enqueueOptImage = async (
   };
 };
 
+export const flushImageQueue = async (
+  tx: Transaction,
+  imgQueue: ImageTask[],
+  priority: number,
+) => {
+  if (!imgQueue.length) return;
+  record("enqueue images", async () => {
+    await tx
+      .insert(mqueue)
+      .values(imgQueue.map((x) => ({ kind: "image", message: x, priority })));
+    await tx.execute(sql`notify kyoo_image`);
+  });
+};
+
 export const processImages = async () => {
   return record("download images", async () => {
     let running = false;
@@ -114,7 +125,7 @@ async function processOne() {
       .from(mqueue)
       .for("update", { skipLocked: true })
       .where(and(eq(mqueue.kind, "image"), lt(mqueue.attempt, 5)))
-      .orderBy(mqueue.attempt, mqueue.createdAt)
+      .orderBy(mqueue.priority, mqueue.attempt, mqueue.createdAt)
       .limit(1);
 
     if (!item) return false;
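Note (not part of the commit): taken together, the hunks above replace one mqueue INSERT plus a NOTIFY per image with a single batched INSERT per transaction. The sketch below condenses the calling pattern the seed inserters use after this change; seedExample and posterUrl are hypothetical names, the priority value 100 matches the one used for collections later in this diff, and the import paths follow the conventions visible in the diff.

import { db } from "~/db";
import { showTranslations } from "~/db/schema";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";

// Hypothetical helper illustrating the batched enqueue/flush pattern.
export const seedExample = async (posterUrl: string | null) =>
  db.transaction(async (tx) => {
    // Collect image tasks synchronously while building the rows to insert.
    const imgQueue: ImageTask[] = [];
    const poster = enqueueOptImage(imgQueue, {
      url: posterUrl,
      column: showTranslations.poster,
    });
    // ...insert rows that reference `poster` here...
    // One INSERT into mqueue for the whole batch, then a single notify.
    await flushImageQueue(tx, imgQueue, 100);
    return poster;
  });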
@@ -5,7 +5,7 @@ import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedCollection } from "~/models/collections";
 import type { SeedMovie } from "~/models/movie";
 import type { SeedSerie } from "~/models/serie";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 
 type ShowTrans = typeof showTranslations.$inferInsert;
 
@@ -19,6 +19,7 @@ export const insertCollection = async (
   const { translations, ...col } = collection;
 
   return await db.transaction(async (tx) => {
+    const imgQueue: ImageTask[] = [];
     const [ret] = await tx
       .insert(shows)
       .values({
@@ -48,29 +49,30 @@
       })
       .returning({ pk: shows.pk, id: shows.id, slug: shows.slug });
 
-    const trans: ShowTrans[] = await Promise.all(
-      Object.entries(translations).map(async ([lang, tr]) => ({
+    const trans: ShowTrans[] = Object.entries(translations).map(
+      ([lang, tr]) => ({
         pk: ret.pk,
         language: lang,
         ...tr,
-        poster: await enqueueOptImage(tx, {
+        poster: enqueueOptImage(imgQueue, {
           url: tr.poster,
           column: showTranslations.poster,
         }),
-        thumbnail: await enqueueOptImage(tx, {
+        thumbnail: enqueueOptImage(imgQueue, {
           url: tr.thumbnail,
           column: showTranslations.thumbnail,
         }),
-        logo: await enqueueOptImage(tx, {
+        logo: enqueueOptImage(imgQueue, {
           url: tr.logo,
           column: showTranslations.logo,
         }),
-        banner: await enqueueOptImage(tx, {
+        banner: enqueueOptImage(imgQueue, {
           url: tr.banner,
           column: showTranslations.banner,
         }),
-      })),
+      }),
     );
+    await flushImageQueue(tx, imgQueue, 100);
     await tx
       .insert(showTranslations)
       .values(trans)
@@ -8,7 +8,7 @@ import {
 } from "~/db/schema";
 import { conflictUpdateAllExcept, values } from "~/db/utils";
 import type { SeedEntry as SEntry, SeedExtra as SExtra } from "~/models/entry";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 import { guessNextRefresh } from "../refresh";
 import { updateAvailableCount, updateAvailableSince } from "./shows";
 
@@ -50,30 +50,29 @@
   if (!items.length) return [];
 
   const retEntries = await db.transaction(async (tx) => {
-    const vals: EntryI[] = await Promise.all(
-      items.map(async (seed) => {
+    const imgQueue: ImageTask[] = [];
+    const vals: EntryI[] = items.map((seed) => {
       const { translations, videos, video, ...entry } = seed;
       return {
         ...entry,
         showPk: show.pk,
         slug: generateSlug(show.slug, seed),
-        thumbnail: await enqueueOptImage(tx, {
+        thumbnail: enqueueOptImage(imgQueue, {
           url: seed.thumbnail,
           column: entries.thumbnail,
         }),
         nextRefresh:
           entry.kind !== "extra"
             ? guessNextRefresh(entry.airDate ?? new Date())
            : guessNextRefresh(new Date()),
         episodeNumber:
           entry.kind === "episode"
             ? entry.episodeNumber
             : entry.kind === "special"
               ? entry.number
               : undefined,
       };
-      }),
-    );
+    });
     const ret = await tx
       .insert(entries)
       .values(vals)
@@ -89,41 +88,36 @@
       })
       .returning({ pk: entries.pk, id: entries.id, slug: entries.slug });
 
-    const trans: EntryTransI[] = (
-      await Promise.all(
-        items.map(async (seed, i) => {
-          if (seed.kind === "extra") {
-            return [
-              {
-                pk: ret[i].pk,
-                // yeah we hardcode the language to extra because if we want to support
-                // translations one day it won't be awkward
-                language: "extra",
-                name: seed.name,
-                description: null,
-                poster: undefined,
-              },
-            ];
-          }
-
-          return await Promise.all(
-            Object.entries(seed.translations).map(async ([lang, tr]) => ({
-              // assumes ret is ordered like items.
-              pk: ret[i].pk,
-              language: lang,
-              ...tr,
-              poster:
-                seed.kind === "movie"
-                  ? await enqueueOptImage(tx, {
-                      url: (tr as any).poster,
-                      column: entryTranslations.poster,
-                    })
-                  : undefined,
-            })),
-          );
-        }),
-      )
-    ).flat();
+    const trans: EntryTransI[] = items.flatMap((seed, i) => {
+      if (seed.kind === "extra") {
+        return [
+          {
+            pk: ret[i].pk,
+            // yeah we hardcode the language to extra because if we want to support
+            // translations one day it won't be awkward
+            language: "extra",
+            name: seed.name,
+            description: null,
+            poster: undefined,
+          },
+        ];
+      }
+
+      return Object.entries(seed.translations).map(([lang, tr]) => ({
+        // assumes ret is ordered like items.
+        pk: ret[i].pk,
+        language: lang,
+        ...tr,
+        poster:
+          seed.kind === "movie"
+            ? enqueueOptImage(imgQueue, {
+                url: (tr as any).poster,
+                column: entryTranslations.poster,
+              })
+            : undefined,
+      }));
+    });
+    await flushImageQueue(tx, imgQueue, 0);
     await tx
       .insert(entryTranslations)
       .values(trans)
@@ -2,7 +2,7 @@ import { db } from "~/db";
 import { seasons, seasonTranslations } from "~/db/schema";
 import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedSeason } from "~/models/season";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 import { guessNextRefresh } from "../refresh";
 
 type SeasonI = typeof seasons.$inferInsert;
@@ -15,6 +15,7 @@ export const insertSeasons = async (
   if (!items.length) return [];
 
   return db.transaction(async (tx) => {
+    const imgQueue: ImageTask[] = [];
     const vals: SeasonI[] = items.map((x) => {
       const { translations, ...season } = x;
       return {
@@ -42,33 +43,27 @@
       })
       .returning({ pk: seasons.pk, id: seasons.id, slug: seasons.slug });
 
-    const trans: SeasonTransI[] = (
-      await Promise.all(
-        items.map(
-          async (seed, i) =>
-            await Promise.all(
-              Object.entries(seed.translations).map(async ([lang, tr]) => ({
-                // assumes ret is ordered like items.
-                pk: ret[i].pk,
-                language: lang,
-                ...tr,
-                poster: await enqueueOptImage(tx, {
-                  url: tr.poster,
-                  column: seasonTranslations.poster,
-                }),
-                thumbnail: await enqueueOptImage(tx, {
-                  url: tr.thumbnail,
-                  column: seasonTranslations.thumbnail,
-                }),
-                banner: await enqueueOptImage(tx, {
-                  url: tr.banner,
-                  column: seasonTranslations.banner,
-                }),
-              })),
-            ),
-        ),
-      )
-    ).flat();
+    const trans: SeasonTransI[] = items.flatMap((seed, i) =>
+      Object.entries(seed.translations).map(([lang, tr]) => ({
+        // assumes ret is ordered like items.
+        pk: ret[i].pk,
+        language: lang,
+        ...tr,
+        poster: enqueueOptImage(imgQueue, {
+          url: tr.poster,
+          column: seasonTranslations.poster,
+        }),
+        thumbnail: enqueueOptImage(imgQueue, {
+          url: tr.thumbnail,
+          column: seasonTranslations.thumbnail,
+        }),
+        banner: enqueueOptImage(imgQueue, {
+          url: tr.banner,
+          column: seasonTranslations.banner,
+        }),
+      })),
+    );
+    await flushImageQueue(tx, imgQueue, -10);
     await tx
       .insert(seasonTranslations)
       .values(trans)
@@ -22,7 +22,7 @@ import type { SeedMovie } from "~/models/movie";
 import type { SeedSerie } from "~/models/serie";
 import type { Original } from "~/models/utils";
 import { getYear } from "~/utils";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 
 type Show = typeof shows.$inferInsert;
 type ShowTrans = typeof showTranslations.$inferInsert;
@@ -41,24 +41,25 @@
     | SeedCollection["translations"],
 ) => {
   return await db.transaction(async (tx) => {
+    const imgQueue: ImageTask[] = [];
     const orig = {
       ...original,
-      poster: await enqueueOptImage(tx, {
+      poster: enqueueOptImage(imgQueue, {
         url: original.poster,
         table: shows,
         column: sql`${shows.original}['poster']`,
       }),
-      thumbnail: await enqueueOptImage(tx, {
+      thumbnail: enqueueOptImage(imgQueue, {
        url: original.thumbnail,
         table: shows,
         column: sql`${shows.original}['thumbnail']`,
       }),
-      banner: await enqueueOptImage(tx, {
+      banner: enqueueOptImage(imgQueue, {
         url: original.banner,
         table: shows,
         column: sql`${shows.original}['banner']`,
       }),
-      logo: await enqueueOptImage(tx, {
+      logo: enqueueOptImage(imgQueue, {
         url: original.logo,
         table: shows,
         column: sql`${shows.original}['logo']`,
@@ -67,30 +68,31 @@
     const ret = await insertBaseShow(tx, { ...show, original: orig });
     if ("status" in ret) return ret;
 
-    const trans: ShowTrans[] = await Promise.all(
-      Object.entries(translations).map(async ([lang, tr]) => ({
+    const trans: ShowTrans[] = Object.entries(translations).map(
+      ([lang, tr]) => ({
         pk: ret.pk,
         language: lang,
         ...tr,
         latinName: tr.latinName ?? null,
-        poster: await enqueueOptImage(tx, {
+        poster: enqueueOptImage(imgQueue, {
           url: tr.poster,
           column: showTranslations.poster,
         }),
-        thumbnail: await enqueueOptImage(tx, {
+        thumbnail: enqueueOptImage(imgQueue, {
           url: tr.thumbnail,
           column: showTranslations.thumbnail,
         }),
-        logo: await enqueueOptImage(tx, {
+        logo: enqueueOptImage(imgQueue, {
           url: tr.logo,
           column: showTranslations.logo,
         }),
-        banner: await enqueueOptImage(tx, {
+        banner: enqueueOptImage(imgQueue, {
           url: tr.banner,
           column: showTranslations.banner,
         }),
-      })),
+      }),
     );
+    await flushImageQueue(tx, imgQueue, 200);
     await tx
       .insert(showTranslations)
       .values(trans)
@@ -3,7 +3,7 @@ import { db } from "~/db";
 import { roles, staff } from "~/db/schema";
 import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedStaff } from "~/models/staff";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 
 export const insertStaff = async (
   seed: SeedStaff[] | undefined,
@@ -12,15 +12,14 @@ export const insertStaff = async (
   if (!seed?.length) return [];
 
   return await db.transaction(async (tx) => {
-    const people = await Promise.all(
-      seed.map(async (x) => ({
-        ...x.staff,
-        image: await enqueueOptImage(tx, {
-          url: x.staff.image,
-          column: staff.image,
-        }),
-      })),
-    );
+    const imgQueue: ImageTask[] = [];
+    const people = seed.map((x) => ({
+      ...x.staff,
+      image: enqueueOptImage(imgQueue, {
+        url: x.staff.image,
+        column: staff.image,
+      }),
+    }));
     const ret = await tx
       .insert(staff)
       .values(people)
@@ -30,22 +29,22 @@ export const insertStaff = async (
       })
       .returning({ pk: staff.pk, id: staff.id, slug: staff.slug });
 
-    const rval = await Promise.all(
-      seed.map(async (x, i) => ({
-        showPk,
-        staffPk: ret[i].pk,
-        kind: x.kind,
-        order: i,
-        character: {
-          ...x.character,
-          image: await enqueueOptImage(tx, {
-            url: x.character.image,
-            table: roles,
-            column: sql`${roles.character}['image']`,
-          }),
-        },
-      })),
-    );
+    const rval = seed.map((x, i) => ({
+      showPk,
+      staffPk: ret[i].pk,
+      kind: x.kind,
+      order: i,
+      character: {
+        ...x.character,
+        image: enqueueOptImage(imgQueue, {
+          url: x.character.image,
+          table: roles,
+          column: sql`${roles.character}['image']`,
+        }),
+      },
+    }));
+    await flushImageQueue(tx, imgQueue, -200);
 
     // always replace all roles. this is because:
     // - we want `order` to stay in sync (& without duplicates)
@@ -2,7 +2,7 @@ import { db } from "~/db";
 import { showStudioJoin, studios, studioTranslations } from "~/db/schema";
 import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedStudio } from "~/models/studio";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, ImageTask } from "../images";
 
 type StudioI = typeof studios.$inferInsert;
 type StudioTransI = typeof studioTranslations.$inferInsert;
@@ -33,24 +33,19 @@
       })
       .returning({ pk: studios.pk, id: studios.id, slug: studios.slug });
 
-    const trans: StudioTransI[] = (
-      await Promise.all(
-        seed.map(
-          async (x, i) =>
-            await Promise.all(
-              Object.entries(x.translations).map(async ([lang, tr]) => ({
-                pk: ret[i].pk,
-                language: lang,
-                name: tr.name,
-                logo: await enqueueOptImage(tx, {
-                  url: tr.logo,
-                  column: studioTranslations.logo,
-                }),
-              })),
-            ),
-        ),
-      )
-    ).flat();
+    const imgQueue: ImageTask[] = [];
+    const trans: StudioTransI[] = seed.flatMap((x, i) =>
+      Object.entries(x.translations).map(([lang, tr]) => ({
+        pk: ret[i].pk,
+        language: lang,
+        name: tr.name,
+        logo: enqueueOptImage(imgQueue, {
+          url: tr.logo,
+          column: studioTranslations.logo,
+        }),
+      })),
+    );
+    await flushImageQueue(tx, imgQueue, -100);
     await tx
       .insert(studioTranslations)
       .values(trans)
@@ -8,18 +8,18 @@ import { migrate as migrateDb } from "drizzle-orm/node-postgres/migrator";
 import type { PoolConfig } from "pg";
 import * as schema from "./schema";
 
-async function getPostgresConfig(): Promise<PoolConfig> {
-  const config: PoolConfig = {
-    connectionString: process.env.POSTGRES_URL,
-    host: process.env.PGHOST ?? "postgres",
-    port: Number(process.env.PGPORT) || 5432,
-    database: process.env.PGDATABASE ?? "kyoo",
-    user: process.env.PGUSER ?? "kyoo",
-    password: process.env.PGPASSWORD ?? "password",
-    options: process.env.PGOPTIONS,
-    application_name: process.env.PGAPPNAME ?? "kyoo",
-  };
+const config: PoolConfig = {
+  connectionString: process.env.POSTGRES_URL,
+  host: process.env.PGHOST ?? "postgres",
+  port: Number(process.env.PGPORT) || 5432,
+  database: process.env.PGDATABASE ?? "kyoo",
+  user: process.env.PGUSER ?? "kyoo",
+  password: process.env.PGPASSWORD ?? "password",
+  options: process.env.PGOPTIONS,
+  application_name: process.env.PGAPPNAME ?? "kyoo",
+};
 
+async function parseSslConfig(): Promise<PoolConfig> {
   // Due to an upstream bug, if `ssl` is not falsey, an SSL connection will always be attempted. This means
   // that non-SSL connection options under `ssl` (which is incorrectly named) cannot be set unless SSL is enabled.
   if (!process.env.PGSSLMODE || process.env.PGSSLMODE === "disable")
@@ -108,7 +108,9 @@ async function getPostgresConfig(): Promise<PoolConfig> {
   return config;
 }
 
-const postgresConfig = await getPostgresConfig();
+const postgresConfig = await parseSslConfig();
+// use this when using drizzle-kit since it can't parse await statements
+// const postgresConfig = config;
 
 export const db = drizzle({
   schema,
@@ -12,9 +12,8 @@ import {
   uuid,
   varchar,
 } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { shows } from "./shows";
-import { image, language, schema } from "./utils";
+import { image, language, schema, timestamp } from "./utils";
 import { entryVideoJoin } from "./videos";
 
 export const entryType = schema.enum("entry_type", [
@@ -1,9 +1,8 @@
 import { sql } from "drizzle-orm";
 import { check, index, integer } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { entries } from "./entries";
 import { profiles } from "./profiles";
-import { schema } from "./utils";
+import { schema, timestamp } from "./utils";
 import { videos } from "./videos";
 
 export const history = schema.table(
@@ -1,7 +1,6 @@
 import { sql } from "drizzle-orm";
 import { index, integer, jsonb, uuid, varchar } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
-import { schema } from "./utils";
+import { schema, timestamp } from "./utils";
 
 export const mqueue = schema.table(
   "mqueue",
@@ -9,6 +8,7 @@ export const mqueue = schema.table(
     id: uuid().notNull().primaryKey().defaultRandom(),
     kind: varchar({ length: 255 }).notNull(),
     message: jsonb().notNull(),
+    priority: integer().notNull().default(0),
     attempt: integer().notNull().default(0),
     createdAt: timestamp({ withTimezone: true, mode: "iso" })
       .notNull()
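Note (not part of the commit): this new priority column is what the worker's updated orderBy, shown earlier in this diff, sorts on before attempt and createdAt; with a bare column, drizzle emits a plain ascending ORDER BY, so rows with smaller priority values are returned first. A condensed sketch of that dequeue query, under a hypothetical wrapper name and with import paths assumed from the repo layout visible here:

import { and, eq, lt } from "drizzle-orm";
import { db } from "~/db";
import { mqueue } from "~/db/schema";

// Hypothetical wrapper around the dequeue query used by processOne().
export const peekNextImageTask = async () =>
  db.transaction(async (tx) => {
    const [item] = await tx
      .select()
      .from(mqueue)
      // lock the row so concurrent workers skip it
      .for("update", { skipLocked: true })
      .where(and(eq(mqueue.kind, "image"), lt(mqueue.attempt, 5)))
      // priority first, then least-retried, then oldest
      .orderBy(mqueue.priority, mqueue.attempt, mqueue.createdAt)
      .limit(1);
    return item ?? null;
  });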
@@ -10,9 +10,8 @@ import {
   uuid,
   varchar,
 } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { shows } from "./shows";
-import { image, language, schema } from "./utils";
+import { image, language, schema, timestamp } from "./utils";
 
 export const season_extid = () =>
   jsonb()
@@ -13,12 +13,11 @@ import {
   varchar,
 } from "drizzle-orm/pg-core";
 import type { Image, Original } from "~/models/utils";
-import { timestamp } from "../utils";
 import { entries } from "./entries";
 import { seasons } from "./seasons";
 import { roles } from "./staff";
 import { showStudioJoin } from "./studios";
-import { externalid, image, language, schema } from "./utils";
+import { externalid, image, language, schema, timestamp } from "./utils";
 
 export const showKind = schema.enum("show_kind", [
   "serie",
@@ -8,9 +8,8 @@ import {
   varchar,
 } from "drizzle-orm/pg-core";
 import type { Character } from "~/models/staff";
-import { timestamp } from "../utils";
 import { shows } from "./shows";
-import { externalid, image, schema } from "./utils";
+import { externalid, image, schema, timestamp } from "./utils";
 
 export const roleKind = schema.enum("role_kind", [
   "actor",
@@ -7,9 +7,8 @@ import {
   uuid,
   varchar,
 } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { shows } from "./shows";
-import { externalid, image, language, schema } from "./utils";
+import { externalid, image, language, schema, timestamp } from "./utils";
 
 export const studios = schema.table("studios", {
   pk: integer().primaryKey().generatedAlwaysAsIdentity(),
@@ -1,4 +1,4 @@
-import { jsonb, pgSchema, varchar } from "drizzle-orm/pg-core";
+import { customType, jsonb, pgSchema, varchar } from "drizzle-orm/pg-core";
 import type { Image } from "~/models/utils";
 
 export const schema = pgSchema("kyoo");
|
|||||||
>()
|
>()
|
||||||
.notNull()
|
.notNull()
|
||||||
.default({});
|
.default({});
|
||||||
|
|
||||||
|
export const timestamp = customType<{
|
||||||
|
data: string;
|
||||||
|
driverData: string;
|
||||||
|
config: { withTimezone: boolean; precision?: number; mode: "iso" };
|
||||||
|
}>({
|
||||||
|
dataType(config) {
|
||||||
|
const precision = config?.precision ? ` (${config.precision})` : "";
|
||||||
|
return `timestamp${precision}${config?.withTimezone ? " with time zone" : ""}`;
|
||||||
|
},
|
||||||
|
fromDriver(value: string): string {
|
||||||
|
// postgres format: 2025-06-22 16:13:37.489301+00
|
||||||
|
// what we want: 2025-06-22T16:13:37Z
|
||||||
|
return `${value.substring(0, 10)}T${value.substring(11, 19)}Z`;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|||||||
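Note (not part of the commit): the timestamp custom type added here is the same code removed from db/utils.ts at the end of this diff; its fromDriver only reshapes the raw PostgreSQL text into the ISO form the API serves, keeping whole seconds and assuming a UTC offset. A standalone sketch of that conversion, under a hypothetical name:

// Hypothetical helper mirroring timestamp's fromDriver above.
const isoFromPgTimestamp = (value: string): string =>
  // "2025-06-22 16:13:37.489301+00" -> "2025-06-22T16:13:37Z"
  `${value.substring(0, 10)}T${value.substring(11, 19)}Z`;

// isoFromPgTimestamp("2025-06-22 16:13:37.489301+00") === "2025-06-22T16:13:37Z"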
@@ -10,9 +10,8 @@ import {
   varchar,
 } from "drizzle-orm/pg-core";
 import type { Guess } from "~/models/video";
-import { timestamp } from "../utils";
 import { entries } from "./entries";
-import { schema } from "./utils";
+import { schema, timestamp } from "./utils";
 
 export const videos = schema.table(
   "videos",
@@ -1,10 +1,9 @@
 import { sql } from "drizzle-orm";
 import { check, integer, primaryKey } from "drizzle-orm/pg-core";
-import { timestamp } from "../utils";
 import { entries } from "./entries";
 import { profiles } from "./profiles";
 import { shows } from "./shows";
-import { schema } from "./utils";
+import { schema, timestamp } from "./utils";
 
 export const watchlistStatus = schema.enum("watchlist_status", [
   "watching",
@@ -13,11 +13,7 @@ import {
 } from "drizzle-orm";
 import type { CasingCache } from "drizzle-orm/casing";
 import type { AnyMySqlSelect } from "drizzle-orm/mysql-core";
-import {
-  type AnyPgSelect,
-  customType,
-  type SelectedFieldsFlat,
-} from "drizzle-orm/pg-core";
+import type { AnyPgSelect, SelectedFieldsFlat } from "drizzle-orm/pg-core";
 import type { AnySQLiteSelect } from "drizzle-orm/sqlite-core";
 import type { WithSubquery } from "drizzle-orm/subquery";
 import { db } from "./index";
@@ -157,19 +153,3 @@ export const isUniqueConstraint = (e: unknown): boolean => {
     cause.code === "23505"
   );
 };
-
-export const timestamp = customType<{
-  data: string;
-  driverData: string;
-  config: { withTimezone: boolean; precision?: number; mode: "iso" };
-}>({
-  dataType(config) {
-    const precision = config?.precision ? ` (${config.precision})` : "";
-    return `timestamp${precision}${config?.withTimezone ? " with time zone" : ""}`;
-  },
-  fromDriver(value: string): string {
-    // postgres format: 2025-06-22 16:13:37.489301+00
-    // what we want: 2025-06-22T16:13:37Z
-    return `${value.substring(0, 10)}T${value.substring(11, 19)}Z`;
-  },
-});