Mirror of https://github.com/zoriya/Kyoo.git (synced 2025-05-24 02:02:36 -04:00)

Add watch status in entries, movies & series (#843)
This commit is contained in: commit 341322d625
@@ -96,7 +96,7 @@ RABBITMQ_DEFAULT_PASS=aohohunuhouhuhhoahothonseuhaoensuthoaentsuhha

# v5 stuff, does absolutely nothing on master (aka: you can delete this)
EXTRA_CLAIMS='{"permissions": [], "verified": false}'
FIRST_USER_CLAIMS='{"permissions": ["user.read", "users.write", "users.delete"], "verified": true}'
GUEST_CLAIMS='{"permissions": []}'
EXTRA_CLAIMS='{"permissions": ["core.read"], "verified": false}'
FIRST_USER_CLAIMS='{"permissions": ["user.read", "users.write", "users.delete", "core.read"], "verified": true}'
GUEST_CLAIMS='{"permissions": ["core.read"]}'
PROTECTED_CLAIMS="permissions,verified"

@@ -63,14 +63,14 @@ erDiagram
  }
  entries ||--|{ entry_translations : has

  video {
  videos {
    guid id PK
    string path "NN"
    uint rendering "dedup for duplicates part1/2"
    uint part
    uint version "max version is preferred rendering"
  }
  video }|--|{ entries : for
  videos }|--|{ entries : for

  seasons {
    guid id PK
@@ -102,27 +102,28 @@ erDiagram
    guid id PK
  }

  watched_shows {
  watchlist {
    guid show_id PK, FK
    guid user_id PK, FK
    status status "completed|watching|dropped|planned"
    status status "completed|watching|rewatching|dropped|planned"
    uint seen_entry_count "NN"
    guid next_entry FK
  }
  shows ||--|{ watched_shows : has
  users ||--|{ watched_shows : has
  watched_shows ||--|o entries : next_entry
  shows ||--|{ watchlist : has
  users ||--|{ watchlist : has
  watchlist ||--|o entries : next_entry

  history {
    int id PK
    guid entry_id FK
    guid user_id FK
    uint time "in seconds, null if finished"
    uint progress "NN, from 0 to 100"
    guid profile_id FK
    guid video_id FK
    jsonb progress "{ percent, time }"
    datetime played_date
  }
  entries ||--|{ history : part_of
  users ||--|{ history : has
  videos o|--o{ history : has

  roles {
    guid show_id PK, FK
@@ -143,6 +144,7 @@ erDiagram
    jsonb external_id
  }
  staff ||--|{ roles : has
  shows ||--|{ roles : has

  studios {
    guid id PK

api/drizzle/0017_watchlist.sql (new file, 40 lines)
@@ -0,0 +1,40 @@
CREATE TYPE "kyoo"."watchlist_status" AS ENUM('completed', 'watching', 'rewatching', 'dropped', 'planned');--> statement-breakpoint
CREATE TABLE "kyoo"."history" (
  "pk" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "kyoo"."history_pk_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
  "profile_pk" integer NOT NULL,
  "entry_pk" integer NOT NULL,
  "video_pk" integer NOT NULL,
  "percent" integer DEFAULT 0 NOT NULL,
  "time" integer,
  "played_date" timestamp with time zone DEFAULT now() NOT NULL,
  CONSTRAINT "percent_valid" CHECK ("kyoo"."history"."percent" between 0 and 100)
);
--> statement-breakpoint
CREATE TABLE "kyoo"."profiles" (
  "pk" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "kyoo"."profiles_pk_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
  "id" uuid NOT NULL,
  CONSTRAINT "profiles_id_unique" UNIQUE("id")
);
--> statement-breakpoint
CREATE TABLE "kyoo"."watchlist" (
  "profile_pk" integer NOT NULL,
  "show_pk" integer NOT NULL,
  "status" "kyoo"."watchlist_status" NOT NULL,
  "seen_count" integer DEFAULT 0 NOT NULL,
  "next_entry" integer,
  "score" integer,
  "started_at" timestamp with time zone,
  "completed_at" timestamp with time zone,
  "created_at" timestamp with time zone DEFAULT now() NOT NULL,
  "updated_at" timestamp with time zone NOT NULL,
  CONSTRAINT "watchlist_profile_pk_show_pk_pk" PRIMARY KEY("profile_pk","show_pk"),
  CONSTRAINT "score_percent" CHECK ("kyoo"."watchlist"."score" between 0 and 100)
);
--> statement-breakpoint
ALTER TABLE "kyoo"."history" ADD CONSTRAINT "history_profile_pk_profiles_pk_fk" FOREIGN KEY ("profile_pk") REFERENCES "kyoo"."profiles"("pk") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "kyoo"."history" ADD CONSTRAINT "history_entry_pk_entries_pk_fk" FOREIGN KEY ("entry_pk") REFERENCES "kyoo"."entries"("pk") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "kyoo"."history" ADD CONSTRAINT "history_video_pk_videos_pk_fk" FOREIGN KEY ("video_pk") REFERENCES "kyoo"."videos"("pk") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "kyoo"."watchlist" ADD CONSTRAINT "watchlist_profile_pk_profiles_pk_fk" FOREIGN KEY ("profile_pk") REFERENCES "kyoo"."profiles"("pk") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "kyoo"."watchlist" ADD CONSTRAINT "watchlist_show_pk_shows_pk_fk" FOREIGN KEY ("show_pk") REFERENCES "kyoo"."shows"("pk") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "kyoo"."watchlist" ADD CONSTRAINT "watchlist_next_entry_entries_pk_fk" FOREIGN KEY ("next_entry") REFERENCES "kyoo"."entries"("pk") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "history_play_date" ON "kyoo"."history" USING btree ("played_date" DESC NULLS LAST);
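
For illustration only, a hedged sketch of how the API could write into this new kyoo.watchlist table with drizzle. The helper name setWatching and its call site are assumptions and not part of this commit; the schema objects are the ones added later in api/src/db/schema.

  // Hypothetical helper, not in this commit: mark a show as "watching" for a
  // profile, relying on the (profile_pk, show_pk) primary key for the upsert.
  import { db } from "~/db";
  import { watchlist } from "~/db/schema/watchlist";

  async function setWatching(profilePk: number, showPk: number) {
    await db
      .insert(watchlist)
      .values({ profilePk, showPk, status: "watching" })
      .onConflictDoUpdate({
        target: [watchlist.profilePk, watchlist.showPk],
        set: { status: "watching", startedAt: new Date().toISOString() },
      });
  }
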
api/drizzle/meta/0017_snapshot.json (new file, 1839 lines)
File diff suppressed because it is too large.
@@ -120,6 +120,13 @@
      "when": 1742205790510,
      "tag": "0016_mqueue",
      "breakpoints": true
    },
    {
      "idx": 17,
      "version": "7",
      "when": 1743944773824,
      "tag": "0017_watchlist",
      "breakpoints": true
    }
  ]
}

@@ -1,3 +1,6 @@
diff --git a/node_modules/drizzle-orm/.bun-tag-36446a2521398ee8 b/.bun-tag-36446a2521398ee8
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/node_modules/drizzle-orm/.bun-tag-9fae835e61d5cc75 b/.bun-tag-9fae835e61d5cc75
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
@@ -53,12 +56,68 @@ index b968ebb3f563f37c8c36221dd17cc6f3603270ec..3fda6d0a97997f6bd07ec6a0c83397c0
 * ```
 */
- fullJoin: PgSelectJoinFn<this, TDynamic, "full">;
+ fullJoin: PgSelectJoinFn<this, TDynamic, "full", false>;
 private createSetOperator;
 /**
 * Adds `union` set operator to the query.
diff --git a/pg-core/query-builders/select.d.ts b/pg-core/query-builders/select.d.ts
index d44256289ffe7bd19d3f3af98cbd9ba0fc7efc57..f106eb28a919e0182f833632ace36ea7f87f9a88 100644
--- a/pg-core/query-builders/select.d.ts
+++ b/pg-core/query-builders/select.d.ts
@@ -98,7 +98,16 @@ export declare abstract class PgSelectQueryBuilderBase<THKT extends PgSelectHKTB
 * .leftJoin(pets, eq(users.id, pets.ownerId))
 * ```
 */
- leftJoin: PgSelectJoinFn<this, TDynamic, "left">;
+ leftJoin: PgSelectJoinFn<this, TDynamic, "left", false>;
+ /**
+ * For each row of the table, include
+ * values from a matching row of the joined
+ * subquery, if there is a matching row. If not,
+ * all of the columns of the joined subquery
+ * will be set to null. The lateral keyword allows
+ * access to columns after the FROM statement.
+ */
+ leftJoinLateral: PgSelectJoinFn<this, TDynamic, "left", true>;
 /**
 * Executes a `right join` operation by adding another table to the current query.
 *
@@ -126,7 +135,7 @@ export declare abstract class PgSelectQueryBuilderBase<THKT extends PgSelectHKTB
 * .rightJoin(pets, eq(users.id, pets.ownerId))
 * ```
 */
- rightJoin: PgSelectJoinFn<this, TDynamic, "right">;
+ rightJoin: PgSelectJoinFn<this, TDynamic, "right", false>;
 /**
 * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values.
 *
@@ -154,7 +163,14 @@ export declare abstract class PgSelectQueryBuilderBase<THKT extends PgSelectHKTB
 * .innerJoin(pets, eq(users.id, pets.ownerId))
 * ```
 */
- innerJoin: PgSelectJoinFn<this, TDynamic, "inner">;
+ innerJoin: PgSelectJoinFn<this, TDynamic, "inner", false>;
+ /**
+ * For each row of the table, the joined subquery
+ * needs to have a matching row, or it will
+ * be excluded from results. The lateral keyword allows
+ * access to columns after the FROM statement.
+ */
+ innerJoinLateral: PgSelectJoinFn<this, TDynamic, "inner", true>;
 /**
 * Executes a `full join` operation by combining rows from two tables into a new table.
 *
@@ -182,7 +198,7 @@ export declare abstract class PgSelectQueryBuilderBase<THKT extends PgSelectHKTB
 * .fullJoin(pets, eq(users.id, pets.ownerId))
 * ```
 */
- fullJoin: PgSelectJoinFn<this, TDynamic, "full">;
+ fullJoin: PgSelectJoinFn<this, TDynamic, "full", false>;
 private createSetOperator;
 /**
 * Adds `union` set operator to the query.
diff --git a/pg-core/query-builders/select.js b/pg-core/query-builders/select.js
index e54406fcaf68ccfdaf32c8945d4d432212c4cf3f..0441be1e483a7ec02430978b5fac5bf6d863ffc7 100644
index e54406fcaf68ccfdaf32c8945d4d432212c4cf3f..5c514132f30366ee600b9530c284932d54f481f3 100644
--- a/pg-core/query-builders/select.js
+++ b/pg-core/query-builders/select.js
@@ -98,7 +98,7 @@ class PgSelectQueryBuilderBase extends TypedQueryBuilder {
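
The point of this vendored patch is to expose lateral joins on drizzle's Postgres query builder. A minimal sketch of how the patched API is used elsewhere in this commit (table and subquery names taken from the entries controller below; the condition is sql`true` because the correlation lives inside the subquery itself):

  // Assumes the patched drizzle-orm plus the entries / entryVideosQ objects
  // defined in api/src/controllers/entries.ts further down in this diff.
  const rows = await db
    .select({ id: entries.id, videos: entryVideosQ.videos })
    .from(entries)
    .leftJoinLateral(entryVideosQ, sql`true`);
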

@@ -26,35 +26,44 @@ export const auth = new Elysia({ name: "auth" })
      authorization: t.TemplateLiteral("Bearer ${string}"),
    }),
  })
  .resolve(async ({ headers: { authorization }, error }) => {
    const bearer = authorization?.slice(7);
    if (!bearer) {
      return error(500, {
        status: 500,
        message: "No jwt, auth server configuration error.",
      });
    }

    try {
      // @ts-expect-error ts can't understand that there's two overload idk why
      const { payload } = await jwtVerify(bearer, jwtSecret ?? jwks, {
        issuer: process.env.JWT_ISSUER,
      });
      const jwt = validator.Decode(payload);

      return { jwt };
    } catch (err) {
      return error(403, {
        status: 403,
        message: "Invalid jwt. Verification failed",
        details: err,
      });
    }
  })
  .macro({
    permissions(perms: string[]) {
      return {
        resolve: async ({ headers: { authorization }, error }) => {
          const bearer = authorization?.slice(7);
          if (!bearer) {
            return error(500, {
              status: 500,
              message: "No jwt, auth server configuration error.",
            });
          }

          // @ts-expect-error ts can't understand that there's two overload idk why
          const { payload } = await jwtVerify(bearer, jwtSecret ?? jwks, {
            issuer: process.env.JWT_ISSUER,
          });
          const jwt = validator.Decode(payload);

        beforeHandle: ({ jwt, error }) => {
          for (const perm of perms) {
            if (!jwt.permissions.includes(perm)) {
            if (!jwt!.permissions.includes(perm)) {
              return error(403, {
                status: 403,
                message: `Missing permission: '${perm}'.`,
                details: { current: jwt.permissions, required: perms },
                details: { current: jwt!.permissions, required: perms },
              });
            }
          }

          return { jwt };
        },
      };
    },
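
As a hedged reading of the change above: the auth plugin now both resolves the decoded jwt for every route that uses it and registers a permissions macro, so individual routes can declare the permissions they require. An illustrative route (not part of this commit; the route path and permission name are assumptions):

  // Hypothetical consumer of the auth plugin above.
  new Elysia()
    .use(auth)
    .get("/example", ({ jwt: { sub } }) => ({ userId: sub }), {
      permissions: ["core.read"],
    });
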

@@ -1,10 +1,13 @@
import { type SQL, and, eq, isNotNull, ne, sql } from "drizzle-orm";
import { type SQL, and, desc, eq, isNotNull, ne, sql } from "drizzle-orm";
import { Elysia, t } from "elysia";
import { auth } from "~/auth";
import { db } from "~/db";
import {
  entries,
  entryTranslations,
  entryVideoJoin,
  history,
  profiles,
  shows,
  videos,
} from "~/db/schema";
@@ -39,7 +42,7 @@ import {
  processLanguages,
  sortToSql,
} from "~/models/utils";
import { desc } from "~/models/utils/descriptions";
import { desc as description } from "~/models/utils/descriptions";
import type { EmbeddedVideo } from "~/models/video";

const entryFilters: FilterDef = {
@@ -105,6 +108,52 @@ const newsSort: Sort = {
    },
  ],
};
const { guess, createdAt, updatedAt, ...videosCol } = getColumns(videos);
export const entryVideosQ = db
  .select({
    videos: coalesce(
      jsonbAgg(
        jsonbBuildObject<EmbeddedVideo>({
          slug: entryVideoJoin.slug,
          ...videosCol,
        }),
      ),
      sql`'[]'::jsonb`,
    ).as("videos"),
  })
  .from(entryVideoJoin)
  .where(eq(entryVideoJoin.entryPk, entries.pk))
  .leftJoin(videos, eq(videos.pk, entryVideoJoin.videoPk))
  .as("videos");

export const getEntryProgressQ = (userId: string) =>
  db
    .selectDistinctOn([history.entryPk], {
      percent: history.percent,
      time: history.time,
      entryPk: history.entryPk,
      videoId: videos.id,
    })
    .from(history)
    .leftJoin(videos, eq(history.videoPk, videos.pk))
    .leftJoin(profiles, eq(history.profilePk, profiles.pk))
    .where(eq(profiles.id, userId))
    .orderBy(history.entryPk, desc(history.playedDate))
    .as("progress");

export const mapProgress = (
  progressQ: ReturnType<typeof getEntryProgressQ>,
  { aliased }: { aliased: boolean } = { aliased: false },
) => {
  const { time, percent, videoId } = getColumns(progressQ);
  const ret = {
    time: coalesce(time, sql`0`),
    percent: coalesce(percent, sql`0`),
    videoId: sql`${videoId}`,
  };
  if (!aliased) return ret;
  return Object.fromEntries(Object.entries(ret).map(([k, v]) => [k, v.as(k)]));
};
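
A hedged reading of the two call styles of mapProgress used in this commit: getEntryProgressQ keeps only the newest history row per entry (selectDistinctOn ordered by playedDate desc), and mapProgress coalesces its columns so never-watched entries come back as zero progress. The aliased flag appears to matter only for where the expressions end up:

  // Illustrative only; both calls rely on the definitions directly above.
  const topLevel = mapProgress(progressQ, { aliased: true }); // top-level select: each expression gets .as("time"), .as("percent"), ...
  const nested = mapProgress(progressQ); // inside jsonbBuildObject: the object keys already name the fields
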

async function getEntries({
  after,
@@ -113,6 +162,7 @@ async function getEntries({
  sort,
  filter,
  languages,
  userId,
}: {
  after: string | undefined;
  limit: number;
@@ -120,6 +170,7 @@ async function getEntries({
  sort: Sort;
  filter: SQL | undefined;
  languages: string[];
  userId: string;
}): Promise<(Entry | Extra | UnknownEntry)[]> {
  const transQ = db
    .selectDistinctOn([entryTranslations.pk])
@@ -131,23 +182,7 @@ async function getEntries({
    .as("t");
  const { pk, name, ...transCol } = getColumns(transQ);

  const { guess, createdAt, updatedAt, ...videosCol } = getColumns(videos);
  const videosQ = db
    .select({
      videos: coalesce(
        jsonbAgg(
          jsonbBuildObject<EmbeddedVideo>({
            slug: entryVideoJoin.slug,
            ...videosCol,
          }),
        ),
        sql`'[]'::jsonb`,
      ).as("videos"),
    })
    .from(entryVideoJoin)
    .where(eq(entryVideoJoin.entryPk, entries.pk))
    .leftJoin(videos, eq(videos.pk, entryVideoJoin.videoPk))
    .as("videos");
  const entryProgressQ = getEntryProgressQ(userId);

  const {
    kind,
@@ -162,7 +197,8 @@ async function getEntries({
    .select({
      ...entryCol,
      ...transCol,
      videos: videosQ.videos,
      videos: entryVideosQ.videos,
      progress: mapProgress(entryProgressQ, { aliased: true }),
      // specials don't have an `episodeNumber` but a `number` field.
      number: episodeNumber,

@@ -180,7 +216,8 @@ async function getEntries({
    })
    .from(entries)
    .innerJoin(transQ, eq(entries.pk, transQ.pk))
    .leftJoinLateral(videosQ, sql`true`)
    .leftJoinLateral(entryVideosQ, sql`true`)
    .leftJoin(entryProgressQ, eq(entries.pk, entryProgressQ.entryPk))
    .where(
      and(
        filter,
@@ -210,6 +247,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
    ...models,
    entry: t.Union([models.episode, models.movie_entry, models.special]),
  }))
  .use(auth)
  .get(
    "/series/:id/entries",
    async ({
@@ -217,6 +255,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
      query: { limit, after, query, sort, filter },
      headers: { "accept-language": languages },
      request: { url },
      jwt: { sub },
      error,
    }) => {
      const [serie] = await db
@@ -250,6 +289,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
          filter,
        ),
        languages: langs,
        userId: sub,
      })) as Entry[];

      return createPage(items, { url, sort, limit });
@@ -265,14 +305,14 @@ export const entriesH = new Elysia({ tags: ["series"] })
      query: t.Object({
        sort: entrySort,
        filter: t.Optional(Filter({ def: entryFilters })),
        query: t.Optional(t.String({ description: desc.query })),
        query: t.Optional(t.String({ description: description.query })),
        limit: t.Integer({
          minimum: 1,
          maximum: 250,
          default: 50,
          description: "Max page size.",
        }),
        after: t.Optional(t.String({ description: desc.after })),
        after: t.Optional(t.String({ description: description.after })),
      }),
      headers: t.Object(
        {
@@ -296,6 +336,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
      params: { id },
      query: { limit, after, query, sort, filter },
      request: { url },
      jwt: { sub },
      error,
    }) => {
      const [serie] = await db
@@ -327,6 +368,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
          filter,
        ),
        languages: ["extra"],
        userId: sub,
      })) as Extra[];

      return createPage(items, { url, sort, limit });
@@ -342,14 +384,14 @@ export const entriesH = new Elysia({ tags: ["series"] })
      query: t.Object({
        sort: extraSort,
        filter: t.Optional(Filter({ def: extraFilters })),
        query: t.Optional(t.String({ description: desc.query })),
        query: t.Optional(t.String({ description: description.query })),
        limit: t.Integer({
          minimum: 1,
          maximum: 250,
          default: 50,
          description: "Max page size.",
        }),
        after: t.Optional(t.String({ description: desc.after })),
        after: t.Optional(t.String({ description: description.after })),
      }),
      response: {
        200: Page(Extra),
@@ -366,6 +408,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
    async ({
      query: { limit, after, query, sort, filter },
      request: { url },
      jwt: { sub },
    }) => {
      const items = (await getEntries({
        limit,
@@ -374,6 +417,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
        sort: sort,
        filter: and(eq(entries.kind, "unknown"), filter),
        languages: ["extra"],
        userId: sub,
      })) as UnknownEntry[];

      return createPage(items, { url, sort, limit });
@@ -383,14 +427,14 @@ export const entriesH = new Elysia({ tags: ["series"] })
      query: t.Object({
        sort: extraSort,
        filter: t.Optional(Filter({ def: unknownFilters })),
        query: t.Optional(t.String({ description: desc.query })),
        query: t.Optional(t.String({ description: description.query })),
        limit: t.Integer({
          minimum: 1,
          maximum: 250,
          default: 50,
          description: "Max page size.",
        }),
        after: t.Optional(t.String({ description: desc.after })),
        after: t.Optional(t.String({ description: description.after })),
      }),
      response: {
        200: Page(UnknownEntry),
@@ -401,7 +445,11 @@ export const entriesH = new Elysia({ tags: ["series"] })
  )
  .get(
    "/news",
    async ({ query: { limit, after, query, filter }, request: { url } }) => {
    async ({
      query: { limit, after, query, filter },
      request: { url },
      jwt: { sub },
    }) => {
      const sort = newsSort;
      const items = (await getEntries({
        limit,
@@ -415,6 +463,7 @@ export const entriesH = new Elysia({ tags: ["series"] })
          filter,
        ),
        languages: ["extra"],
        userId: sub,
      })) as Entry[];

      return createPage(items, { url, sort, limit });
@@ -423,14 +472,14 @@ export const entriesH = new Elysia({ tags: ["series"] })
      detail: { description: "Get new movies/episodes added recently." },
      query: t.Object({
        filter: t.Optional(Filter({ def: entryFilters })),
        query: t.Optional(t.String({ description: desc.query })),
        query: t.Optional(t.String({ description: description.query })),
        limit: t.Integer({
          minimum: 1,
          maximum: 250,
          default: 50,
          description: "Max page size.",
        }),
        after: t.Optional(t.String({ description: desc.after })),
        after: t.Optional(t.String({ description: description.after })),
      }),
      response: {
        200: Page(Entry),
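
Based on the Progress model added in api/src/models/watchlist.ts further down, each entry returned by these routes should now carry a progress object along these lines (the values are made up for illustration):

  // Illustrative shape only, not an actual API response.
  const exampleProgress = {
    percent: 42, // 0..100, 0 when the entry was never watched
    time: 1260, // seconds into the entry, null when unwatched or finished
    videoId: "3cd436ee-0d87-4a70-b3f4-98a0b1ca93b1", // null if no video / video deleted
  };
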

@@ -1,9 +1,21 @@
import { type SQL, and, eq, exists, ne, sql } from "drizzle-orm";
import {
  type SQL,
  type Subquery,
  and,
  desc,
  eq,
  exists,
  ne,
  sql,
} from "drizzle-orm";
import type { PgSelect } from "drizzle-orm/pg-core";
import { db } from "~/db";
import {
  entries,
  entryTranslations,
  entryVideoJoin,
  history,
  profiles,
  showStudioJoin,
  showTranslations,
  shows,
@@ -11,6 +23,7 @@ import {
  studios,
  videos,
} from "~/db/schema";
import { watchlist } from "~/db/schema/watchlist";
import {
  coalesce,
  getColumns,
@@ -33,6 +46,7 @@ import {
  sortToSql,
} from "~/models/utils";
import type { EmbeddedVideo } from "~/models/video";
import { entryVideosQ, getEntryProgressQ, mapProgress } from "../entries";

export const showFilters: FilterDef = {
  genres: {
@@ -144,7 +158,10 @@ const showRelations = {
      .leftJoin(videos, eq(videos.pk, entryVideoJoin.videoPk))
      .as("videos");
  },
  firstEntry: ({ languages }: { languages: string[] }) => {
  firstEntry: ({
    languages,
    userId,
  }: { languages: string[]; userId: string }) => {
    const transQ = db
      .selectDistinctOn([entryTranslations.pk])
      .from(entryTranslations)
@@ -155,23 +172,7 @@ const showRelations = {
      .as("t");
    const { pk, ...transCol } = getColumns(transQ);

    const { guess, createdAt, updatedAt, ...videosCol } = getColumns(videos);
    const videosQ = db
      .select({
        videos: coalesce(
          jsonbAgg(
            jsonbBuildObject<EmbeddedVideo>({
              slug: entryVideoJoin.slug,
              ...videosCol,
            }),
          ),
          sql`'[]'::jsonb`,
        ).as("videos"),
      })
      .from(entryVideoJoin)
      .where(eq(entryVideoJoin.entryPk, entries.pk))
      .leftJoin(videos, eq(videos.pk, entryVideoJoin.videoPk))
      .as("videos");
    const progressQ = getEntryProgressQ(userId);

    return db
      .select({
@@ -179,17 +180,59 @@ const showRelations = {
        ...getColumns(entries),
        ...transCol,
        number: entries.episodeNumber,
        videos: videosQ.videos,
        videos: entryVideosQ.videos,
        progress: mapProgress(progressQ),
        }).as("firstEntry"),
      })
      .from(entries)
      .innerJoin(transQ, eq(entries.pk, transQ.pk))
      .leftJoinLateral(videosQ, sql`true`)
      .leftJoin(progressQ, eq(entries.pk, progressQ.entryPk))
      .leftJoinLateral(entryVideosQ, sql`true`)
      .where(and(eq(entries.showPk, shows.pk), ne(entries.kind, "extra")))
      .orderBy(entries.order)
      .limit(1)
      .as("firstEntry");
  },
  nextEntry: ({
    languages,
    userId,
    watchStatusQ,
  }: {
    languages: string[];
    userId: string;
    watchStatusQ: Subquery;
  }) => {
    const transQ = db
      .selectDistinctOn([entryTranslations.pk])
      .from(entryTranslations)
      .orderBy(
        entryTranslations.pk,
        sql`array_position(${sqlarr(languages)}, ${entryTranslations.language})`,
      )
      .as("t");
    const { pk, ...transCol } = getColumns(transQ);

    const progressQ = getEntryProgressQ(userId);

    return db
      .select({
        nextEntry: jsonbBuildObject<Entry>({
          ...getColumns(entries),
          ...transCol,
          number: entries.episodeNumber,
          videos: entryVideosQ.videos,
          progress: mapProgress(progressQ),
        }).as("nextEntry"),
      })
      .from(entries)
      .innerJoin(transQ, eq(entries.pk, transQ.pk))
      .leftJoin(progressQ, eq(entries.pk, progressQ.entryPk))
      .leftJoinLateral(entryVideosQ, sql`true`)
      .where(
        eq((watchStatusQ as unknown as typeof watchlist).nextEntry, entries.pk),
      )
      .as("nextEntry");
  },
};

export async function getShows({
@@ -202,6 +245,7 @@ export async function getShows({
  fallbackLanguage = true,
  preferOriginal = false,
  relations = [],
  userId,
}: {
  after?: string;
  limit: number;
@@ -212,6 +256,7 @@ export async function getShows({
  fallbackLanguage?: boolean;
  preferOriginal?: boolean;
  relations?: (keyof typeof showRelations)[];
  userId: string;
}) {
  const transQ = db
    .selectDistinctOn([showTranslations.pk])
@@ -227,6 +272,16 @@ export async function getShows({
    )
    .as("t");

  const watchStatusQ = db
    .select({
      ...getColumns(watchlist),
      percent: sql`${watchlist.seenCount}`.as("percent"),
    })
    .from(watchlist)
    .leftJoin(profiles, eq(watchlist.profilePk, profiles.pk))
    .where(eq(profiles.id, userId))
    .as("watchstatus");

  return await db
    .select({
      ...getColumns(shows),
@@ -245,9 +300,16 @@ export async function getShows({
        logo: sql<Image>`coalesce(nullif(${shows.original}->'logo', 'null'::jsonb), ${transQ.logo})`,
      }),

      ...buildRelations(relations, showRelations, { languages }),
      watchStatus: getColumns(watchStatusQ),

      ...buildRelations(relations, showRelations, {
        languages,
        userId,
        watchStatusQ,
      }),
    })
    .from(shows)
    .leftJoin(watchStatusQ, eq(shows.pk, watchStatusQ.showPk))
    [fallbackLanguage ? "innerJoin" : ("leftJoin" as "innerJoin")](
      transQ,
      eq(shows.pk, transQ.pk),

@@ -1,5 +1,6 @@
import { and, eq, sql } from "drizzle-orm";
import { Elysia, t } from "elysia";
import { auth } from "~/auth";
import { prefix } from "~/base";
import { db } from "~/db";
import { shows } from "~/db/schema";
@@ -22,12 +23,14 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
    serie: Serie,
    "serie-translation": SerieTranslation,
  })
  .use(auth)
  .get(
    "/:id",
    async ({
      params: { id },
      headers: { "accept-language": languages },
      query: { preferOriginal, with: relations },
      jwt: { sub },
      error,
      set,
    }) => {
@@ -42,6 +45,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
        fallbackLanguage: langs.includes("*"),
        preferOriginal,
        relations,
        userId: sub,
      });
      if (!ret) {
        return error(404, {
@@ -72,10 +76,13 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
        preferOriginal: t.Optional(
          t.Boolean({ description: desc.preferOriginal }),
        ),
        with: t.Array(t.UnionEnum(["translations", "studios", "firstEntry"]), {
          default: [],
          description: "Include related resources in the response.",
        }),
        with: t.Array(
          t.UnionEnum(["translations", "studios", "firstEntry", "nextEntry"]),
          {
            default: [],
            description: "Include related resources in the response.",
          },
        ),
      }),
      headers: t.Object(
        {
@@ -131,6 +138,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
      query: { limit, after, query, sort, filter, preferOriginal },
      headers: { "accept-language": languages },
      request: { url },
      jwt: { sub },
    }) => {
      const langs = processLanguages(languages);
      const items = await getShows({
@@ -141,6 +149,7 @@ export const series = new Elysia({ prefix: "/series", tags: ["series"] })
        filter: and(eq(shows.kind, "serie"), filter),
        languages: langs,
        preferOriginal,
        userId: sub,
      });
      return createPage(items, { url, sort, limit });
    },
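
An illustrative client call against the route above; the slug, token, and the exact query-string encoding of the `with` array are assumptions, not something this diff specifies:

  // Hypothetical client usage.
  const res = await fetch("/series/made-in-abyss?with=nextEntry", {
    headers: { Authorization: "Bearer <jwt>", "Accept-Language": "en" },
  });
  const serie = await res.json(); // now includes watchStatus plus the requested nextEntry relation
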

@@ -1,5 +1,6 @@
import { and, isNull, sql } from "drizzle-orm";
import { Elysia, t } from "elysia";
import { auth } from "~/auth";
import { prefix } from "~/base";
import { db } from "~/db";
import { shows } from "~/db/schema";
@@ -19,6 +20,7 @@ export const showsH = new Elysia({ prefix: "/shows", tags: ["shows"] })
  .model({
    show: Show,
  })
  .use(auth)
  .get(
    "random",
    async ({ error, redirect }) => {
@@ -63,6 +65,7 @@ export const showsH = new Elysia({ prefix: "/shows", tags: ["shows"] })
      },
      headers: { "accept-language": languages },
      request: { url },
      jwt: { sub },
    }) => {
      const langs = processLanguages(languages);
      const items = await getShows({
@@ -76,6 +79,7 @@ export const showsH = new Elysia({ prefix: "/shows", tags: ["shows"] })
        ),
        languages: langs,
        preferOriginal,
        userId: sub,
      });
      return createPage(items, { url, sort, limit });
    },

@@ -1,15 +1,18 @@
import { type SQL, and, eq, sql } from "drizzle-orm";
import Elysia, { t } from "elysia";
import { auth } from "~/auth";
import { prefix } from "~/base";
import { db } from "~/db";
import { showTranslations, shows } from "~/db/schema";
import { profiles, showTranslations, shows } from "~/db/schema";
import { roles, staff } from "~/db/schema/staff";
import { getColumns, sqlarr } from "~/db/utils";
import { watchlist } from "~/db/schema/watchlist";
import { getColumns, jsonbBuildObject, sqlarr } from "~/db/utils";
import { KError } from "~/models/error";
import type { MovieStatus } from "~/models/movie";
import { Role, Staff } from "~/models/staff";
import { RoleWShow, RoleWStaff } from "~/models/staff-roles";
import {
  AcceptLanguage,
  Filter,
  type FilterDef,
  type Image,
@@ -22,6 +25,7 @@ import {
  sortToSql,
} from "~/models/utils";
import { desc } from "~/models/utils/descriptions";
import type { WatchStatus } from "~/models/watchlist";
import { showFilters, showSort } from "./shows/logic";

const staffSort = Sort(
@@ -113,6 +117,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
    staff: Staff,
    role: Role,
  })
  .use(auth)
  .get(
    "/staff/:id",
    async ({ params: { id }, error }) => {
@@ -186,6 +191,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
      query: { limit, after, query, sort, filter, preferOriginal },
      headers: { "accept-language": languages },
      request: { url },
      jwt: { sub },
      error,
    }) => {
      const [member] = await db
@@ -210,6 +216,20 @@ export const staffH = new Elysia({ tags: ["staff"] })
          sql`array_position(${sqlarr(langs)}, ${showTranslations.language})`,
        )
        .as("t");

      const watchStatusQ = db
        .select({
          watchStatus: jsonbBuildObject<WatchStatus>({
            ...getColumns(watchlist),
            percent: watchlist.seenCount,
          }).as("watchStatus"),
        })
        .from(watchlist)
        .leftJoin(profiles, eq(watchlist.profilePk, profiles.pk))
        .where(and(eq(profiles.id, sub), eq(watchlist.showPk, shows.pk)))
        .limit(1)
        .as("watchstatus");

      const items = await db
        .select({
          ...getColumns(roles),
@@ -229,6 +249,7 @@ export const staffH = new Elysia({ tags: ["staff"] })
            banner: sql<Image>`coalesce(nullif(${shows.original}->'banner', 'null'::jsonb), ${transQ.banner})`,
            logo: sql<Image>`coalesce(nullif(${shows.original}->'logo', 'null'::jsonb), ${transQ.logo})`,
          }),
          watchStatus: sql`${watchStatusQ}`,
        },
      })
      .from(roles)
@@ -278,6 +299,12 @@ export const staffH = new Elysia({ tags: ["staff"] })
        }),
      ),
    }),
    headers: t.Object(
      {
        "accept-language": AcceptLanguage(),
      },
      { additionalProperties: true },
    ),
    response: {
      200: Page(RoleWShow),
      404: {

api/src/db/schema/history.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
import { sql } from "drizzle-orm";
import { check, index, integer, timestamp } from "drizzle-orm/pg-core";
import { entries } from "./entries";
import { profiles } from "./profiles";
import { schema } from "./utils";
import { videos } from "./videos";

export const history = schema.table(
  "history",
  {
    pk: integer().primaryKey().generatedAlwaysAsIdentity(),
    profilePk: integer()
      .notNull()
      .references(() => profiles.pk, { onDelete: "cascade" }),
    entryPk: integer()
      .notNull()
      .references(() => entries.pk, { onDelete: "cascade" }),
    videoPk: integer()
      .notNull()
      .references(() => videos.pk, { onDelete: "set null" }),
    percent: integer().notNull().default(0),
    time: integer(),
    playedDate: timestamp({ withTimezone: true, mode: "string" })
      .notNull()
      .defaultNow(),
  },
  (t) => [
    index("history_play_date").on(t.playedDate.desc()),

    check("percent_valid", sql`${t.percent} between 0 and 100`),
  ],
);
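
The write side this table implies (a scrobble/progress endpoint) is not part of this diff; a hedged sketch of what an insert could look like, using only the schema above:

  // Hypothetical helper, not in this commit: record a playback tick for a profile.
  import { db } from "~/db";
  import { history } from "~/db/schema";

  async function recordProgress(row: {
    profilePk: number;
    entryPk: number;
    videoPk: number;
    percent: number; // 0..100, enforced by the percent_valid check
    time: number | null; // seconds into the entry, null once finished
  }) {
    await db.insert(history).values(row); // playedDate defaults to now()
  }
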

@@ -4,4 +4,6 @@ export * from "./shows";
export * from "./studios";
export * from "./staff";
export * from "./videos";
export * from "./profiles";
export * from "./history";
export * from "./mqueue";

api/src/db/schema/profiles.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
import { integer, uuid } from "drizzle-orm/pg-core";
import { schema } from "./utils";

// user info is stored in keibi (the auth service).
// this table is only there for relations.
export const profiles = schema.table("profiles", {
  pk: integer().primaryKey().generatedAlwaysAsIdentity(),
  id: uuid().notNull().unique(),
});

api/src/db/schema/watchlist.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import { sql } from "drizzle-orm";
import { check, integer, primaryKey, timestamp } from "drizzle-orm/pg-core";
import { entries } from "./entries";
import { profiles } from "./profiles";
import { shows } from "./shows";
import { schema } from "./utils";

export const watchlistStatus = schema.enum("watchlist_status", [
  "completed",
  "watching",
  "rewatching",
  "dropped",
  "planned",
]);

export const watchlist = schema.table(
  "watchlist",
  {
    profilePk: integer()
      .notNull()
      .references(() => profiles.pk, { onDelete: "cascade" }),
    showPk: integer()
      .notNull()
      .references(() => shows.pk, { onDelete: "cascade" }),

    status: watchlistStatus().notNull(),
    seenCount: integer().notNull().default(0),
    nextEntry: integer().references(() => entries.pk, { onDelete: "set null" }),

    score: integer(),

    startedAt: timestamp({ withTimezone: true, mode: "string" }),
    completedAt: timestamp({ withTimezone: true, mode: "string" }),

    createdAt: timestamp({ withTimezone: true, mode: "string" })
      .notNull()
      .defaultNow(),
    updatedAt: timestamp({ withTimezone: true, mode: "string" })
      .notNull()
      .$onUpdate(() => sql`now()`),
  },
  (t) => [
    primaryKey({ columns: [t.profilePk, t.showPk] }),
    check("score_percent", sql`${t.score} between 0 and 100`),
  ],
);
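
How seenCount and nextEntry get maintained is not shown in this diff; a hedged sketch of one possible update path (the helper name, the nextPk resolution, and the status transition are all assumptions):

  import { and, eq, sql } from "drizzle-orm";
  import { db } from "~/db";
  import { watchlist } from "~/db/schema/watchlist";

  // Hypothetical: after an entry is completed, bump seenCount and point
  // nextEntry at the following entry (nextPk resolved elsewhere, null at the end).
  async function advanceWatchlist(profilePk: number, showPk: number, nextPk: number | null) {
    await db
      .update(watchlist)
      .set({
        seenCount: sql`${watchlist.seenCount} + 1`,
        nextEntry: nextPk,
        status: nextPk === null ? "completed" : "watching",
      })
      .where(and(eq(watchlist.profilePk, profilePk), eq(watchlist.showPk, showPk)));
    // updatedAt is filled automatically via $onUpdate above.
  }
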

@@ -1,6 +1,6 @@
import {
  type Column,
  type ColumnsSelection,
  InferColumnsDataTypes,
  type SQL,
  type SQLWrapper,
  type Subquery,
@@ -13,7 +13,7 @@ import {
} from "drizzle-orm";
import type { CasingCache } from "drizzle-orm/casing";
import type { AnyMySqlSelect } from "drizzle-orm/mysql-core";
import type { AnyPgSelect } from "drizzle-orm/pg-core";
import type { AnyPgSelect, SelectedFieldsFlat } from "drizzle-orm/pg-core";
import type { AnySQLiteSelect } from "drizzle-orm/sqlite-core";
import type { WithSubquery } from "drizzle-orm/subquery";
import { db } from "./index";
@@ -95,7 +95,7 @@ export function values(items: Record<string, unknown>[]) {
  };
}

export const coalesce = <T>(val: SQL<T>, def: SQLWrapper) => {
export const coalesce = <T>(val: SQL<T> | Column, def: SQL<T>) => {
  return sql<T>`coalesce(${val}, ${def})`;
};

@@ -109,10 +109,19 @@ export const jsonbAgg = <T>(val: SQL<T>) => {
  return sql<T[]>`jsonb_agg(${val})`;
};

export const jsonbBuildObject = <T>(select: Record<string, SQLWrapper>) => {
type JsonFields = {
  [k: string]:
    | SelectedFieldsFlat[string]
    | Table
    | SelectedFieldsFlat
    | JsonFields;
};
export const jsonbBuildObject = <T>(select: JsonFields) => {
  const query = sql.join(
    Object.entries(select).flatMap(([k, v]) => {
      return [sql.raw(`'${k}'`), v];
      if (v.getSQL) return [sql.raw(`'${k}'`), v];
      // nested object (getSql is present in all SqlWrappers)
      return [sql.raw(`'${k}'`), jsonbBuildObject<any>(v as JsonFields)];
    }),
    sql.raw(", "),
  );
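
The change above lets jsonbBuildObject accept nested plain objects: anything without a getSQL method is recursed into as another jsonb_build_object call, which is what the progress field in the entry queries relies on. A small hedged example of what that enables (column names borrowed from this diff, the type argument is illustrative):

  // Illustrative only: nested keys become nested jsonb_build_object(...) calls.
  const payload = jsonbBuildObject<{ id: string; progress: { percent: number } }>({
    id: entries.id,
    progress: { percent: history.percent }, // plain object -> recursive call
  });
  // roughly: jsonb_build_object('id', "entries"."id",
  //          'progress', jsonb_build_object('percent', "history"."percent"))
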

@@ -9,6 +9,7 @@ import {
  TranslationRecord,
} from "../utils";
import { EmbeddedVideo } from "../video";
import { Progress } from "../watchlist";
import { BaseEntry, EntryTranslation } from "./base-entry";

export const BaseEpisode = t.Intersect([
@@ -27,7 +28,8 @@ export const Episode = t.Intersect([
  EntryTranslation(),
  BaseEpisode,
  t.Object({
    videos: t.Optional(t.Array(EmbeddedVideo)),
    videos: t.Array(EmbeddedVideo),
    progress: Progress,
  }),
  DbMetadata,
]);

@@ -3,6 +3,7 @@ import { type Prettify, comment } from "~/utils";
import { madeInAbyss, registerExamples } from "../examples";
import { DbMetadata, SeedImage } from "../utils";
import { Resource } from "../utils/resource";
import { Progress } from "../watchlist";
import { BaseEntry } from "./base-entry";

export const ExtraType = t.UnionEnum([
@@ -31,7 +32,14 @@ export const BaseExtra = t.Intersect(
  },
);

export const Extra = t.Intersect([Resource(), BaseExtra, DbMetadata]);
export const Extra = t.Intersect([
  Resource(),
  BaseExtra,
  t.Object({
    progress: t.Omit(Progress, ["videoId"]),
  }),
  DbMetadata,
]);
export type Extra = Prettify<typeof Extra.static>;

export const SeedExtra = t.Intersect([

@@ -10,6 +10,7 @@ import {
  TranslationRecord,
} from "../utils";
import { EmbeddedVideo } from "../video";
import { Progress } from "../watchlist";
import { BaseEntry, EntryTranslation } from "./base-entry";

export const BaseMovieEntry = t.Intersect(
@@ -46,6 +47,7 @@ export const MovieEntry = t.Intersect([
  BaseMovieEntry,
  t.Object({
    videos: t.Optional(t.Array(EmbeddedVideo)),
    progress: Progress,
  }),
  DbMetadata,
]);

@@ -9,6 +9,7 @@ import {
  TranslationRecord,
} from "../utils";
import { EmbeddedVideo } from "../video";
import { Progress } from "../watchlist";
import { BaseEntry, EntryTranslation } from "./base-entry";

export const BaseSpecial = t.Intersect(
@@ -38,6 +39,7 @@ export const Special = t.Intersect([
  BaseSpecial,
  t.Object({
    videos: t.Optional(t.Array(EmbeddedVideo)),
    progress: Progress,
  }),
  DbMetadata,
]);

@@ -2,6 +2,7 @@ import { t } from "elysia";
import { type Prettify, comment } from "~/utils";
import { bubbleImages, registerExamples, youtubeExample } from "../examples";
import { DbMetadata, Resource } from "../utils";
import { Progress } from "../watchlist";
import { BaseEntry, EntryTranslation } from "./base-entry";

export const BaseUnknownEntry = t.Intersect(
@@ -27,6 +28,9 @@ export const UnknownEntry = t.Intersect([
  Resource(),
  UnknownEntryTranslation,
  BaseUnknownEntry,
  t.Object({
    progress: t.Omit(Progress, ["videoId"]),
  }),
  DbMetadata,
]);
export type UnknownEntry = Prettify<typeof UnknownEntry.static>;

@@ -16,6 +16,7 @@ import {
} from "./utils";
import { Original } from "./utils/original";
import { EmbeddedVideo } from "./video";
import { WatchStatus } from "./watchlist";

export const MovieStatus = t.UnionEnum(["unknown", "finished", "planned"]);
export type MovieStatus = typeof MovieStatus.static;
@@ -55,6 +56,7 @@ export const Movie = t.Intersect([
  t.Object({
    original: Original,
    isAvailable: t.Boolean(),
    watchStatus: t.Nullable(t.Omit(WatchStatus, ["seenCount"])),
  }),
]);
export type Movie = Prettify<typeof Movie.static>;

@@ -17,6 +17,7 @@ import {
  TranslationRecord,
} from "./utils";
import { Original } from "./utils/original";
import { WatchStatus } from "./watchlist";

export const SerieStatus = t.UnionEnum([
  "unknown",
@@ -70,6 +71,7 @@ export const Serie = t.Intersect([
  availableCount: t.Integer({
    description: "The number of episodes that can be played right away",
  }),
  watchStatus: t.Nullable(t.Omit(WatchStatus, ["percent"])),
  }),
]);
export type Serie = Prettify<typeof Serie.static>;

api/src/models/watchlist.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
import { t } from "elysia";
import { comment } from "~/utils";

export const Progress = t.Object({
  percent: t.Integer({ minimum: 0, maximum: 100 }),
  time: t.Nullable(
    t.Integer({
      minimum: 0,
      description: comment`
        When this episode was stopped (in seconds since the start).
        This value is null if the entry was never watched or is finished.
      `,
    }),
  ),
  videoId: t.Nullable(
    t.String({
      format: "uuid",
      description: comment`
        Id of the video the user watched.
        This can be used to resume playback in the correct video file
        without asking the user what video to play.

        This will be null if the user did not watch the entry or
        if the video was deleted since.
      `,
    }),
  ),
});
export type Progress = typeof Progress.static;

export const WatchlistStatus = t.UnionEnum([
  "completed",
  "watching",
  "rewatching",
  "dropped",
  "planned",
]);

export const WatchStatus = t.Object({
  status: WatchlistStatus,
  score: t.Nullable(t.Integer({ minimum: 0, maximum: 100 })),
  startedAt: t.Nullable(t.String({ format: "date-time" })),
  completedAt: t.Nullable(t.String({ format: "date-time" })),
  // only for series
  seenCount: t.Integer({
    description: "The number of episodes you watched in this serie.",
    minimum: 0,
  }),
  // only for movies
  percent: t.Integer({
    minimum: 0,
    maximum: 100,
  }),
});
export type WatchStatus = typeof WatchStatus.static;
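
As wired into the Movie and Serie models earlier in this diff, the same WatchStatus object is exposed with one field dropped per kind; illustrative shapes only, the values are made up:

  // For a serie (percent omitted, seenCount kept):
  const serieWatchStatus = {
    status: "watching",
    score: null,
    startedAt: "2025-04-01T20:00:00Z",
    completedAt: null,
    seenCount: 13,
  };
  // For a movie (seenCount omitted, percent kept):
  const movieWatchStatus = {
    status: "completed",
    score: 85,
    startedAt: null,
    completedAt: "2025-04-06T22:10:00Z",
    percent: 100,
  };
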

@@ -58,8 +58,8 @@ func (h *Handler) createGuestJwt() *string {

	claims := maps.Clone(h.config.GuestClaims)
	claims["username"] = "guest"
	claims["sub"] = "guest"
	claims["sid"] = "guest"
	claims["sub"] = "00000000-0000-0000-0000-000000000000"
	claims["sid"] = "00000000-0000-0000-0000-000000000000"
	claims["iss"] = h.config.PublicUrl
	claims["iat"] = &jwt.NumericDate{
		Time: time.Now().UTC(),