From fbb995a3a62c70f9681932255bbd2ec0282cca0e Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Mon, 13 Apr 2026 10:28:02 +0200
Subject: [PATCH 01/27] Use ids as pk for infos
---
.../migrations/000004_add_id_column.down.sql | 49 ++++++++++++++
.../migrations/000004_add_id_column.up.sql | 55 ++++++++++++++++
transcoder/src/info.go | 10 +--
transcoder/src/keyframes.go | 6 +-
transcoder/src/metadata.go | 64 ++++++++++---------
5 files changed, 147 insertions(+), 37 deletions(-)
create mode 100644 transcoder/migrations/000004_add_id_column.down.sql
create mode 100644 transcoder/migrations/000004_add_id_column.up.sql
diff --git a/transcoder/migrations/000004_add_id_column.down.sql b/transcoder/migrations/000004_add_id_column.down.sql
new file mode 100644
index 00000000..ccf9e527
--- /dev/null
+++ b/transcoder/migrations/000004_add_id_column.down.sql
@@ -0,0 +1,49 @@
+begin;
+
+-- chapters
+alter table gocoder.chapters add column sha varchar(40);
+update gocoder.chapters c set sha = i.sha from gocoder.info i where c.id = i.id;
+alter table gocoder.chapters alter column sha set not null;
+
+alter table gocoder.chapters drop constraint chapter_pk;
+alter table gocoder.chapters drop constraint chapters_info_fk;
+alter table gocoder.chapters drop column id;
+alter table gocoder.chapters add constraint chapter_pk primary key (sha, start_time);
+alter table gocoder.chapters add foreign key (sha) references gocoder.info(sha) on delete cascade;
+
+-- subtitles
+alter table gocoder.subtitles add column sha varchar(40);
+update gocoder.subtitles s set sha = i.sha from gocoder.info i where s.id = i.id;
+alter table gocoder.subtitles alter column sha set not null;
+
+alter table gocoder.subtitles drop constraint subtitle_pk;
+alter table gocoder.subtitles drop constraint subtitles_info_fk;
+alter table gocoder.subtitles drop column id;
+alter table gocoder.subtitles add constraint subtitle_pk primary key (sha, idx);
+alter table gocoder.subtitles add foreign key (sha) references gocoder.info(sha) on delete cascade;
+
+-- audios
+alter table gocoder.audios add column sha varchar(40);
+update gocoder.audios a set sha = i.sha from gocoder.info i where a.id = i.id;
+alter table gocoder.audios alter column sha set not null;
+
+alter table gocoder.audios drop constraint audios_pk;
+alter table gocoder.audios drop constraint audios_info_fk;
+alter table gocoder.audios drop column id;
+alter table gocoder.audios add constraint audios_pk primary key (sha, idx);
+alter table gocoder.audios add foreign key (sha) references gocoder.info(sha) on delete cascade;
+
+-- videos
+alter table gocoder.videos add column sha varchar(40);
+update gocoder.videos v set sha = i.sha from gocoder.info i where v.id = i.id;
+alter table gocoder.videos alter column sha set not null;
+
+alter table gocoder.videos drop constraint videos_pk;
+alter table gocoder.videos drop constraint videos_info_fk;
+alter table gocoder.videos drop column id;
+alter table gocoder.videos add constraint videos_pk primary key (sha, idx);
+alter table gocoder.videos add foreign key (sha) references gocoder.info(sha) on delete cascade;
+
+alter table gocoder.info drop column id;
+
+commit;
diff --git a/transcoder/migrations/000004_add_id_column.up.sql b/transcoder/migrations/000004_add_id_column.up.sql
new file mode 100644
index 00000000..2eb05fc6
--- /dev/null
+++ b/transcoder/migrations/000004_add_id_column.up.sql
@@ -0,0 +1,55 @@
+begin;
+
+alter table gocoder.info add column id serial not null;
+alter table gocoder.info drop constraint info_pkey;
+alter table gocoder.info add constraint info_pkey primary key(id);
+
+-- videos
+alter table gocoder.videos add column id integer;
+update gocoder.videos v set id = i.id from gocoder.info i where v.sha = i.sha;
+alter table gocoder.videos alter column id set not null;
+
+alter table gocoder.videos drop constraint videos_pk;
+alter table gocoder.videos drop constraint videos_sha_fkey;
+alter table gocoder.videos drop column sha;
+alter table gocoder.videos add constraint videos_info_fk
+ foreign key (id) references gocoder.info(id) on delete cascade;
+alter table gocoder.videos add constraint videos_pk primary key (id, idx);
+
+-- audios
+alter table gocoder.audios add column id integer;
+update gocoder.audios a set id = i.id from gocoder.info i where a.sha = i.sha;
+alter table gocoder.audios alter column id set not null;
+
+alter table gocoder.audios drop constraint audios_pk;
+alter table gocoder.audios drop constraint audios_sha_fkey;
+alter table gocoder.audios drop column sha;
+alter table gocoder.audios add constraint audios_info_fk
+ foreign key (id) references gocoder.info(id) on delete cascade;
+alter table gocoder.audios add constraint audios_pk primary key (id, idx);
+
+-- subtitles
+alter table gocoder.subtitles add column id integer;
+update gocoder.subtitles s set id = i.id from gocoder.info i where s.sha = i.sha;
+alter table gocoder.subtitles alter column id set not null;
+
+alter table gocoder.subtitles drop constraint subtitle_pk;
+alter table gocoder.subtitles drop constraint subtitles_sha_fkey;
+alter table gocoder.subtitles drop column sha;
+alter table gocoder.subtitles add constraint subtitles_info_fk
+ foreign key (id) references gocoder.info(id) on delete cascade;
+alter table gocoder.subtitles add constraint subtitle_pk primary key (id, idx);
+
+-- chapters
+alter table gocoder.chapters add column id integer;
+update gocoder.chapters c set id = i.id from gocoder.info i where c.sha = i.sha;
+alter table gocoder.chapters alter column id set not null;
+
+alter table gocoder.chapters drop constraint chapter_pk;
+alter table gocoder.chapters drop constraint chapters_sha_fkey;
+alter table gocoder.chapters drop column sha;
+alter table gocoder.chapters add constraint chapters_info_fk
+ foreign key (id) references gocoder.info(id) on delete cascade;
+alter table gocoder.chapters add constraint chapter_pk primary key (id, start_time);
+
+commit;
diff --git a/transcoder/src/info.go b/transcoder/src/info.go
index e6761b97..5e166602 100644
--- a/transcoder/src/info.go
+++ b/transcoder/src/info.go
@@ -27,6 +27,8 @@ type Versions struct {
}
type MediaInfo struct {
+ // Auto-increment id used as foreign key for related tables.
+ Id int32 `json:"id" db:"id"`
// The sha1 of the video file.
Sha string `json:"sha" db:"sha"`
/// The internal path of the video file.
@@ -60,7 +62,7 @@ type MediaInfo struct {
}
type Video struct {
- Sha string `json:"-" db:"sha"`
+ Id int32 `json:"-" db:"id"`
/// The index of this track on the media.
Index uint32 `json:"index" db:"idx"`
@@ -86,7 +88,7 @@ type Video struct {
}
type Audio struct {
- Sha string `json:"-" db:"sha"`
+ Id int32 `json:"-" db:"id"`
/// The index of this track on the media.
Index uint32 `json:"index" db:"idx"`
@@ -110,7 +112,7 @@ type Audio struct {
}
type Subtitle struct {
- Sha string `json:"-" db:"sha"`
+ Id int32 `json:"-" db:"id"`
/// The index of this track on the media.
Index *uint32 `json:"index" db:"idx"`
@@ -137,7 +139,7 @@ type Subtitle struct {
}
type Chapter struct {
- Sha string `json:"-" db:"sha"`
+ Id int32 `json:"-" db:"id"`
/// The start time of the chapter (in second from the start of the episode).
StartTime float32 `json:"startTime" db:"start_time"`
diff --git a/transcoder/src/keyframes.go b/transcoder/src/keyframes.go
index 5f76ed08..94d4038b 100644
--- a/transcoder/src/keyframes.go
+++ b/transcoder/src/keyframes.go
@@ -160,12 +160,12 @@ func (s *MetadataService) GetKeyframes(info *MediaInfo, isVideo bool, idx uint32
tx, _ := s.Database.Begin(ctx)
tx.Exec(
ctx,
- fmt.Sprintf(`update %s set keyframes = $3 where sha = $1 and idx = $2`, table),
- info.Sha,
+ fmt.Sprintf(`update %s set keyframes = $3 where id = $1 and idx = $2`, table),
+ info.Id,
idx,
kf.Keyframes,
)
- tx.Exec(ctx, `update gocoder.info set ver_keyframes = $2 where sha = $1`, info.Sha, KeyframeVersion)
+ tx.Exec(ctx, `update gocoder.info set ver_keyframes = $2 where id = $1`, info.Id, KeyframeVersion)
err = tx.Commit(ctx)
if err != nil {
log.Printf("Couldn't store keyframes on database: %v", err)
diff --git a/transcoder/src/metadata.go b/transcoder/src/metadata.go
index 4a79c912..4b972e9e 100644
--- a/transcoder/src/metadata.go
+++ b/transcoder/src/metadata.go
@@ -171,9 +171,9 @@ func (s *MetadataService) GetMetadata(ctx context.Context, path string, sha stri
if err != nil {
return nil, err
}
- tx.Exec(bgCtx, `update gocoder.videos set keyframes = null where sha = $1`, sha)
- tx.Exec(bgCtx, `update gocoder.audios set keyframes = null where sha = $1`, sha)
- tx.Exec(bgCtx, `update gocoder.info set ver_keyframes = 0 where sha = $1`, sha)
+ tx.Exec(bgCtx, `update gocoder.videos set keyframes = null where id = $1`, ret.Id)
+ tx.Exec(bgCtx, `update gocoder.audios set keyframes = null where id = $1`, ret.Id)
+ tx.Exec(bgCtx, `update gocoder.info set ver_keyframes = 0 where id = $1`, ret.Id)
err = tx.Commit(bgCtx)
if err != nil {
fmt.Printf("error deleting old keyframes from database: %v", err)
@@ -187,7 +187,7 @@ func (s *MetadataService) getMetadata(ctx context.Context, path string, sha stri
rows, _ := s.Database.Query(
ctx,
`select
- i.sha, i.path, i.extension, i.mime_codec, i.size, i.duration, i.container, i.fonts,
+ i.id, i.sha, i.path, i.extension, i.mime_codec, i.size, i.duration, i.container, i.fonts,
jsonb_build_object(
'info', i.ver_info,
'extract', i.ver_extract,
@@ -209,8 +209,8 @@ func (s *MetadataService) getMetadata(ctx context.Context, path string, sha stri
rows, _ = s.Database.Query(
ctx,
- `select * from gocoder.videos as v where v.sha=$1`,
- sha,
+ `select * from gocoder.videos as v where v.id=$1`,
+ ret.Id,
)
ret.Videos, err = pgx.CollectRows(rows, pgx.RowToStructByName[Video])
if err != nil {
@@ -219,8 +219,8 @@ func (s *MetadataService) getMetadata(ctx context.Context, path string, sha stri
rows, _ = s.Database.Query(
ctx,
- `select * from gocoder.audios as a where a.sha=$1`,
- sha,
+ `select * from gocoder.audios as a where a.id=$1`,
+ ret.Id,
)
ret.Audios, err = pgx.CollectRows(rows, pgx.RowToStructByName[Audio])
if err != nil {
@@ -229,8 +229,8 @@ func (s *MetadataService) getMetadata(ctx context.Context, path string, sha stri
rows, _ = s.Database.Query(
ctx,
- `select * from gocoder.subtitles as s where s.sha=$1`,
- sha,
+ `select * from gocoder.subtitles as s where s.id=$1`,
+ ret.Id,
)
ret.Subtitles, err = pgx.CollectRows(rows, pgx.RowToStructByName[Subtitle])
if err != nil {
@@ -254,8 +254,8 @@ func (s *MetadataService) getMetadata(ctx context.Context, path string, sha stri
rows, _ = s.Database.Query(
ctx,
- `select * from gocoder.chapters as c where c.sha=$1`,
- sha,
+ `select * from gocoder.chapters as c where c.id=$1`,
+ ret.Id,
)
ret.Chapters, err = pgx.CollectRows(rows, pgx.RowToStructByName[Chapter])
if err != nil {
@@ -283,24 +283,28 @@ func (s *MetadataService) storeFreshMetadata(ctx context.Context, path string, s
	// it needs to be a delete instead of an on conflict do update because we want to trigger delete cascade for
// videos/audios & co.
tx.Exec(ctx, `delete from gocoder.info where path = $1`, path)
- tx.Exec(ctx,
+ err = tx.QueryRow(ctx,
`
insert into gocoder.info(sha, path, extension, mime_codec, size, duration, container,
fonts, ver_info, ver_extract, ver_thumbs, ver_keyframes)
values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
+ returning id
`,
// on conflict do not update versions of extract/thumbs/keyframes
ret.Sha, ret.Path, ret.Extension, ret.MimeCodec, ret.Size, ret.Duration, ret.Container,
ret.Fonts, ret.Versions.Info, ret.Versions.Extract, ret.Versions.Thumbs, ret.Versions.Keyframes,
- )
+ ).Scan(&ret.Id)
+ if err != nil {
+ return set(ret, fmt.Errorf("failed to insert info: %w", err))
+ }
for _, v := range ret.Videos {
tx.Exec(
ctx,
`
- insert into gocoder.videos(sha, idx, title, language, codec, mime_codec, width, height, is_default, bitrate)
+ insert into gocoder.videos(id, idx, title, language, codec, mime_codec, width, height, is_default, bitrate)
values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
- on conflict (sha, idx) do update set
- sha = excluded.sha,
+ on conflict (id, idx) do update set
+ id = excluded.id,
idx = excluded.idx,
title = excluded.title,
language = excluded.language,
@@ -311,17 +315,17 @@ func (s *MetadataService) storeFreshMetadata(ctx context.Context, path string, s
is_default = excluded.is_default,
bitrate = excluded.bitrate
`,
- ret.Sha, v.Index, v.Title, v.Language, v.Codec, v.MimeCodec, v.Width, v.Height, v.IsDefault, v.Bitrate,
+ ret.Id, v.Index, v.Title, v.Language, v.Codec, v.MimeCodec, v.Width, v.Height, v.IsDefault, v.Bitrate,
)
}
for _, a := range ret.Audios {
tx.Exec(
ctx,
`
- insert into gocoder.audios(sha, idx, title, language, codec, mime_codec, channels, is_default, bitrate)
+ insert into gocoder.audios(id, idx, title, language, codec, mime_codec, channels, is_default, bitrate)
values ($1, $2, $3, $4, $5, $6, $7, $8, $9)
- on conflict (sha, idx) do update set
- sha = excluded.sha,
+ on conflict (id, idx) do update set
+ id = excluded.id,
idx = excluded.idx,
title = excluded.title,
language = excluded.language,
@@ -331,17 +335,17 @@ func (s *MetadataService) storeFreshMetadata(ctx context.Context, path string, s
is_default = excluded.is_default,
bitrate = excluded.bitrate
`,
- ret.Sha, a.Index, a.Title, a.Language, a.Codec, a.MimeCodec, a.Channels, a.IsDefault, a.Bitrate,
+ ret.Id, a.Index, a.Title, a.Language, a.Codec, a.MimeCodec, a.Channels, a.IsDefault, a.Bitrate,
)
}
for _, s := range ret.Subtitles {
tx.Exec(
ctx,
`
- insert into gocoder.subtitles(sha, idx, title, language, codec, extension, is_default, is_forced, is_hearing_impaired)
+ insert into gocoder.subtitles(id, idx, title, language, codec, extension, is_default, is_forced, is_hearing_impaired)
values ($1, $2, $3, $4, $5, $6, $7, $8, $9)
- on conflict (sha, idx) do update set
- sha = excluded.sha,
+ on conflict (id, idx) do update set
+ id = excluded.id,
idx = excluded.idx,
title = excluded.title,
language = excluded.language,
@@ -351,23 +355,23 @@ func (s *MetadataService) storeFreshMetadata(ctx context.Context, path string, s
is_forced = excluded.is_forced,
is_hearing_impaired = excluded.is_hearing_impaired
`,
- ret.Sha, s.Index, s.Title, s.Language, s.Codec, s.Extension, s.IsDefault, s.IsForced, s.IsHearingImpaired,
+ ret.Id, s.Index, s.Title, s.Language, s.Codec, s.Extension, s.IsDefault, s.IsForced, s.IsHearingImpaired,
)
}
for _, c := range ret.Chapters {
tx.Exec(
ctx,
`
- insert into gocoder.chapters(sha, start_time, end_time, name, type)
+ insert into gocoder.chapters(id, start_time, end_time, name, type)
values ($1, $2, $3, $4, $5)
- on conflict (sha, start_time) do update set
- sha = excluded.sha,
+ on conflict (id, start_time) do update set
+ id = excluded.id,
start_time = excluded.start_time,
end_time = excluded.end_time,
name = excluded.name,
type = excluded.type
`,
- ret.Sha, c.StartTime, c.EndTime, c.Name, c.Type,
+ ret.Id, c.StartTime, c.EndTime, c.Name, c.Type,
)
}
err = tx.Commit(ctx)
From f09728a993d5cae2e1ea31eb88bd28cbfc42d10e Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Mon, 13 Apr 2026 10:28:02 +0200
Subject: [PATCH 02/27] Create types/routes for fingerprinting
---
.../migrations/000005_fingerprints.down.sql | 9 +++++
.../migrations/000005_fingerprints.up.sql | 19 +++++++++++
transcoder/src/api/metadata.go | 33 ++++++++++++++-----
transcoder/src/info.go | 22 ++++++++-----
transcoder/src/metadata.go | 21 ++++++++++--
5 files changed, 84 insertions(+), 20 deletions(-)
create mode 100644 transcoder/migrations/000005_fingerprints.down.sql
create mode 100644 transcoder/migrations/000005_fingerprints.up.sql
diff --git a/transcoder/migrations/000005_fingerprints.down.sql b/transcoder/migrations/000005_fingerprints.down.sql
new file mode 100644
index 00000000..42ebcd05
--- /dev/null
+++ b/transcoder/migrations/000005_fingerprints.down.sql
@@ -0,0 +1,9 @@
+begin;
+
+alter table gocoder.chapters drop column match_accuracy;
+alter table gocoder.chapters drop column fingerprint_id;
+drop table gocoder.chapterprints;
+drop table gocoder.fingerprints;
+alter table gocoder.info drop column ver_fingerprint;
+
+commit;
diff --git a/transcoder/migrations/000005_fingerprints.up.sql b/transcoder/migrations/000005_fingerprints.up.sql
new file mode 100644
index 00000000..e95c45f5
--- /dev/null
+++ b/transcoder/migrations/000005_fingerprints.up.sql
@@ -0,0 +1,19 @@
+begin;
+
+alter table gocoder.info add column ver_fingerprint integer not null default 0;
+
+create table gocoder.fingerprints(
+ id integer not null primary key references gocoder.info(id) on delete cascade,
+ start_data text not null,
+ end_data text not null
+);
+
+create table gocoder.chapterprints(
+ id serial primary key,
+ data text not null
+);
+
+alter table gocoder.chapters add column fingerprint_id integer references gocoder.chapterprints(id) on delete set null;
+alter table gocoder.chapters add column match_accuracy integer;
+
+commit;
diff --git a/transcoder/src/api/metadata.go b/transcoder/src/api/metadata.go
index a314fc19..473fd2ee 100644
--- a/transcoder/src/api/metadata.go
+++ b/transcoder/src/api/metadata.go
@@ -23,7 +23,7 @@ func RegisterMetadataHandlers(e *echo.Group, metadata *src.MetadataService) {
h := mhandler{metadata}
e.GET("/:path/info", h.GetInfo)
- e.GET("/:path/prepare", h.Prepare)
+ e.POST("/:path/prepare", h.Prepare)
e.GET("/:path/subtitle/:name", h.GetSubtitle)
e.GET("/:path/attachment/:name", h.GetAttachment)
e.GET("/:path/thumbnails.png", h.GetThumbnails)
@@ -62,10 +62,11 @@ func (h *mhandler) GetInfo(c *echo.Context) error {
Container: nil,
MimeCodec: nil,
Versions: src.Versions{
- Info: -1,
- Extract: 0,
- Thumbs: 0,
- Keyframes: 0,
+ Info: -1,
+ Extract: 0,
+ Thumbs: 0,
+ Keyframes: 0,
+ Fingerprint: 0,
},
Videos: make([]src.Video, 0),
Audios: make([]src.Audio, 0),
@@ -77,22 +78,34 @@ func (h *mhandler) GetInfo(c *echo.Context) error {
return c.JSON(http.StatusOK, ret)
}
+type PrepareRequest struct {
+ // File path of the previous/next episodes (for audio fingerprinting).
+ NearEpisodes *string `json:"nearEpisodes"`
+}
+
// @Summary Prepare metadata
//
-// @Description Starts metadata preparation in background (info, extract, thumbs, keyframes).
+// @Description Starts metadata preparation in background (info, extract, thumbs, keyframes, chapter identification).
//
// @Tags metadata
// @Param path path string true "Base64 of a video's path" format(base64) example(L3ZpZGVvL2J1YmJsZS5ta3YK)
+// @Param body body PrepareRequest false "Adjacent episode paths for chapter detection"
//
// @Success 202 "Preparation started"
-// @Router /:path/prepare [get]
+// @Router /:path/prepare [post]
func (h *mhandler) Prepare(c *echo.Context) error {
path, sha, err := getPath(c)
if err != nil {
return err
}
- go func(path string, sha string) {
+ var req PrepareRequest
+ err = c.Bind(&req)
+ if err != nil {
+ return echo.NewHTTPError(http.StatusUnprocessableEntity, err.Error())
+ }
+
+ go func() {
bgCtx := context.Background()
info, err := h.metadata.GetMetadata(bgCtx, path, sha)
@@ -113,7 +126,9 @@ func (h *mhandler) Prepare(c *echo.Context) error {
fmt.Printf("failed to extract audio keyframes for %s (stream %d): %v\n", path, audio.Index, err)
}
}
- }(path, sha)
+
+ h.metadata.IdentifyChapters(bgCtx, info, req.NearEpisodes)
+ }()
return c.NoContent(http.StatusAccepted)
}
diff --git a/transcoder/src/info.go b/transcoder/src/info.go
index 5e166602..9e5c1b4b 100644
--- a/transcoder/src/info.go
+++ b/transcoder/src/info.go
@@ -20,10 +20,11 @@ import (
const InfoVersion = 4
type Versions struct {
- Info int32 `json:"info" db:"ver_info"`
- Extract int32 `json:"extract" db:"ver_extract"`
- Thumbs int32 `json:"thumbs" db:"ver_thumbs"`
- Keyframes int32 `json:"keyframes" db:"ver_keyframes"`
+ Info int32 `json:"info" db:"ver_info"`
+ Extract int32 `json:"extract" db:"ver_extract"`
+ Thumbs int32 `json:"thumbs" db:"ver_thumbs"`
+ Keyframes int32 `json:"keyframes" db:"ver_keyframes"`
+ Fingerprint int32 `json:"fingerprint" db:"ver_fingerprint"`
}
type MediaInfo struct {
@@ -149,6 +150,10 @@ type Chapter struct {
Name string `json:"name" db:"name"`
	/// The type value is used to mark special chapters (opening/credits...)
Type ChapterType `json:"type" db:"type"`
+ /// Reference to the chapterprint used for fingerprint matching.
+ FingerprintId *int32 `json:"-" db:"fingerprint_id"`
+ /// Accuracy of the fingerprint match (0-100).
+ MatchAccuracy *int32 `json:"matchAccuracy,omitempty" db:"match_accuracy"`
}
type ChapterType string
@@ -255,10 +260,11 @@ func RetriveMediaInfo(path string, sha string) (*MediaInfo, error) {
Duration: mi.Format.DurationSeconds,
Container: OrNull(mi.Format.FormatName),
Versions: Versions{
- Info: InfoVersion,
- Extract: 0,
- Thumbs: 0,
- Keyframes: 0,
+ Info: InfoVersion,
+ Extract: 0,
+ Thumbs: 0,
+ Keyframes: 0,
+ Fingerprint: 0,
},
Videos: MapStream(mi.Streams, ffprobe.StreamVideo, func(stream *ffprobe.Stream, i uint32) Video {
lang, _ := language.Parse(stream.Tags.Language)
diff --git a/transcoder/src/metadata.go b/transcoder/src/metadata.go
index 4b972e9e..c80a97c6 100644
--- a/transcoder/src/metadata.go
+++ b/transcoder/src/metadata.go
@@ -180,6 +180,19 @@ func (s *MetadataService) GetMetadata(ctx context.Context, path string, sha stri
}
}
+ if ret.Versions.Fingerprint < FingerprintVersion && ret.Versions.Fingerprint != 0 {
+ tx, err := s.Database.Begin(bgCtx)
+ if err != nil {
+ return nil, err
+ }
+ tx.Exec(bgCtx, `delete from gocoder.fingerprints where id = $1`, ret.Id)
+ tx.Exec(bgCtx, `update gocoder.info set ver_fingerprint = 0 where id = $1`, ret.Id)
+ err = tx.Commit(bgCtx)
+ if err != nil {
+ fmt.Printf("error deleting old fingerprints from database: %v", err)
+ }
+ }
+
return ret, nil
}
@@ -192,7 +205,8 @@ func (s *MetadataService) getMetadata(ctx context.Context, path string, sha stri
'info', i.ver_info,
'extract', i.ver_extract,
'thumbs', i.ver_thumbs,
- 'keyframes', i.ver_keyframes
+ 'keyframes', i.ver_keyframes,
+ 'fingerprint', i.ver_fingerprint
) as versions
from gocoder.info as i
where i.sha=$1 limit 1`,
@@ -286,13 +300,14 @@ func (s *MetadataService) storeFreshMetadata(ctx context.Context, path string, s
err = tx.QueryRow(ctx,
`
insert into gocoder.info(sha, path, extension, mime_codec, size, duration, container,
- fonts, ver_info, ver_extract, ver_thumbs, ver_keyframes)
- values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
+ fonts, ver_info, ver_extract, ver_thumbs, ver_keyframes, ver_fingerprint)
+ values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
returning id
`,
// on conflict do not update versions of extract/thumbs/keyframes
ret.Sha, ret.Path, ret.Extension, ret.MimeCodec, ret.Size, ret.Duration, ret.Container,
ret.Fonts, ret.Versions.Info, ret.Versions.Extract, ret.Versions.Thumbs, ret.Versions.Keyframes,
+ ret.Versions.Fingerprint,
).Scan(&ret.Id)
if err != nil {
return set(ret, fmt.Errorf("failed to insert info: %w", err))
From 1ad1aa0392c963e9d09125b3e69f26940dd4adae Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Mon, 13 Apr 2026 10:28:02 +0200
Subject: [PATCH 03/27] Add fingerprint generation
---
transcoder/src/fingerprints.go | 179 +++++++++++++++++++++++++++
transcoder/src/fingerprints_utils.go | 96 ++++++++++++++
transcoder/src/metadata.go | 22 ++--
3 files changed, 287 insertions(+), 10 deletions(-)
create mode 100644 transcoder/src/fingerprints.go
create mode 100644 transcoder/src/fingerprints_utils.go
diff --git a/transcoder/src/fingerprints.go b/transcoder/src/fingerprints.go
new file mode 100644
index 00000000..c72a5c11
--- /dev/null
+++ b/transcoder/src/fingerprints.go
@@ -0,0 +1,179 @@
+package src
+
+import (
+ "context"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "os/exec"
+
+ "github.com/jackc/pgx/v5"
+ "github.com/zoriya/kyoo/transcoder/src/utils"
+)
+
+const (
+ FingerprintVersion = 1
+ FpStartPercent = 0.20
+ FpStartDuration = 10 * 60
+ FpEndDuration = 5 * 60
+)
+
+type Fingerprint struct {
+ Start []uint32
+ End []uint32
+}
+
+func (s *MetadataService) ComputeFingerprint(ctx context.Context, info *MediaInfo) (*Fingerprint, error) {
+ getRunning, set := s.fingerprintLock.Start(info.Path)
+ if getRunning != nil {
+ return getRunning()
+ }
+
+ var startData string
+ var endData string
+ err := s.Database.QueryRow(ctx,
+ `select start_data, end_data from gocoder.fingerprints where id = $1`,
+ info.Id,
+ ).Scan(&startData, &endData)
+ if err == nil {
+ startFingerprint, err := DecompressFingerprint(startData)
+ if err != nil {
+ return set(nil, fmt.Errorf("failed to decompress start fingerprint: %w", err))
+ }
+ endFingerprint, err := DecompressFingerprint(endData)
+ if err != nil {
+ return set(nil, fmt.Errorf("failed to decompress end fingerprint: %w", err))
+ }
+ return set(&Fingerprint{
+ Start: startFingerprint,
+ End: endFingerprint,
+ }, nil)
+ }
+ if !errors.Is(err, pgx.ErrNoRows) {
+ return set(nil, fmt.Errorf("failed to query fingerprint: %w", err))
+ }
+
+ defer utils.PrintExecTime("chromaprint for %s", info.Path)()
+ startFingerprint, err := computeChromaprint(
+ info.Path,
+ 0,
+ min(info.Duration*FpStartPercent, FpStartDuration),
+ )
+ if err != nil {
+ return set(nil, fmt.Errorf("failed to compute start fingerprint: %w", err))
+ }
+
+	endFingerprint, err := computeChromaprint(info.Path, info.Duration-FpEndDuration, -1)
+ if err != nil {
+ return set(nil, fmt.Errorf("failed to compute end fingerprint: %w", err))
+ }
+
+ return set(&Fingerprint{
+ Start: startFingerprint,
+ End: endFingerprint,
+ }, nil)
+}
+
+func computeChromaprint(
+ path string,
+ start float64,
+ duration float64,
+) ([]uint32, error) {
+	defer utils.PrintExecTime("chromaprint for %s (start %f, duration %f)", path, start, duration)()
+
+ args := []string{
+ "-v", "error",
+ }
+ if start > 0 {
+ args = append(args, "-ss", fmt.Sprintf("%.6f", start))
+ }
+ if duration > 0 {
+ args = append(args, "-t", fmt.Sprintf("%.6f", duration))
+ }
+ args = append(args,
+ "-i", path,
+ "-ac", "2",
+ "-f", "chromaprint",
+ "-fp_format", "raw",
+ "-",
+ )
+
+ cmd := exec.Command(
+ "ffmpeg",
+ args...,
+ )
+ output, err := cmd.Output()
+ if err != nil {
+ return nil, fmt.Errorf("ffmpeg failed: %w", err)
+ }
+
+ if len(output)%4 != 0 {
+ return nil, fmt.Errorf("invalid binary fingerprint size: %d", len(output))
+ }
+
+ result := make([]uint32, len(output)/4)
+ for i := range result {
+ result[i] = binary.LittleEndian.Uint32(output[i*4:])
+ }
+ return result, nil
+}
+
+func (s *MetadataService) StoreFingerprint(ctx context.Context, infoID int32, fingerprint *Fingerprint) error {
+ startCompressed, err := CompressFingerprint(fingerprint.Start)
+ if err != nil {
+ return fmt.Errorf("failed to compress start fingerprint: %w", err)
+ }
+ endCompressed, err := CompressFingerprint(fingerprint.End)
+ if err != nil {
+ return fmt.Errorf("failed to compress end fingerprint: %w", err)
+ }
+
+ _, err = s.Database.Exec(ctx,
+ `insert into gocoder.fingerprints(id, start_data, end_data) values ($1, $2, $3)
+ on conflict (id) do update set start_data = excluded.start_data, end_data = excluded.end_data`,
+ infoID, startCompressed, endCompressed,
+ )
+ return err
+}
+
+func (s *MetadataService) DeleteFingerprint(ctx context.Context, infoID int32) error {
+ _, err := s.Database.Exec(ctx,
+ `delete from gocoder.fingerprints where id = $1`,
+ infoID,
+ )
+ return err
+}
+
+func (s *MetadataService) GetChapterprint(ctx context.Context, id int32) ([]uint32, error) {
+ var data string
+ err := s.Database.QueryRow(ctx,
+ `select data from gocoder.chapterprints where id = $1`,
+ id,
+ ).Scan(&data)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get chapterprint %d: %w", id, err)
+ }
+
+ fingerprint, err := DecompressFingerprint(data)
+ if err != nil {
+ return nil, fmt.Errorf("failed to decompress chapterprint %d: %w", id, err)
+ }
+ return fingerprint, nil
+}
+
+func (s *MetadataService) StoreChapterprint(ctx context.Context, fp []uint32) (int32, error) {
+ data, err := CompressFingerprint(fp)
+ if err != nil {
+ return 0, fmt.Errorf("failed to compress chapterprint: %w", err)
+ }
+
+ var id int32
+ err = s.Database.QueryRow(ctx,
+ `insert into gocoder.chapterprints(data) values ($1) returning id`,
+ data,
+ ).Scan(&id)
+ if err != nil {
+ return 0, fmt.Errorf("failed to store chapterprint: %w", err)
+ }
+ return id, nil
+}
diff --git a/transcoder/src/fingerprints_utils.go b/transcoder/src/fingerprints_utils.go
new file mode 100644
index 00000000..80f14a24
--- /dev/null
+++ b/transcoder/src/fingerprints_utils.go
@@ -0,0 +1,96 @@
+package src
+
+import (
+ "bytes"
+ "compress/zlib"
+ "encoding/base64"
+ "encoding/binary"
+ "fmt"
+ "io"
+ "math"
+)
+
+// Number of fingerprint items per second (chromaprint default sample rate).
+// NOTE(review): chromaprint's documented default is 11025 Hz audio with 4096-sample
+// frames and 2/3 overlap (~8.08 items/s); confirm 7.8125 matches ffmpeg's raw fp output rate.
+const FingerprintSampleRate = 7.8125
+
+func secToSamples(sec float64) int {
+ return int(math.Round(sec * FingerprintSampleRate))
+}
+
+func samplesToSec(samples int) float64 {
+ return float64(samples) / FingerprintSampleRate
+}
+
+func CompressFingerprint(fp []uint32) (string, error) {
+ if len(fp) == 0 {
+ return "", nil
+ }
+
+ raw := make([]byte, len(fp)*4)
+ for i, v := range fp {
+ binary.LittleEndian.PutUint32(raw[i*4:], v)
+ }
+
+ var compressed bytes.Buffer
+ zw := zlib.NewWriter(&compressed)
+ if _, err := zw.Write(raw); err != nil {
+ _ = zw.Close()
+ return "", fmt.Errorf("failed to compress fingerprint: %w", err)
+ }
+ if err := zw.Close(); err != nil {
+ return "", fmt.Errorf("failed to finalize compressed fingerprint: %w", err)
+ }
+
+ return base64.StdEncoding.EncodeToString(compressed.Bytes()), nil
+}
+
+func DecompressFingerprint(compressed string) ([]uint32, error) {
+ data, err := base64.StdEncoding.DecodeString(compressed)
+ if err != nil {
+ return nil, fmt.Errorf("failed to base64 decode fingerprint: %w", err)
+ }
+
+ zr, err := zlib.NewReader(bytes.NewReader(data))
+ if err != nil {
+ return nil, fmt.Errorf("failed to create zlib reader: %w", err)
+ }
+ defer zr.Close()
+
+ raw, err := io.ReadAll(zr)
+ if err != nil {
+ return nil, fmt.Errorf("failed to decompress fingerprint: %w", err)
+ }
+
+ if len(raw)%4 != 0 {
+ return nil, fmt.Errorf("invalid raw fingerprint size: %d", len(raw))
+ }
+
+ numItems := len(raw) / 4
+ result := make([]uint32, numItems)
+ for i := range numItems {
+ result[i] = binary.LittleEndian.Uint32(raw[i*4:])
+ }
+
+ return result, nil
+}
+
+func ExtractSegment(fp []uint32, startSec, endSec float64) ([]uint32, error) {
+ startIdx := secToSamples(startSec)
+ endIdx := secToSamples(endSec)
+
+ if startIdx < 0 {
+ startIdx = 0
+ }
+ if endIdx > len(fp) {
+ endIdx = len(fp)
+ }
+ if startIdx >= endIdx {
+ return nil, fmt.Errorf("invalid segment range: %f-%f", startSec, endSec)
+ }
+
+ segment := make([]uint32, endIdx-startIdx)
+ copy(segment, fp[startIdx:endIdx])
+ return segment, nil
+}
diff --git a/transcoder/src/metadata.go b/transcoder/src/metadata.go
index c80a97c6..e9a5cb6e 100644
--- a/transcoder/src/metadata.go
+++ b/transcoder/src/metadata.go
@@ -20,22 +20,24 @@ import (
)
type MetadataService struct {
- Database *pgxpool.Pool
- lock RunLock[string, *MediaInfo]
- thumbLock RunLock[string, any]
- extractLock RunLock[string, any]
- keyframeLock RunLock[KeyframeKey, *Keyframe]
- storage storage.StorageBackend
+ Database *pgxpool.Pool
+ lock RunLock[string, *MediaInfo]
+ thumbLock RunLock[string, any]
+ extractLock RunLock[string, any]
+ keyframeLock RunLock[KeyframeKey, *Keyframe]
+ fingerprintLock RunLock[string, *Fingerprint]
+ storage storage.StorageBackend
}
func NewMetadataService() (*MetadataService, error) {
ctx := context.TODO()
s := &MetadataService{
- lock: NewRunLock[string, *MediaInfo](),
- thumbLock: NewRunLock[string, any](),
- extractLock: NewRunLock[string, any](),
- keyframeLock: NewRunLock[KeyframeKey, *Keyframe](),
+ lock: NewRunLock[string, *MediaInfo](),
+ thumbLock: NewRunLock[string, any](),
+ extractLock: NewRunLock[string, any](),
+ keyframeLock: NewRunLock[KeyframeKey, *Keyframe](),
+ fingerprintLock: NewRunLock[string, *Fingerprint](),
}
db, err := s.setupDb()
From 9ac833b11b75f5f387be5f6d09298677ee808dc6 Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Mon, 13 Apr 2026 10:28:02 +0200
Subject: [PATCH 04/27] Add fingerprinting logic
---
transcoder/go.mod | 22 +-
transcoder/go.sum | 20 ++
transcoder/src/api/metadata.go | 2 +-
transcoder/src/api/path.go | 13 +-
transcoder/src/chapters.go | 355 +++++++++++++++++++++++++
transcoder/src/fingerprints.go | 6 +-
transcoder/src/fingerprints_compare.go | 49 ++++
transcoder/src/info.go | 16 ++
8 files changed, 458 insertions(+), 25 deletions(-)
create mode 100644 transcoder/src/chapters.go
create mode 100644 transcoder/src/fingerprints_compare.go
diff --git a/transcoder/go.mod b/transcoder/go.mod
index 328c5f0e..cfe9ab7b 100644
--- a/transcoder/go.mod
+++ b/transcoder/go.mod
@@ -1,6 +1,6 @@
module github.com/zoriya/kyoo/transcoder
-go 1.25.0
+go 1.26.0
require (
github.com/MicahParks/keyfunc/v3 v3.8.0
@@ -37,7 +37,7 @@ require (
require (
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/MicahParks/jwkset v0.11.0 // indirect
- github.com/asticode/go-astikit v0.58.0 // indirect
+ github.com/asticode/go-astikit v0.59.0 // indirect
github.com/asticode/go-astits v1.15.0 // indirect
github.com/aws/aws-sdk-go-v2/service/signin v1.0.9 // indirect
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
@@ -69,10 +69,10 @@ require (
go.opentelemetry.io/proto/otlp v1.10.0 // indirect
go.yaml.in/yaml/v2 v2.4.4 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
- golang.org/x/mod v0.34.0 // indirect
- golang.org/x/tools v0.43.0 // indirect
- google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9 // indirect
+ golang.org/x/mod v0.35.0 // indirect
+ golang.org/x/tools v0.44.0 // indirect
+ google.golang.org/genproto/googleapis/api v0.0.0-20260414002931-afd174a4e478 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20260414002931-afd174a4e478 // indirect
google.golang.org/grpc v1.80.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
@@ -95,11 +95,11 @@ require (
github.com/aws/aws-sdk-go-v2/service/sso v1.30.15 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.19 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.41.10 // indirect
- github.com/aws/smithy-go v1.24.2 // indirect
+ github.com/aws/smithy-go v1.24.3 // indirect
github.com/golang-jwt/jwt/v5 v5.3.1
- golang.org/x/image v0.38.0 // indirect
- golang.org/x/net v0.52.0 // indirect
- golang.org/x/sys v0.42.0 // indirect
- golang.org/x/text v0.35.0
+ golang.org/x/image v0.39.0 // indirect
+ golang.org/x/net v0.53.0 // indirect
+ golang.org/x/sys v0.43.0 // indirect
+ golang.org/x/text v0.36.0
golang.org/x/time v0.15.0 // indirect
)
diff --git a/transcoder/go.sum b/transcoder/go.sum
index e5bac815..7bdd6ca5 100644
--- a/transcoder/go.sum
+++ b/transcoder/go.sum
@@ -12,6 +12,8 @@ github.com/asticode/go-astikit v0.20.0/go.mod h1:h4ly7idim1tNhaVkdVBeXQZEE3L0xbl
github.com/asticode/go-astikit v0.30.0/go.mod h1:h4ly7idim1tNhaVkdVBeXQZEE3L0xblP7fCWbgwipF0=
github.com/asticode/go-astikit v0.58.0 h1:WXNpaxCPNFReikHiXvzyDv49NpV/GMD6PV80iem6WGo=
github.com/asticode/go-astikit v0.58.0/go.mod h1:fV43j20UZYfXzP9oBn33udkvCvDvCDhzjVqoLFuuYZE=
+github.com/asticode/go-astikit v0.59.0 h1:tjbwDym+MTSxqkAhJoHRZmHMXK6Jv4vGx+97FptKH6k=
+github.com/asticode/go-astikit v0.59.0/go.mod h1:fV43j20UZYfXzP9oBn33udkvCvDvCDhzjVqoLFuuYZE=
github.com/asticode/go-astisub v0.39.0 h1:j1/rFLRUH0TT2CW9YCtBek9lRdMp96oxaZm6vbgE96M=
github.com/asticode/go-astisub v0.39.0/go.mod h1:WTkuSzFB+Bp7wezuSf2Oxulj5A8zu2zLRVFf6bIFQK8=
github.com/asticode/go-astits v1.8.0/go.mod h1:DkOWmBNQpnr9mv24KfZjq4JawCFX1FCqjLVGvO0DygQ=
@@ -55,6 +57,8 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.41.10 h1:p8ogvvLugcR/zLBXTXrTkj0RYBU
github.com/aws/aws-sdk-go-v2/service/sts v1.41.10/go.mod h1:60dv0eZJfeVXfbT1tFJinbHrDfSJ2GZl4Q//OSSNAVw=
github.com/aws/smithy-go v1.24.2 h1:FzA3bu/nt/vDvmnkg+R8Xl46gmzEDam6mZ1hzmwXFng=
github.com/aws/smithy-go v1.24.2/go.mod h1:YE2RhdIuDbA5E5bTdciG9KrW3+TiEONeUWCqxX9i1Fc=
+github.com/aws/smithy-go v1.24.3 h1:XgOAaUgx+HhVBoP4v8n6HCQoTRDhoMghKqw4LNHsDNg=
+github.com/aws/smithy-go v1.24.3/go.mod h1:YE2RhdIuDbA5E5bTdciG9KrW3+TiEONeUWCqxX9i1Fc=
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
@@ -235,12 +239,18 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.38.0 h1:5l+q+Y9JDC7mBOMjo4/aPhMDcxEptsX+Tt3GgRQRPuE=
golang.org/x/image v0.38.0/go.mod h1:/3f6vaXC+6CEanU4KJxbcUZyEePbyKbaLoDOe4ehFYY=
+golang.org/x/image v0.39.0 h1:skVYidAEVKgn8lZ602XO75asgXBgLj9G/FE3RbuPFww=
+golang.org/x/image v0.39.0/go.mod h1:sIbmppfU+xFLPIG0FoVUTvyBMmgng1/XAMhQ2ft0hpA=
golang.org/x/mod v0.34.0 h1:xIHgNUUnW6sYkcM5Jleh05DvLOtwc6RitGHbDk4akRI=
golang.org/x/mod v0.34.0/go.mod h1:ykgH52iCZe79kzLLMhyCUzhMci+nQj+0XkbXpNYtVjY=
+golang.org/x/mod v0.35.0 h1:Ww1D637e6Pg+Zb2KrWfHQUnH2dQRLBQyAtpr/haaJeM=
+golang.org/x/mod v0.35.0/go.mod h1:+GwiRhIInF8wPm+4AoT6L0FA1QWAad3OMdTRx4tFYlU=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.52.0 h1:He/TN1l0e4mmR3QqHMT2Xab3Aj3L9qjbhRm78/6jrW0=
golang.org/x/net v0.52.0/go.mod h1:R1MAz7uMZxVMualyPXb+VaqGSa3LIaUqk0eEt3w36Sw=
+golang.org/x/net v0.53.0 h1:d+qAbo5L0orcWAr0a9JweQpjXF19LMXJE8Ey7hwOdUA=
+golang.org/x/net v0.53.0/go.mod h1:JvMuJH7rrdiCfbeHoo3fCQU24Lf5JJwT9W3sJFulfgs=
golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -248,21 +258,31 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo=
golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
+golang.org/x/sys v0.43.0 h1:Rlag2XtaFTxp19wS8MXlJwTvoh8ArU6ezoyFsMyCTNI=
+golang.org/x/sys v0.43.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8=
golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA=
+golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg=
+golang.org/x/text v0.36.0/go.mod h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164=
golang.org/x/time v0.15.0 h1:bbrp8t3bGUeFOx08pvsMYRTCVSMk89u4tKbNOZbp88U=
golang.org/x/time v0.15.0/go.mod h1:Y4YMaQmXwGQZoFaVFk4YpCt4FLQMYKZe9oeV/f4MSno=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.43.0 h1:12BdW9CeB3Z+J/I/wj34VMl8X+fEXBxVR90JeMX5E7s=
golang.org/x/tools v0.43.0/go.mod h1:uHkMso649BX2cZK6+RpuIPXS3ho2hZo4FVwfoy1vIk0=
+golang.org/x/tools v0.44.0 h1:UP4ajHPIcuMjT1GqzDWRlalUEoY+uzoZKnhOjbIPD2c=
+golang.org/x/tools v0.44.0/go.mod h1:KA0AfVErSdxRZIsOVipbv3rQhVXTnlU6UhKxHd1seDI=
gonum.org/v1/gonum v0.17.0 h1:VbpOemQlsSMrYmn7T2OUvQ4dqxQXU+ouZFQsZOx50z4=
gonum.org/v1/gonum v0.17.0/go.mod h1:El3tOrEuMpv2UdMrbNlKEh9vd86bmQ6vqIcDwxEOc1E=
google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9 h1:VPWxll4HlMw1Vs/qXtN7BvhZqsS9cdAittCNvVENElA=
google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9/go.mod h1:7QBABkRtR8z+TEnmXTqIqwJLlzrZKVfAUm7tY3yGv0M=
+google.golang.org/genproto/googleapis/api v0.0.0-20260414002931-afd174a4e478 h1:yQugLulqltosq0B/f8l4w9VryjV+N/5gcW0jQ3N8Qec=
+google.golang.org/genproto/googleapis/api v0.0.0-20260414002931-afd174a4e478/go.mod h1:C6ADNqOxbgdUUeRTU+LCHDPB9ttAMCTff6auwCVa4uc=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9 h1:m8qni9SQFH0tJc1X0vmnpw/0t+AImlSvp30sEupozUg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20260414002931-afd174a4e478 h1:RmoJA1ujG+/lRGNfUnOMfhCy5EipVMyvUE+KNbPbTlw=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20260414002931-afd174a4e478/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8=
google.golang.org/grpc v1.80.0 h1:Xr6m2WmWZLETvUNvIUmeD5OAagMw3FiKmMlTdViWsHM=
google.golang.org/grpc v1.80.0/go.mod h1:ho/dLnxwi3EDJA4Zghp7k2Ec1+c2jqup0bFkw07bwF4=
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
diff --git a/transcoder/src/api/metadata.go b/transcoder/src/api/metadata.go
index 473fd2ee..684e81b1 100644
--- a/transcoder/src/api/metadata.go
+++ b/transcoder/src/api/metadata.go
@@ -80,7 +80,7 @@ func (h *mhandler) GetInfo(c *echo.Context) error {
type PrepareRequest struct {
// File path of the previous/next episodes (for audio fingerprinting).
- NearEpisodes *string `json:"nearEpisodes"`
+ NearEpisodes []string `json:"nearEpisodes"`
}
// @Summary Prepare metadata
diff --git a/transcoder/src/api/path.go b/transcoder/src/api/path.go
index 2e14bbdf..fbed6c51 100644
--- a/transcoder/src/api/path.go
+++ b/transcoder/src/api/path.go
@@ -1,11 +1,8 @@
package api
import (
- "crypto/sha1"
"encoding/base64"
- "encoding/hex"
"net/http"
- "os"
"path/filepath"
"strings"
@@ -38,15 +35,7 @@ func getPath(c *echo.Context) (string, string, error) {
}
func getHash(path string) (string, error) {
- info, err := os.Stat(path)
- if err != nil {
- return "", err
- }
- h := sha1.New()
- h.Write([]byte(path))
- h.Write([]byte(info.ModTime().String()))
- sha := hex.EncodeToString(h.Sum(nil))
- return sha, nil
+ return src.ComputeSha(path)
}
func sanitizePath(path string) error {
diff --git a/transcoder/src/chapters.go b/transcoder/src/chapters.go
new file mode 100644
index 00000000..d113a207
--- /dev/null
+++ b/transcoder/src/chapters.go
@@ -0,0 +1,355 @@
+package src
+
+import (
+ "context"
+ "fmt"
+ "math"
+
+ "github.com/zoriya/kyoo/transcoder/src/utils"
+)
+
+const (
+ // MergeWindowSec is the maximum gap (in seconds) between a detected chapter
+ // boundary and an existing chapter for them to be merged.
+ MergeWindowSec float32 = 3.0
+)
+
// IdentifyChapters detects intro/credits chapters for info by comparing its
// audio fingerprint against those of neighbouring episodes, persists the
// merged chapter list, and records the fingerprint algorithm version.
// All failures are logged and swallowed: detection is best-effort and must
// never fail the caller.
func (s *MetadataService) IdentifyChapters(ctx context.Context, info *MediaInfo, nearEpisodes []string) {
	defer utils.PrintExecTime("identify chapters for %s", info.Path)()

	// Already processed with the current (or a newer) algorithm version.
	if info.Versions.Fingerprint >= FingerprintVersion {
		return
	}

	fingerprint, err := s.ComputeFingerprint(ctx, info)
	if err != nil {
		fmt.Printf("failed to compute fingerprint for %s: %v\n", info.Path, err)
		return
	}

	// Collect chapter candidates from every neighbouring episode; a failure
	// on one neighbour must not prevent matches from the others.
	candidates := make([]Chapter, 0)

	for _, otherPath := range nearEpisodes {
		otherCandidates, err := s.compareWithOther(ctx, info, fingerprint, otherPath)
		if err != nil {
			fmt.Printf("failed to compare %s with %s: %v\n", info.Path, otherPath, err)
			continue
		}
		candidates = append(candidates, otherCandidates...)
	}

	// Fold detected candidates into the existing chapter list and persist.
	chapters := mergeChapters(info, candidates)
	if err := s.saveChapters(ctx, info.Id, chapters); err != nil {
		fmt.Printf("failed to save chapters for %s: %v\n", info.Path, err)
		return
	}

	// The full fingerprint is deleted after detection — presumably to save
	// storage, since per-chapter chapterprints are stored separately.
	if err := s.DeleteFingerprint(ctx, info.Id); err != nil {
		fmt.Printf("failed to delete fingerprint for %s: %v\n", info.Path, err)
	}

	// Mark this media as processed so the work is not redone next scan.
	_, err = s.Database.Exec(ctx,
		`update gocoder.info set ver_fingerprint = $2 where id = $1`,
		info.Id, FingerprintVersion,
	)
	if err != nil {
		fmt.Printf("failed to update fingerprint version for %s: %v\n", info.Path, err)
	}
}
+
+func (s *MetadataService) compareWithOther(
+ ctx context.Context,
+ info *MediaInfo,
+ fingerprint *Fingerprint,
+ otherPath string,
+) ([]Chapter, error) {
+ otherSha, err := ComputeSha(otherPath)
+ if err != nil {
+ return nil, fmt.Errorf("failed to compute sha for %s: %w", otherPath, err)
+ }
+ otherInfo, err := s.GetMetadata(ctx, otherPath, otherSha)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get metadata for %s: %w", otherPath, err)
+ }
+
+ hasChapterprints := false
+ for _, c := range otherInfo.Chapters {
+ if c.FingerprintId != nil {
+ hasChapterprints = true
+ break
+ }
+ }
+
+ if hasChapterprints {
+ return s.matchByChapterprints(ctx, info, fingerprint, otherInfo)
+ }
+
+ return s.matchByOverlap(ctx, info, fingerprint, otherInfo)
+}
+
// matchByChapterprints searches this media's fingerprint for the chapter
// fingerprints already stored on a neighbouring episode, producing one
// candidate Chapter per chapterprint found.
func (s *MetadataService) matchByChapterprints(
	ctx context.Context,
	info *MediaInfo,
	fingerprint *Fingerprint,
	otherInfo *MediaInfo,
) ([]Chapter, error) {
	var candidates []Chapter

	for _, ch := range otherInfo.Chapters {
		// Only chapters carrying a stored fingerprint can be matched.
		if ch.FingerprintId == nil {
			continue
		}
		// Plain content chapters are not shared between episodes.
		if ch.Type == Content {
			continue
		}

		needle, err := s.GetChapterprint(ctx, *ch.FingerprintId)
		if err != nil {
			fmt.Printf("failed to get chapterprint %d: %v\n", *ch.FingerprintId, err)
			continue
		}

		// Credits are searched in the end-of-file fingerprint; startOffset
		// converts a position inside that fingerprint back to an absolute
		// timestamp. Everything else is searched in the start fingerprint.
		// NOTE(review): non-Content, non-Credits types (e.g. recaps) all go
		// through the start fingerprint — confirm that is intended.
		fp := fingerprint.Start
		startOffset := 0.0
		if ch.Type == Credits {
			fp = fingerprint.End
			startOffset = max(info.Duration-FpEndDuration, 0)
		}

		match, err := FpFindContain(fp, needle)
		if err != nil {
			fmt.Printf("failed to find chapterprint in fingerprint: %v\n", err)
			continue
		}
		// A nil match without an error simply means "not found".
		if match == nil {
			continue
		}

		// Reuse the neighbour's chapterprint id so future episodes can match
		// against the same stored fingerprint.
		candidates = append(candidates, Chapter{
			Id:        info.Id,
			StartTime: float32(startOffset + match.Start),
			EndTime:   float32(startOffset + match.Start + match.Duration),
			Name:      "",
			Type:      ch.Type,
			FingerprintId: ch.FingerprintId,
			// new with an expression requires Go 1.26+ (go.mod was bumped).
			MatchAccuracy: new(int32(match.Accuracy)),
		})
	}

	return candidates, nil
}
+
// matchByOverlap computes the neighbour's fingerprint and searches for audio
// segments shared by both episodes (same intro/credits music). Every overlap
// found is stored as a new chapterprint so that later episodes can match it
// directly via matchByChapterprints.
func (s *MetadataService) matchByOverlap(
	ctx context.Context,
	info *MediaInfo,
	fingerprint *Fingerprint,
	otherInfo *MediaInfo,
) ([]Chapter, error) {
	otherPrint, err := s.ComputeFingerprint(ctx, otherInfo)
	if err != nil {
		return nil, fmt.Errorf("failed to compute fingerprint for %s: %w", otherInfo.Path, err)
	}

	// Caching the neighbour's fingerprint is best-effort; failure to store
	// it only costs a recomputation later.
	if err := s.StoreFingerprint(ctx, otherInfo.Id, otherPrint); err != nil {
		fmt.Printf("failed to store fingerprint for %s: %v\n", otherInfo.Path, err)
	}

	// Intros are searched in the start-of-file fingerprints, credits in the
	// end-of-file fingerprints.
	intros, err := FpFindOverlap(fingerprint.Start, otherPrint.Start)
	if err != nil {
		return nil, fmt.Errorf("failed to find intro overlaps: %w", err)
	}
	credits, err := FpFindOverlap(fingerprint.End, otherPrint.End)
	if err != nil {
		return nil, fmt.Errorf("failed to find credit overlaps: %w", err)
	}

	var candidates []Chapter
	for _, intro := range intros {
		// Cut the matching span out of our fingerprint and persist it as a
		// reusable chapterprint; per-item failures are logged and skipped.
		fp, err := ExtractSegment(fingerprint.Start, intro.StartFirst, intro.StartFirst+intro.Duration)
		if err != nil {
			fmt.Printf("failed to extract segment: %v\n", err)
			continue
		}

		fpId, err := s.StoreChapterprint(ctx, fp)
		if err != nil {
			fmt.Printf("failed to store chapterprint: %v\n", err)
			continue
		}

		candidates = append(candidates, Chapter{
			Id:            info.Id,
			StartTime:     float32(intro.StartFirst),
			EndTime:       float32(intro.StartFirst + intro.Duration),
			Name:          "",
			Type:          Intro,
			FingerprintId: &fpId,
			MatchAccuracy: new(int32(intro.Accuracy)),
		})
	}

	// Positions inside the end fingerprint are relative to its start; shift
	// them by the fingerprint's absolute offset within the file.
	endOffset := max(info.Duration-FpEndDuration, 0)
	for _, ov := range credits {
		segData, err := ExtractSegment(fingerprint.End, ov.StartFirst, ov.StartFirst+ov.Duration)
		if err != nil {
			fmt.Printf("failed to extract segment: %v\n", err)
			continue
		}

		fpId, err := s.StoreChapterprint(ctx, segData)
		if err != nil {
			fmt.Printf("failed to store chapterprint: %v\n", err)
			continue
		}

		candidates = append(candidates, Chapter{
			Id:            info.Id,
			StartTime:     float32(endOffset + ov.StartFirst),
			EndTime:       float32(endOffset + ov.StartFirst + ov.Duration),
			Name:          "",
			Type:          Credits,
			FingerprintId: &fpId,
			MatchAccuracy: new(int32(ov.Accuracy)),
		})
	}

	return candidates, nil
}
+
// mergeChapters folds detected chapter candidates into the media's existing
// chapter list. A candidate whose start lies within MergeWindowSec of an
// existing chapter's start annotates that chapter in place; otherwise it is
// inserted as a new chapter (splitting surrounding chapters as needed).
func mergeChapters(info *MediaInfo, candidates []Chapter) []Chapter {
	if len(candidates) == 0 {
		return info.Chapters
	}

	// Work on a copy so the caller's MediaInfo stays untouched.
	chapters := make([]Chapter, len(info.Chapters))
	copy(chapters, info.Chapters)

	for _, cand := range candidates {
		// Content candidates carry no detection value; skip defensively.
		if cand.Type == Content {
			continue
		}

		merged := false
		for i := range chapters {
			// Candidate aligns with an existing chapter boundary: upgrade the
			// existing chapter instead of inserting a duplicate.
			if absF32(chapters[i].StartTime-cand.StartTime) < MergeWindowSec {
				if chapters[i].Type == Content {
					chapters[i].Type = cand.Type
				}
				// NOTE(review): the existing EndTime is kept even when the
				// candidate's differs, and the last matching candidate wins
				// FingerprintId/MatchAccuracy regardless of accuracy — confirm
				// both behaviours are intended.
				chapters[i].FingerprintId = cand.FingerprintId
				chapters[i].MatchAccuracy = cand.MatchAccuracy
				merged = true
				break
			}
		}

		if !merged {
			chapters = insertChapter(chapters, Chapter{
				Id:            info.Id,
				StartTime:     cand.StartTime,
				EndTime:       cand.EndTime,
				Name:          "",
				Type:          cand.Type,
				FingerprintId: cand.FingerprintId,
				MatchAccuracy: cand.MatchAccuracy,
			}, info.Duration)
		}
	}

	return chapters
}
+
// insertChapter adds a new chapter into the chapter list, adjusting adjacent
// chapters so there are no gaps or overlaps.
// Assumes `chapters` is sorted by StartTime (TODO confirm with callers); the
// returned list preserves that ordering.
func insertChapter(chapters []Chapter, ch Chapter, duration float64) []Chapter {
	var ret []Chapter
	// Empty list: wrap the new chapter with Content fillers so the timeline
	// still covers [0, duration] without holes.
	if len(chapters) == 0 {
		if ch.StartTime > 0 {
			ret = append(ret, Chapter{
				Id:        ch.Id,
				StartTime: 0,
				EndTime:   ch.StartTime,
				Name:      "",
				Type:      Content,
			})
		}
		ret = append(ret, ch)
		if ch.EndTime < float32(duration) {
			ret = append(ret, Chapter{
				Id:        ch.Id,
				StartTime: ch.EndTime,
				EndTime:   float32(duration),
				Name:      "",
				Type:      Content,
			})
		}
		return ret
	}

	inserted := false
	for _, existing := range chapters {
		// First existing chapter that ends after the new one starts: insert
		// here, keeping the non-overlapped leading part of that chapter.
		if !inserted && ch.StartTime < existing.EndTime {
			if ch.StartTime > existing.StartTime {
				before := existing
				before.EndTime = ch.StartTime
				ret = append(ret, before)
			}
			ret = append(ret, ch)
			inserted = true

			// Keep the trailing part of the chapter the new one lands inside.
			if ch.EndTime < existing.EndTime {
				after := existing
				after.StartTime = ch.EndTime
				ret = append(ret, after)
			}
			continue
		}

		// Chapters fully swallowed by the new one are dropped; a partially
		// covered follower is trimmed to start where the new chapter ends.
		if inserted && existing.StartTime < ch.EndTime {
			if existing.EndTime > ch.EndTime {
				existing.StartTime = ch.EndTime
				ret = append(ret, existing)
			}
			continue
		}

		ret = append(ret, existing)
	}

	// New chapter starts after every existing one: append at the end.
	if !inserted {
		ret = append(ret, ch)
	}

	return ret
}
+
// saveChapters atomically replaces all chapters of the given media: existing
// rows are deleted and the new list inserted within a single transaction, so
// readers never observe a partially updated chapter list.
func (s *MetadataService) saveChapters(ctx context.Context, infoId int32, chapters []Chapter) error {
	tx, err := s.Database.Begin(ctx)
	if err != nil {
		return fmt.Errorf("failed to begin transaction: %w", err)
	}
	// Undoes the transaction on any early error return; harmless after a
	// successful Commit.
	defer tx.Rollback(ctx)

	// Delete existing chapters
	_, err = tx.Exec(ctx, `delete from gocoder.chapters where id = $1`, infoId)
	if err != nil {
		return fmt.Errorf("failed to delete existing chapters: %w", err)
	}

	// Insert new chapters
	for _, c := range chapters {
		_, err = tx.Exec(ctx,
			`insert into gocoder.chapters(id, start_time, end_time, name, type, fingerprint_id, match_accuracy)
			values ($1, $2, $3, $4, $5, $6, $7)`,
			infoId, c.StartTime, c.EndTime, c.Name, c.Type, c.FingerprintId, c.MatchAccuracy,
		)
		if err != nil {
			return fmt.Errorf("failed to insert chapter: %w", err)
		}
	}

	return tx.Commit(ctx)
}
+
// absF32 returns the absolute value of v, delegating to math.Abs so special
// values (NaN, signed zero) follow the standard library's semantics.
func absF32(v float32) float32 {
	abs := math.Abs(float64(v))
	return float32(abs)
}
diff --git a/transcoder/src/fingerprints.go b/transcoder/src/fingerprints.go
index c72a5c11..83f7cd6f 100644
--- a/transcoder/src/fingerprints.go
+++ b/transcoder/src/fingerprints.go
@@ -63,7 +63,11 @@ func (s *MetadataService) ComputeFingerprint(ctx context.Context, info *MediaInf
return set(nil, fmt.Errorf("failed to compute start fingerprint: %w", err))
}
- endFingerprint, err := computeChromaprint(info.Path, info.Duration-5*60, -1)
+ endFingerprint, err := computeChromaprint(
+ info.Path,
+ max(info.Duration-5*60, 0),
+ -1,
+ )
if err != nil {
return set(nil, fmt.Errorf("failed to compute end fingerprint: %w", err))
}
diff --git a/transcoder/src/fingerprints_compare.go b/transcoder/src/fingerprints_compare.go
new file mode 100644
index 00000000..fcf856de
--- /dev/null
+++ b/transcoder/src/fingerprints_compare.go
@@ -0,0 +1,49 @@
+package src
+
+import (
+ "math/bits"
+)
+
+const (
+ MinOverlapDuration = 15.0
+ MinSilenceDuration = 2.0
+ // Correlation threshold (0.0-1.0) above which a match is considered valid.
+ // Each fingerprint sub-band has 32 bits; we consider a match if fewer than
+ // this fraction of bits differ on average.
+ MatchThreshold = 0.35
+)
+
// Overlap describes a segment of audio common to two fingerprints.
type Overlap struct {
	StartFirst  float64 // start of the shared segment in the first fingerprint, in seconds
	StartSecond float64 // start of the shared segment in the second fingerprint, in seconds
	Duration    float64 // length of the shared segment, in seconds
	Accuracy    int     // match confidence, 0-100
}

// Match describes where a needle fingerprint was found inside a haystack
// fingerprint.
type Match struct {
	Start    float64 // start of the match inside the haystack, in seconds
	Duration float64 // duration of the match, in seconds
	Accuracy int     // match confidence, 0-100
}
+
// hammingDistance counts the bit positions at which a and b differ.
func hammingDistance(a, b uint32) int {
	diff := a ^ b
	return bits.OnesCount32(diff)
}
+
+func segmentCorrelation(fp1 []uint32, fp2 []uint32) float64 {
+ length := min(len(fp1), len(fp2))
+ diffBits := 0
+ for i := range length {
+ diffBits += hammingDistance(fp1[i], fp2[i])
+ }
+ return 1.0 - float64(diffBits)/float64(length*32)
+}
+
// FpFindOverlap finds audio segments shared by two fingerprints.
// Not implemented yet: always reports no overlaps.
func FpFindOverlap(fp1 []uint32, fp2 []uint32) ([]Overlap, error) {
	return nil, nil
}

// FpFindContain searches for fp2 inside fp1.
// Not implemented yet: always reports no match.
func FpFindContain(fp1 []uint32, fp2 []uint32) (*Match, error) {
	return nil, nil
}
+
diff --git a/transcoder/src/info.go b/transcoder/src/info.go
index 9e5c1b4b..b3875555 100644
--- a/transcoder/src/info.go
+++ b/transcoder/src/info.go
@@ -3,9 +3,12 @@ package src
import (
"cmp"
"context"
+ "crypto/sha1"
"encoding/base64"
+ "encoding/hex"
"fmt"
"mime"
+ "os"
"path/filepath"
"strconv"
"strings"
@@ -348,3 +351,16 @@ func RetriveMediaInfo(path string, sha string) (*MediaInfo, error) {
}
return &ret, nil
}
+
+// ComputeSha computes a SHA1 hash of the file path and its modification time.
+// This is used as a cache key to detect when a file has changed.
+func ComputeSha(path string) (string, error) {
+ info, err := os.Stat(path)
+ if err != nil {
+ return "", err
+ }
+ h := sha1.New()
+ h.Write([]byte(path))
+ h.Write([]byte(info.ModTime().String()))
+ return hex.EncodeToString(h.Sum(nil)), nil
+}
From 3874ff723863f620948c2fb3c0bc173e070dbc43 Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Tue, 14 Apr 2026 17:51:31 +0200
Subject: [PATCH 05/27] Implement FpFindOverlap function
---
transcoder/src/fingerprints_compare.go | 205 +++++++++++++++++++++++--
1 file changed, 195 insertions(+), 10 deletions(-)
diff --git a/transcoder/src/fingerprints_compare.go b/transcoder/src/fingerprints_compare.go
index fcf856de..72665169 100644
--- a/transcoder/src/fingerprints_compare.go
+++ b/transcoder/src/fingerprints_compare.go
@@ -4,13 +4,30 @@ import (
"math/bits"
)
+// See how acoustid handles comparison:
+// https://bitbucket.org/acoustid/acoustid-server/src/cb303c2a3588ff055b7669cf6f1711a224ab9183/postgresql/acoustid_compare.c?at=master
+
const (
MinOverlapDuration = 15.0
MinSilenceDuration = 2.0
+
// Correlation threshold (0.0-1.0) above which a match is considered valid.
- // Each fingerprint sub-band has 32 bits; we consider a match if fewer than
- // this fraction of bits differ on average.
- MatchThreshold = 0.35
+ // Uses the AcoustID-style formula: 1.0 - 2.0 * biterror / (32 * length),
+ // where random noise scores ~0.0 and identical audio scores 1.0.
+ MatchThreshold = 0.1
+
+ // Number of most-significant bits used as a hash key for offset voting.
+ // Matches AcoustID's MATCH_BITS. The top bits of a chromaprint value are
+ // the most discriminative (classifiers are ordered by importance).
+ MatchBits = 14
+
+ // Chromaprint encodes silence as this specific value.
+ // We skip it during offset voting to avoid false matches.
+ SilenceValue = 627964279
+
+ // Number of samples per correlation block (~2 seconds at 7.8125 samples/s).
+ // Segments are evaluated in blocks of this size to find contiguous matching runs.
+ CorrBlockSize = 16
)
type Overlap struct {
@@ -30,20 +47,188 @@ func hammingDistance(a, b uint32) int {
return bits.OnesCount32(a ^ b)
}
+// segmentCorrelation computes a similarity score between two aligned
+// fingerprint slices using the AcoustID formula.
+// Returns a value in [0.0, 1.0] where 0.0 means completely different
+// (or random noise) and 1.0 means identical.
func segmentCorrelation(fp1 []uint32, fp2 []uint32) float64 {
length := min(len(fp1), len(fp2))
- diffBits := 0
- for i := range length {
- diffBits += hammingDistance(fp1[i], fp2[i])
+ if length == 0 {
+ return 0
}
- return 1.0 - float64(diffBits)/float64(length*32)
+ biterror := 0
+ for i := range length {
+ biterror += hammingDistance(fp1[i], fp2[i])
+ }
+ score := 1.0 - 2.0*float64(biterror)/float64(32*length)
+ return max(0, score)
}
+func matchStrip(v uint32) uint16 {
+ return uint16(v >> (32 - MatchBits))
+}
+
+// findBestOffset discovers the time offset that best aligns two fingerprints.
+//
+// It follows AcoustID's match_fingerprints2 approach:
+// 1. Hash each fingerprint value by its top 14 bits into a fixed-size table,
+// storing the last seen position for each hash bucket.
+// 2. For each hash bucket present in both tables, vote for the offset
+// (position_in_fp1 - position_in_fp2).
+// 3. The offset with the most votes wins.
+// 4. A diversity check rejects matches caused by repetitive/silent audio.
+func findBestOffset(fp1, fp2 []uint32) *int {
+ offsets1 := make(map[uint16]int)
+ offsets2 := make(map[uint16]int)
+
+ for i, v := range fp1 {
+ if v == SilenceValue {
+ continue
+ }
+ key := matchStrip(v)
+ offsets1[key] = i + 1
+ }
+
+ for i, v := range fp2 {
+ if v == SilenceValue {
+ continue
+ }
+ key := matchStrip(v)
+ offsets2[key] = i + 1
+ }
+
+ if len(offsets1) == 0 || len(offsets2) == 0 {
+ return nil
+ }
+
+ votes := make(map[int]int)
+ topCount := 0
+ topOffset := 0
+
+ for key, a := range offsets1 {
+ b, ok := offsets2[key]
+ if !ok {
+ continue
+ }
+ offset := a - b
+ votes[offset]++
+ if votes[offset] > topCount {
+ topCount = votes[offset]
+ topOffset = offset
+ }
+ }
+
+ // Diversity check: reject if the top offset got very few votes relative
+ // to the number of unique values. This filters out repetitive audio
+ // (silence, static noise) that would produce spurious matches.
+ // (at least 2% of values must match with said offset)
+ if topCount < max(len(offsets1), len(offsets2))*2/100 {
+ return nil
+ }
+ return new(topOffset)
+}
+
+// alignFingerprints returns the sub-slices of fp1 and fp2 that overlap
+// when fp1 is shifted by `offset` positions relative to fp2.
+// offset = position_in_fp1 - position_in_fp2.
+// Also returns the starting indices in fp1 and fp2.
// alignFingerprints returns the sub-slices of fp1 and fp2 that overlap when
// fp1 is shifted by `offset` positions relative to fp2, where
// offset = position_in_fp1 - position_in_fp2, plus the starting indices of
// the overlap in each fingerprint. Returns empty slices when the shift
// leaves no overlap.
func alignFingerprints(fp1, fp2 []uint32, offset int) ([]uint32, []uint32, int, int) {
	var start1, start2 int
	switch {
	case offset > 0:
		start1 = offset
	default:
		start2 = -offset
	}

	overlap := min(len(fp1)-start1, len(fp2)-start2)
	if overlap <= 0 {
		return nil, nil, 0, 0
	}
	return fp1[start1 : start1+overlap], fp2[start2 : start2+overlap], start1, start2
}
+
+// findMatchingRuns divides the aligned fingerprints into fixed-size blocks,
+// computes the correlation of each block, and finds contiguous runs of
+// blocks whose correlation exceeds MatchThreshold. Each run that is at least
+// MinOverlapDuration long is returned as an Overlap.
+func findMatchingRuns(fp1, fp2 []uint32, start1, start2 int) []Overlap {
+ length := min(len(fp1), len(fp2))
+ minSamples := secToSamples(MinOverlapDuration)
+ if length < minSamples {
+ return nil
+ }
+
+ nblocks := length / CorrBlockSize
+ blockCorr := make([]float64, nblocks)
+ for b := range nblocks {
+ lo := b * CorrBlockSize
+ hi := lo + CorrBlockSize
+ blockCorr[b] = segmentCorrelation(fp1[lo:hi], fp2[lo:hi])
+ }
+
+ // Find contiguous runs of blocks above threshold.
+ var overlaps []Overlap
+ inRun := false
+ runStart := 0
+
+ // Handle a run that extends to the last block.
+ nblocks++
+ blockCorr = append(blockCorr, MatchThreshold)
+
+ for b := range nblocks {
+ if blockCorr[b] >= MatchThreshold {
+ inRun = true
+ runStart = min(runStart, b)
+ continue
+ }
+ if !inRun {
+ continue
+ }
+
+ inRun = false
+ start := runStart * CorrBlockSize
+ end := b * CorrBlockSize
+ if end-start >= minSamples {
+ corr := segmentCorrelation(fp1[start:end], fp2[start:end])
+ overlaps = append(overlaps, Overlap{
+ StartFirst: samplesToSec(start1 + start),
+ StartSecond: samplesToSec(start2 + start),
+ Duration: samplesToSec(end - start),
+ Accuracy: max(0, min(int(corr*100), 100)),
+ })
+ }
+ }
+
+ return overlaps
+}
+
// FpFindOverlap finds all similar segments (like shared intro music) between
// two chromaprint fingerprints.
//
//  1. Hash each fingerprint value by its top 14 bits to find the best
//     time-offset alignment between the two fingerprints (like
//     AcoustID's match_fingerprints2).
//  2. Align the fingerprints at that offset.
//  3. Divide the aligned region into ~2-second blocks and compute the
//     correlation per block using the AcoustID scoring formula.
//  4. Find contiguous runs of high-correlation blocks that are at least
//     MinOverlapDuration long.
//
// A nil, nil return means no trustworthy alignment or overlap was found.
func FpFindOverlap(fp1 []uint32, fp2 []uint32) ([]Overlap, error) {
	// No usable offset: the fingerprints share nothing distinctive.
	offset := findBestOffset(fp1, fp2)
	if offset == nil {
		return nil, nil
	}

	a1, a2, s1, s2 := alignFingerprints(fp1, fp2, *offset)
	if len(a1) == 0 {
		return nil, nil
	}

	runs := findMatchingRuns(a1, a2, s1, s2)
	return runs, nil
}
-func FpFindContain(fp1 []uint32, fp2 []uint32) (*Match, error) {
+func FpFindContain(haystack []uint32, needle []uint32) (*Match, error) {
return nil, nil
}
-
From 841212de52e0020ad8b066f72c8fb7431e1949fb Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Tue, 14 Apr 2026 23:06:49 +0200
Subject: [PATCH 06/27] Implement `FpFindContain` function
---
transcoder/src/fingerprints_compare.go | 17 ++++++++++++++++-
1 file changed, 16 insertions(+), 1 deletion(-)
diff --git a/transcoder/src/fingerprints_compare.go b/transcoder/src/fingerprints_compare.go
index 72665169..0dc9ee8d 100644
--- a/transcoder/src/fingerprints_compare.go
+++ b/transcoder/src/fingerprints_compare.go
@@ -230,5 +230,20 @@ func FpFindOverlap(fp1 []uint32, fp2 []uint32) ([]Overlap, error) {
}
func FpFindContain(haystack []uint32, needle []uint32) (*Match, error) {
- return nil, nil
+ offset := findBestOffset(haystack, needle)
+ if offset == nil || *offset < 0 || *offset+len(needle) < len(haystack) {
+ return nil, nil
+ }
+
+ corr := segmentCorrelation(haystack[*offset:*offset+len(needle)], needle)
+ if corr < MatchThreshold {
+ return nil, nil
+ }
+
+ accuracy := min(int(corr*100), 100)
+ return &Match{
+ Start: samplesToSec(*offset),
+ Duration: samplesToSec(len(needle)),
+ Accuracy: accuracy,
+ }, nil
}
From ee9125b427456ca9a85127597e577e27aede0329 Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Tue, 14 Apr 2026 23:06:49 +0200
Subject: [PATCH 07/27] Update prepare caller
---
api/src/controllers/video-metadata.ts | 74 ++++++++++++++++++++++++++-
api/src/websockets.ts | 23 ++-------
2 files changed, 75 insertions(+), 22 deletions(-)
diff --git a/api/src/controllers/video-metadata.ts b/api/src/controllers/video-metadata.ts
index c4837b52..53ebb5ae 100644
--- a/api/src/controllers/video-metadata.ts
+++ b/api/src/controllers/video-metadata.ts
@@ -1,13 +1,16 @@
-import { eq } from "drizzle-orm";
+import { getLogger } from "@logtape/logtape";
+import { eq, and } from "drizzle-orm";
import { Elysia, t } from "elysia";
import slugify from "slugify";
import { auth } from "~/auth";
import { db } from "~/db";
-import { entryVideoJoin, videos } from "~/db/schema";
+import { entries, entryVideoJoin, videos } from "~/db/schema";
import { KError } from "~/models/error";
import { isUuid } from "~/models/utils";
import { Video } from "~/models/video";
+const logger = getLogger();
+
export const videosMetadata = new Elysia({
prefix: "/videos",
tags: ["videos"],
@@ -188,4 +191,71 @@ export const videosMetadata = new Elysia({
},
},
},
+ )
+ .get(
+ ":id/prepare",
+ async ({ params: { id }, headers: { authorization } }) => {
+ await prepareVideo(id, authorization!);
+ },
+ {
+ detail: { description: "Prepare a video for playback" },
+ params: t.Object({
+ id: t.String({
+ description: "The id or slug of the video to watch.",
+ example: "made-in-abyss-s1e13",
+ }),
+ }),
+ response: {
+ 302: t.Void({
+ description:
+ "Prepare said video for playback (compute everything possible and cache it)",
+ }),
+ 404: {
+ ...KError,
+ description: "No video found with the given id or slug.",
+ },
+ },
+ },
);
+
+export const prepareVideo = async (slug: string, auth: string) => {
+ logger.info("Preparing next video {slug}", { slug });
+ const [vid] = await db
+ .select({ path: videos.path, show: entries.showPk, order: entries.order })
+ .from(videos)
+ .innerJoin(entryVideoJoin, eq(videos.pk, entryVideoJoin.videoPk))
+ .leftJoin(entries, eq(entries.pk, entryVideoJoin.entryPk))
+ .where(eq(entryVideoJoin.slug, slug))
+ .limit(1);
+	if (!vid) return;
+	const related = vid.show
+ ? await db
+ .select({ order: entries.order, path: videos.path })
+ .from(entries)
+ .innerJoin(entryVideoJoin, eq(entries.pk, entryVideoJoin.entryPk))
+ .innerJoin(videos, eq(videos.pk, entryVideoJoin.videoPk))
+ .where(and(eq(entries.showPk, vid.show), eq(entries.kind, "episode")))
+ .orderBy(entries.order)
+ : [];
+ const idx = related.findIndex((x) => x.order === vid.order);
+
+ const path = Buffer.from(vid.path, "utf8").toString("base64url");
+ await fetch(
+ new URL(
+ `/video/${path}/prepare`,
+ process.env.TRANSCODER_SERVER ?? "http://transcoder:7666",
+ ),
+ {
+ headers: {
+ authorization: auth,
+ "content-type": "application/json",
+ },
+ method: "POST",
+ body: JSON.stringify({
+ nearEpisodes: [related[idx - 1], related[idx + 1]]
+ .filter((x) => x)
+ .map((x) => x.path),
+ }),
+ },
+ );
+};
diff --git a/api/src/websockets.ts b/api/src/websockets.ts
index d41792dc..e885cc70 100644
--- a/api/src/websockets.ts
+++ b/api/src/websockets.ts
@@ -1,15 +1,13 @@
-import { getLogger } from "@logtape/logtape";
import type { TObject, TString } from "@sinclair/typebox";
import { eq } from "drizzle-orm";
import Elysia, { type TSchema, t } from "elysia";
import { auth } from "./auth";
import { updateProgress } from "./controllers/profiles/history";
import { getOrCreateProfile } from "./controllers/profiles/profile";
+import { prepareVideo } from "./controllers/video-metadata";
import { getVideos } from "./controllers/videos";
import { videos } from "./db/schema";
-const logger = getLogger();
-
const actionMap = {
ping: handler({
message(ws) {
@@ -61,23 +59,8 @@ const actionMap = {
languages: ["*"],
userId: ws.data.jwt.sub,
});
- if (!vid) return;
-
- logger.info("Preparing next video {videoId}", {
- videoId: vid.id,
- });
- const path = Buffer.from(vid.path, "utf8").toString("base64url");
- await fetch(
- new URL(
- `/video/${path}/prepare`,
- process.env.TRANSCODER_SERVER ?? "http://transcoder:7666",
- ),
- {
- headers: {
- authorization: ws.data.headers.authorization!,
- },
- },
- );
+ const next = vid?.next?.video;
+ if (next) await prepareVideo(next, ws.data.headers.authorization!);
}
},
}),
From 338b2712f48c05b4f56c5066b613302fc0f639a5 Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Wed, 15 Apr 2026 11:50:11 +0200
Subject: [PATCH 08/27] Add skip intro button on front
---
front/public/translations/en.json | 6 ++-
front/shell.nix | 2 +-
front/src/ui/player/controls/index.tsx | 8 ++++
front/src/ui/player/controls/skip-chapter.tsx | 38 +++++++++++++++++++
front/src/ui/player/controls/touch.tsx | 7 ++++
5 files changed, 59 insertions(+), 2 deletions(-)
create mode 100644 front/src/ui/player/controls/skip-chapter.tsx
diff --git a/front/public/translations/en.json b/front/public/translations/en.json
index ce6de080..5d43e75d 100644
--- a/front/public/translations/en.json
+++ b/front/public/translations/en.json
@@ -256,7 +256,11 @@
"unsupportedError": "Video codec not supported, transcoding in progress...",
"not-available": "{{entry}} is not available on kyoo yet, ask your server admins about it",
"fatal": "Fatal playback error",
- "entry-list": "Entry list"
+ "entry-list": "Entry list",
+ "skip-intro": "Skip intro",
+ "skip-credits": "Skip credits",
+ "skip-recap": "Skip recap",
+ "skip-preview": "Skip preview"
},
"search": {
"empty": "No result found. Try a different query."
diff --git a/front/shell.nix b/front/shell.nix
index 10ab171c..ac392a0a 100644
--- a/front/shell.nix
+++ b/front/shell.nix
@@ -3,7 +3,7 @@ pkgs.mkShell {
packages = with pkgs; [
bun
biome
- nodePackages.eas-cli
+ eas-cli
];
}
diff --git a/front/src/ui/player/controls/index.tsx b/front/src/ui/player/controls/index.tsx
index e9cda2b7..a93d3ba3 100644
--- a/front/src/ui/player/controls/index.tsx
+++ b/front/src/ui/player/controls/index.tsx
@@ -7,6 +7,7 @@ import { useIsTouch } from "~/primitives";
import { Back } from "./back";
import { BottomControls } from "./bottom-controls";
import { MiddleControls } from "./middle-controls";
+import { SkipChapterButton } from "./skip-chapter";
import { TouchControls } from "./touch";
export const Controls = ({
@@ -40,6 +41,7 @@ export const Controls = ({
const [hover, setHover] = useState(false);
const [menuOpened, setMenuOpened] = useState(false);
+ const [controlsVisible, setControlsVisible] = useState(false);
const hoverControls = {
onPointerEnter: (e) => {
@@ -61,6 +63,7 @@ export const Controls = ({
+
);
};
diff --git a/front/src/ui/player/controls/skip-chapter.tsx b/front/src/ui/player/controls/skip-chapter.tsx
new file mode 100644
index 00000000..c2417eb6
--- /dev/null
+++ b/front/src/ui/player/controls/skip-chapter.tsx
@@ -0,0 +1,38 @@
+import { useState } from "react";
+import { useTranslation } from "react-i18next";
+import { useEvent, type VideoPlayer } from "react-native-video";
+import type { Chapter } from "~/models";
+import { Button } from "~/primitives";
+
+export const SkipChapterButton = ({
+ player,
+ chapters,
+ isVisible,
+}: {
+ player: VideoPlayer;
+ chapters: Chapter[];
+ isVisible: boolean;
+}) => {
+ const { t } = useTranslation();
+
+ const [progress, setProgress] = useState(player.currentTime || 0);
+ useEvent(player, "onProgress", ({ currentTime }) => {
+ setProgress(currentTime);
+ });
+
+ const chapter = chapters.find(
+ (chapter) => chapter.startTime <= progress && progress < chapter.endTime,
+ );
+
+ if (!chapter || chapter.type === "content") return null;
+
+ if (!isVisible && progress >= chapter.startTime + 8) return null;
+
+ return (
+
+ {chapter && chapter.type !== "content" && (
+ {t(`player.chapters.${chapter.type}`)}
+ )}
);
From bf2c5efafdef2efc15e03ba9202fbae3a822a8ef Mon Sep 17 00:00:00 2001
From: Zoe Roux
Date: Fri, 17 Apr 2026 23:44:21 +0200
Subject: [PATCH 27/27] Add settings for auto-skip
---
README.md | 2 +
api/src/websockets.ts | 2 +-
front/public/translations/en.json | 21 ++++++
front/src/models/user.ts | 28 ++++++++
front/src/models/video-info.ts | 2 +
front/src/ui/player/controls/skip-chapter.tsx | 48 ++++++++++---
front/src/ui/player/scrubber.tsx | 2 +-
front/src/ui/settings/index.tsx | 3 +-
front/src/ui/settings/oidc.tsx | 2 +
front/src/ui/settings/playback.tsx | 69 ++++++++++++++++++-
transcoder/src/info.go | 2 +-
11 files changed, 165 insertions(+), 16 deletions(-)
diff --git a/README.md b/README.md
index 89024f99..be01ad17 100644
--- a/README.md
+++ b/README.md
@@ -24,6 +24,8 @@ Kyoo does not have a plugin system and aim to have every features built-in (see
- **Video Preview Thumbnails:** Simply hover the video's progress bar and see a preview of the video.
+- **Intro/Credit detection:** Automatically detect intro/credits with audio fingerprinting (or chapter title matching).
+
- **Enhanced Subtitle Support:** Subtitles are important, Kyoo supports PGS/VODSUB and SSA/ASS and uses the video's embedded fonts when available.
- **Anime Name Parsing**: Kyoo will match weird anime names (like `[Some-Stuffs] Jojo's Bizarre Adventure Stone Ocean 24 (1920x1080 Blu-Ray Opus) [2750810F].mkv`) without issue.
diff --git a/api/src/websockets.ts b/api/src/websockets.ts
index f2f7ac0a..7b45016e 100644
--- a/api/src/websockets.ts
+++ b/api/src/websockets.ts
@@ -67,7 +67,7 @@ const actionMap = {
logger.info("No next video to prepare for ${slug}", {
slug: vid.path,
});
- return
+ return;
}
await prepareVideo(next, ws.data.headers.authorization!);
}
diff --git a/front/public/translations/en.json b/front/public/translations/en.json
index 5beb771c..ba92ce8b 100644
--- a/front/public/translations/en.json
+++ b/front/public/translations/en.json
@@ -188,6 +188,27 @@
"label": "Subtitle language",
"description": "The default subtitle language used",
"none": "None"
+ },
+ "chapterSkip": {
+ "label": "Chapter skip",
+ "behaviors": {
+ "autoSkip": "Auto skip",
+ "autoSkipExceptFirstAppearance": "Auto skip except first appearance",
+ "showSkipButton": "Show skip button",
+ "disabled": "Do nothing"
+ },
+ "types": {
+ "recap": "Recap",
+ "intro": "Intro",
+ "credits": "Credits",
+ "preview": "Preview"
+ },
+ "descriptions": {
+ "recap": "Control what happens when a recap chapter starts",
+ "intro": "Control what happens when an intro chapter starts",
+ "credits": "Control what happens when a credits chapter starts",
+ "preview": "Control what happens when a preview chapter starts"
+ }
}
},
"account": {
diff --git a/front/src/models/user.ts b/front/src/models/user.ts
index eb3c68f9..daeb1049 100644
--- a/front/src/models/user.ts
+++ b/front/src/models/user.ts
@@ -1,5 +1,14 @@
import { z } from "zod/v4";
+const ChapterSkipBehavior = z
+ .enum([
+ "autoSkip",
+ "autoSkipExceptFirstAppearance",
+ "showSkipButton",
+ "disabled",
+ ])
+ .catch("showSkipButton");
+
export const User = z
.object({
id: z.string(),
@@ -28,11 +37,30 @@ export const User = z
.catch("original"),
audioLanguage: z.string().catch("default"),
subtitleLanguage: z.string().nullable().catch(null),
+ chapterSkip: z
+ .object({
+ recap: ChapterSkipBehavior,
+ intro: ChapterSkipBehavior,
+ credits: ChapterSkipBehavior,
+ preview: ChapterSkipBehavior,
+ })
+ .catch({
+ recap: "showSkipButton",
+ intro: "showSkipButton",
+ credits: "showSkipButton",
+ preview: "showSkipButton",
+ }),
})
.default({
downloadQuality: "original",
audioLanguage: "default",
subtitleLanguage: null,
+ chapterSkip: {
+ recap: "showSkipButton",
+ intro: "showSkipButton",
+ credits: "showSkipButton",
+ preview: "showSkipButton",
+ },
}),
}),
oidc: z
diff --git a/front/src/models/video-info.ts b/front/src/models/video-info.ts
index 80e0466d..ce7edd03 100644
--- a/front/src/models/video-info.ts
+++ b/front/src/models/video-info.ts
@@ -65,6 +65,8 @@ export const Chapter = z.object({
endTime: z.number(),
name: z.string(),
type: z.enum(["content", "recap", "intro", "credits", "preview"]),
+ firstAppearance: z.boolean().optional(),
+ matchAccuracy: z.number().optional(),
});
export type Chapter = z.infer;
diff --git a/front/src/ui/player/controls/skip-chapter.tsx b/front/src/ui/player/controls/skip-chapter.tsx
index 5be1fe1f..a316f075 100644
--- a/front/src/ui/player/controls/skip-chapter.tsx
+++ b/front/src/ui/player/controls/skip-chapter.tsx
@@ -1,8 +1,9 @@
-import { useState } from "react";
+import { useCallback, useEffect, useRef, useState } from "react";
import { useTranslation } from "react-i18next";
import { useEvent, type VideoPlayer } from "react-native-video";
import type { Chapter } from "~/models";
import { Button } from "~/primitives";
+import { useAccount } from "~/providers/account-context";
import { useFetch } from "~/query";
import { Info } from "~/ui/info";
import { cn, useQueryState } from "~/utils";
@@ -19,8 +20,10 @@ export const SkipChapterButton = ({
isVisible: boolean;
}) => {
const { t } = useTranslation();
+ const account = useAccount();
const [slug] = useQueryState("slug", undefined!);
const { data } = useFetch(Info.infoQuery(slug));
+ const lastAutoSkippedChapter = useRef(null);
const [progress, setProgress] = useState(player.currentTime || 0);
useEvent(player, "onProgress", ({ currentTime }) => {
@@ -31,23 +34,50 @@ export const SkipChapterButton = ({
(chapter) => chapter.startTime <= progress && progress < chapter.endTime,
);
- if (!chapter || chapter.type === "content") return null;
+ const behavior =
+ (chapter &&
+ chapter.type !== "content" &&
+ account?.claims.settings.chapterSkip[chapter.type]) ||
+ "showSkipButton";
+ const shouldAutoSkip =
+ behavior === "autoSkip" ||
+ (behavior === "autoSkipExceptFirstAppearance" && !chapter!.firstAppearance);
// delay credits appearance by a few seconds, we want to make sure it doesn't
// show on top of the end of the serie. it's common for the end credits music
// to start playing on top of the episode also.
- const start = chapter.startTime + +(chapter.type === "credits") * 4;
+ const start = chapter
+ ? chapter.startTime + +(chapter.type === "credits") * 4
+ : Infinity;
+
+ const skipChapter = useCallback(() => {
+ if (!chapter) return;
+ if (data?.durationSeconds && data.durationSeconds <= chapter.endTime + 3) {
+ return seekEnd();
+ }
+ player.seekTo(chapter.endTime);
+ }, [player, chapter, data?.durationSeconds, seekEnd]);
+
+ useEffect(() => {
+ if (
+ chapter &&
+ shouldAutoSkip &&
+ progress >= start &&
+ lastAutoSkippedChapter.current !== chapter.startTime
+ ) {
+ lastAutoSkippedChapter.current = chapter.startTime;
+ skipChapter();
+ }
+ }, [chapter, progress, shouldAutoSkip, start, skipChapter]);
+
+ if (!chapter || chapter.type === "content" || behavior === "disabled")
+ return null;
if (!isVisible && progress >= start + 8) return null;
return (
{
- if (data?.durationSeconds && data.durationSeconds <= chapter.endTime) {
- return seekEnd();
- }
- player.seekTo(chapter.endTime);
- }}
+ onPress={skipChapter}
className={cn(
"absolute right-safe bottom-2/10 m-8",
"z-20 bg-slate-900/70 px-4 py-2",
diff --git a/front/src/ui/player/scrubber.tsx b/front/src/ui/player/scrubber.tsx
index f68f1861..de6270de 100644
--- a/front/src/ui/player/scrubber.tsx
+++ b/front/src/ui/player/scrubber.tsx
@@ -1,4 +1,5 @@
import { useMemo, useState } from "react";
+import { useTranslation } from "react-i18next";
import { View } from "react-native";
import { useEvent, type VideoPlayer } from "react-native-video";
import type { Chapter } from "~/models";
@@ -7,7 +8,6 @@ import { useToken } from "~/providers/account-context";
import { type QueryIdentifier, useFetch } from "~/query";
import { useQueryState } from "~/utils";
import { toTimerString } from "./controls/progress";
-import { useTranslation } from "react-i18next";
type Thumb = {
from: number;
diff --git a/front/src/ui/settings/index.tsx b/front/src/ui/settings/index.tsx
index 76c5e7d1..7ef8ddb9 100644
--- a/front/src/ui/settings/index.tsx
+++ b/front/src/ui/settings/index.tsx
@@ -3,7 +3,7 @@ import { useAccount } from "~/providers/account-context";
import { AccountSettings } from "./account";
import { About, GeneralSettings } from "./general";
import { OidcSettings } from "./oidc";
-import { PlaybackSettings } from "./playback";
+import { ChapterSkipSettings, PlaybackSettings } from "./playback";
import { SessionsSettings } from "./sessions";
export const SettingsPage = () => {
@@ -12,6 +12,7 @@ export const SettingsPage = () => {
{account && }
+ {account && }
{account && }
{account && }
{account && }
diff --git a/front/src/ui/settings/oidc.tsx b/front/src/ui/settings/oidc.tsx
index 35f052df..93414c7b 100644
--- a/front/src/ui/settings/oidc.tsx
+++ b/front/src/ui/settings/oidc.tsx
@@ -23,6 +23,8 @@ export const OidcSettings = () => {
invalidate: ["auth", "users", "me"],
});
+ if (data && Object.keys(data.oidc).length === 0) return null;
+
return (
{unlinkError && {unlinkError}
}
diff --git a/front/src/ui/settings/playback.tsx b/front/src/ui/settings/playback.tsx
index ef396b3f..c3775df6 100644
--- a/front/src/ui/settings/playback.tsx
+++ b/front/src/ui/settings/playback.tsx
@@ -1,6 +1,10 @@
-import SubtitleLanguage from "@material-symbols/svg-400/rounded/closed_caption-fill.svg";
-import PlayModeI from "@material-symbols/svg-400/rounded/display_settings-fill.svg";
-import AudioLanguage from "@material-symbols/svg-400/rounded/music_note-fill.svg";
+import SubtitleLanguage from "@material-symbols/svg-400/rounded/closed_caption.svg";
+import PlayModeI from "@material-symbols/svg-400/rounded/display_settings.svg";
+import MovieInfo from "@material-symbols/svg-400/rounded/movie_info.svg";
+import AudioLanguage from "@material-symbols/svg-400/rounded/music_note.svg";
+import PlayArrow from "@material-symbols/svg-400/rounded/play_arrow.svg";
+import Replay from "@material-symbols/svg-400/rounded/replay.svg";
+import Theaters from "@material-symbols/svg-400/rounded/theaters.svg";
import langmap from "langmap";
import { useTranslation } from "react-i18next";
import { Select } from "~/primitives";
@@ -85,3 +89,62 @@ export const PlaybackSettings = () => {
);
};
+
+const defaultChapterSkipBehaviors = [
+ "autoSkip",
+ "showSkipButton",
+ "disabled",
+] as const;
+
+const introCreditsChapterSkipBehaviors = [
+ "autoSkip",
+ "autoSkipExceptFirstAppearance",
+ "showSkipButton",
+ "disabled",
+] as const;
+
+const chapterTypes = [
+ { type: "recap", icon: Replay },
+ { type: "intro", icon: PlayArrow },
+ { type: "credits", icon: Theaters },
+ { type: "preview", icon: MovieInfo },
+] as const;
+
+export const ChapterSkipSettings = () => {
+ const { t } = useTranslation();
+ const [chapterSkip, setChapterSkip] = useSetting("chapterSkip")!;
+
+ return (
+
+ {chapterTypes.map(({ type, icon }) => {
+ const values =
+ type === "intro" || type === "credits"
+ ? introCreditsChapterSkipBehaviors
+ : defaultChapterSkipBehaviors;
+
+ return (
+
+
+ );
+ })}
+
+ );
+};
diff --git a/transcoder/src/info.go b/transcoder/src/info.go
index 31f337a8..badacfab 100644
--- a/transcoder/src/info.go
+++ b/transcoder/src/info.go
@@ -158,7 +158,7 @@ type Chapter struct {
/// The type value is used to mark special chapters (opening/credits...)
Type ChapterType `json:"type" db:"type"`
// true only for introductions where the audio track is new (first time we'we heard this one in the serie)
- FirstAppearance *bool `json:"firstAppearance" db:"first_appearance"`
+ FirstAppearance *bool `json:"firstAppearance,omitempty" db:"first_appearance"`
/// Accuracy of the fingerprint match (0-100).
MatchAccuracy *int32 `json:"matchAccuracy,omitempty" db:"match_accuracy"`
}