22 Commits

Author SHA1 Message Date
renovate[bot]
44bb2d5bc0 chore(deps): update docker.io/traefik docker tag to v3.6.2 2025-12-01 02:27:38 +00:00
af4742ae0b Fix sqlarr of api (#1188) 2025-11-30 19:29:46 +00:00
acelinkio
e401ca98c0 downgrade cloudpirates postgres 0.12.0 (#1187) 2025-11-28 13:15:55 -08:00
Arlan Lloyd
a756c875fd 0.12.1 has a bug with rendering nested values 2025-11-28 21:13:10 +00:00
acelinkio
2ef26e5d02 add devspace (#1173) 2025-11-28 20:52:10 +01:00
acelinkio
e7d9002156 kyoo_api logs redact password & other sensitive fields (#1182) 2025-11-28 16:42:27 +00:00
acelinkio
28d2e193aa kyoo_api extension install specify schema (#1183) 2025-11-28 16:39:47 +00:00
ce5bee11c0 Use unnest in insertion methods (#1185) 2025-11-28 17:28:11 +01:00
60d59d7f7b Wrap every insert with a trace 2025-11-28 17:25:29 +01:00
464d720ef9 Fix unnest issues 2025-11-28 17:11:43 +01:00
8fc279d2ed Use unnest everywhere 2025-11-28 17:11:43 +01:00
a45e992339 Properly type unnestValues return 2025-11-28 17:11:43 +01:00
5f8ddd435a Use unnest for entries 2025-11-28 17:11:43 +01:00
d822463fe0 Add a trace for api migrations 2025-11-28 17:11:43 +01:00
acelinkio
3a0cbf786d fix(deps): update aws-sdk-go-v2 monorepo (#1159) 2025-11-24 13:12:25 -08:00
renovate[bot]
dfb4777a5d fix(deps): update aws-sdk-go-v2 monorepo 2025-11-24 20:29:44 +00:00
acelinkio
eea32c47e9 chore(deps): update postgres docker tag to v0.12.1 (#1181) 2025-11-24 09:49:58 -08:00
renovate[bot]
6bcd03b18e chore(deps): update postgres docker tag to v0.12.1 2025-11-24 13:46:46 +00:00
acelinkio
87a3df6897 chore(deps): update dependency @biomejs/biome to v2.3.6 (#1179) 2025-11-23 18:38:41 -08:00
acelinkio
7f7a16e9b5 chore(deps): update postgres docker tag to v0.12.0 (#1180) 2025-11-23 18:38:17 -08:00
renovate[bot]
b95dd9056b chore(deps): update postgres docker tag to v0.12.0 2025-11-24 02:22:25 +00:00
renovate[bot]
5044f941b1 chore(deps): update dependency @biomejs/biome to v2.3.6 2025-11-24 02:08:06 +00:00
32 changed files with 910 additions and 572 deletions

1
.gitignore vendored
View File

@@ -1,4 +1,5 @@
/video
.devspace/
.env
.venv
.idea

View File

@@ -20,7 +20,7 @@
"sharp": "^0.34.4",
},
"devDependencies": {
"@biomejs/biome": "2.3.5",
"@biomejs/biome": "2.3.6",
"@types/pg": "^8.15.5",
},
},
@@ -29,23 +29,23 @@
"drizzle-orm@0.44.7": "patches/drizzle-orm@0.44.7.patch",
},
"packages": {
"@biomejs/biome": ["@biomejs/biome@2.3.5", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.3.5", "@biomejs/cli-darwin-x64": "2.3.5", "@biomejs/cli-linux-arm64": "2.3.5", "@biomejs/cli-linux-arm64-musl": "2.3.5", "@biomejs/cli-linux-x64": "2.3.5", "@biomejs/cli-linux-x64-musl": "2.3.5", "@biomejs/cli-win32-arm64": "2.3.5", "@biomejs/cli-win32-x64": "2.3.5" }, "bin": { "biome": "bin/biome" } }, "sha512-HvLhNlIlBIbAV77VysRIBEwp55oM/QAjQEin74QQX9Xb259/XP/D5AGGnZMOyF1el4zcvlNYYR3AyTMUV3ILhg=="],
"@biomejs/biome": ["@biomejs/biome@2.3.6", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.3.6", "@biomejs/cli-darwin-x64": "2.3.6", "@biomejs/cli-linux-arm64": "2.3.6", "@biomejs/cli-linux-arm64-musl": "2.3.6", "@biomejs/cli-linux-x64": "2.3.6", "@biomejs/cli-linux-x64-musl": "2.3.6", "@biomejs/cli-win32-arm64": "2.3.6", "@biomejs/cli-win32-x64": "2.3.6" }, "bin": { "biome": "bin/biome" } }, "sha512-oqUhWyU6tae0MFsr/7iLe++QWRg+6jtUhlx9/0GmCWDYFFrK366sBLamNM7D9Y+c7YSynUFKr8lpEp1r6Sk7eA=="],
"@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.3.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-fLdTur8cJU33HxHUUsii3GLx/TR0BsfQx8FkeqIiW33cGMtUD56fAtrh+2Fx1uhiCsVZlFh6iLKUU3pniZREQw=="],
"@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.3.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-P4JWE5d8UayBxYe197QJwyW4ZHp0B+zvRIGCusOm1WbxmlhpAQA1zEqQuunHgSIzvyEEp4TVxiKGXNFZPg7r9Q=="],
"@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.3.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-qpT8XDqeUlzrOW8zb4k3tjhT7rmvVRumhi2657I2aGcY4B+Ft5fNwDdZGACzn8zj7/K1fdWjgwYE3i2mSZ+vOA=="],
"@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.3.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-I4rTebj+F/L9K93IU7yTFs8nQ6EhaCOivxduRha4w4WEZK80yoZ8OAdR1F33m4yJ/NfUuTUbP/Wjs+vKjlCoWA=="],
"@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.3.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-u/pybjTBPGBHB66ku4pK1gj+Dxgx7/+Z0jAriZISPX1ocTO8aHh8x8e7Kb1rB4Ms0nA/SzjtNOVJ4exVavQBCw=="],
"@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.3.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-JjYy83eVBnvuINZiqyFO7xx72v8Srh4hsgaacSBCjC22DwM6+ZvnX1/fj8/SBiLuUOfZ8YhU2pfq2Dzakeyg1A=="],
"@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.3.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-eGUG7+hcLgGnMNl1KHVZUYxahYAhC462jF/wQolqu4qso2MSk32Q+QrpN7eN4jAHAg7FUMIo897muIhK4hXhqg=="],
"@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.3.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-oK1NpIXIixbJ/4Tcx40cwiieqah6rRUtMGOHDeK2ToT7yUFVEvXUGRKqH0O4hqZ9tW8TcXNZKfgRH6xrsjVtGg=="],
"@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.3.5", "", { "os": "linux", "cpu": "x64" }, "sha512-XrIVi9YAW6ye0CGQ+yax0gLfx+BFOtKaNX74n+xHWla6Cl6huUmcKNO7HPx7BiKnJUzrxXY1qYlm7xMvi08X4g=="],
"@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.3.6", "", { "os": "linux", "cpu": "x64" }, "sha512-ZjPXzy5yN9wusIoX+8Zp4p6cL8r0NzJCXg/4r1KLVveIPXd2jKVlqZ6ZyzEq385WwU3OX5KOwQYLQsOc788waQ=="],
"@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.3.5", "", { "os": "linux", "cpu": "x64" }, "sha512-awVuycTPpVTH/+WDVnEEYSf6nbCBHf/4wB3lquwT7puhNg8R4XvonWNZzUsfHZrCkjkLhFH/vCZK5jHatD9FEg=="],
"@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.3.6", "", { "os": "linux", "cpu": "x64" }, "sha512-QvxB8GHQeaO4FCtwJpJjCgJkbHBbWxRHUxQlod+xeaYE6gtJdSkYkuxdKAQUZEOIsec+PeaDAhW9xjzYbwmOFA=="],
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.3.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-DlBiMlBZZ9eIq4H7RimDSGsYcOtfOIfZOaI5CqsWiSlbTfqbPVfWtCf92wNzx8GNMbu1s7/g3ZZESr6+GwM/SA=="],
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.3.6", "", { "os": "win32", "cpu": "arm64" }, "sha512-YM7hLHpwjdt8R7+O2zS1Vo2cKgqEeptiXB1tWW1rgjN5LlpZovBVKtg7zfwfRrFx3i08aNZThYpTcowpTlczug=="],
"@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.3.5", "", { "os": "win32", "cpu": "x64" }, "sha512-nUmR8gb6yvrKhtRgzwo/gDimPwnO5a4sCydf8ZS2kHIJhEmSmk+STsusr1LHTuM//wXppBawvSQi2xFXJCdgKQ=="],
"@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.3.6", "", { "os": "win32", "cpu": "x64" }, "sha512-psgNEYgMAobY5h+QHRBVR9xvg2KocFuBKm6axZWB/aD12NWhQjiVFQUjV6wMXhlH4iT0Q9c3yK5JFRiDC/rzHA=="],
"@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="],

23
api/devspace.yaml Normal file
View File

@@ -0,0 +1,23 @@
version: v2beta1
name: api
dev:
api:
imageSelector: ghcr.io/zoriya/kyoo_api
devImage: docker.io/oven/bun:latest
workingDir: /app
sync:
- path: .:/app
excludePaths:
- node_modules
startContainer: true
onUpload:
exec:
- command: bun install --frozen-lockfile
onChange:
- "./bun.lock"
command:
- bash
- -c
- "bun install && bun dev"
ports:
- port: "3567"

View File

@@ -24,7 +24,7 @@
"sharp": "^0.34.4"
},
"devDependencies": {
"@biomejs/biome": "2.3.5",
"@biomejs/biome": "2.3.6",
"@types/pg": "^8.15.5"
},
"module": "src/index.js",

View File

@@ -4,7 +4,7 @@ pkgs.mkShell {
bun
biome
# for psql to debug from the cli
postgresql_15
postgresql_18
# to build libvips (for sharp)
nodejs
node-gyp

View File

@@ -1,5 +1,5 @@
import path from "node:path";
import { getCurrentSpan, record, setAttributes } from "@elysiajs/opentelemetry";
import { getCurrentSpan, setAttributes } from "@elysiajs/opentelemetry";
import { SpanStatusCode } from "@opentelemetry/api";
import { encode } from "blurhash";
import { and, eq, is, lt, type SQL, sql } from "drizzle-orm";
@@ -9,7 +9,9 @@ import type { PoolClient } from "pg";
import sharp from "sharp";
import { db, type Transaction } from "~/db";
import { mqueue } from "~/db/schema/mqueue";
import { unnestValues } from "~/db/utils";
import type { Image } from "~/models/utils";
import { record } from "~/otel";
import { getFile } from "~/utils";
export const imageDir = process.env.IMAGES_PATH ?? "/images";
@@ -76,91 +78,87 @@ export const enqueueOptImage = (
};
};
export const flushImageQueue = async (
tx: Transaction,
imgQueue: ImageTask[],
priority: number,
) => {
if (!imgQueue.length) return;
record("enqueue images", async () => {
await tx
.insert(mqueue)
.values(imgQueue.map((x) => ({ kind: "image", message: x, priority })));
export const flushImageQueue = record(
"enqueueImages",
async (tx: Transaction, imgQueue: ImageTask[], priority: number) => {
if (!imgQueue.length) return;
await tx.insert(mqueue).select(
unnestValues(
imgQueue.map((x) => ({ kind: "image", message: x, priority })),
mqueue,
),
);
await tx.execute(sql`notify kyoo_image`);
});
};
},
);
export const processImages = async () => {
return record("download images", async () => {
let running = false;
async function processAll() {
if (running) return;
running = true;
export const processImages = record("processImages", async () => {
let running = false;
async function processAll() {
if (running) return;
running = true;
let found = true;
while (found) {
found = await processOne();
}
running = false;
let found = true;
while (found) {
found = await processOne();
}
running = false;
}
const client = (await db.$client.connect()) as PoolClient;
client.on("notification", (evt) => {
if (evt.channel !== "kyoo_image") return;
processAll();
});
await client.query("listen kyoo_image");
// start processing old tasks
await processAll();
return () => client.release(true);
const client = (await db.$client.connect()) as PoolClient;
client.on("notification", (evt) => {
if (evt.channel !== "kyoo_image") return;
processAll();
});
};
await client.query("listen kyoo_image");
async function processOne() {
return record("download", async () => {
return await db.transaction(async (tx) => {
const [item] = await tx
.select()
.from(mqueue)
.for("update", { skipLocked: true })
.where(and(eq(mqueue.kind, "image"), lt(mqueue.attempt, 5)))
.orderBy(mqueue.priority, mqueue.attempt, mqueue.createdAt)
.limit(1);
// start processing old tasks
await processAll();
return () => client.release(true);
});
if (!item) return false;
const processOne = record("download", async () => {
return await db.transaction(async (tx) => {
const [item] = await tx
.select()
.from(mqueue)
.for("update", { skipLocked: true })
.where(and(eq(mqueue.kind, "image"), lt(mqueue.attempt, 5)))
.orderBy(mqueue.priority, mqueue.attempt, mqueue.createdAt)
.limit(1);
const img = item.message as ImageTask;
setAttributes({ "item.url": img.url });
try {
const blurhash = await downloadImage(img.id, img.url);
const ret: Image = { id: img.id, source: img.url, blurhash };
if (!item) return false;
const table = sql.raw(img.table);
const column = sql.raw(img.column);
const img = item.message as ImageTask;
setAttributes({ "item.url": img.url });
try {
const blurhash = await downloadImage(img.id, img.url);
const ret: Image = { id: img.id, source: img.url, blurhash };
await tx.execute(sql`
const table = sql.raw(img.table);
const column = sql.raw(img.column);
await tx.execute(sql`
update ${table} set ${column} = ${ret}
where ${column}->'id' = ${sql.raw(`'"${img.id}"'::jsonb`)}
`);
await tx.delete(mqueue).where(eq(mqueue.id, item.id));
} catch (err: any) {
const span = getCurrentSpan();
if (span) {
span.recordException(err);
span.setStatus({ code: SpanStatusCode.ERROR });
}
console.error("Failed to download image", img.url, err.message);
await tx
.update(mqueue)
.set({ attempt: sql`${mqueue.attempt}+1` })
.where(eq(mqueue.id, item.id));
await tx.delete(mqueue).where(eq(mqueue.id, item.id));
} catch (err: any) {
const span = getCurrentSpan();
if (span) {
span.recordException(err);
span.setStatus({ code: SpanStatusCode.ERROR });
}
return true;
});
console.error("Failed to download image", img.url, err.message);
await tx
.update(mqueue)
.set({ attempt: sql`${mqueue.attempt}+1` })
.where(eq(mqueue.id, item.id));
}
return true;
});
}
});
async function downloadImage(id: string, url: string): Promise<string> {
const low = await getFile(path.join(imageDir, `${id}.low.jpg`))

View File

@@ -5,81 +5,89 @@ import { conflictUpdateAllExcept } from "~/db/utils";
import type { SeedCollection } from "~/models/collections";
import type { SeedMovie } from "~/models/movie";
import type { SeedSerie } from "~/models/serie";
import { record } from "~/otel";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
type ShowTrans = typeof showTranslations.$inferInsert;
export const insertCollection = async (
collection: SeedCollection | undefined,
show: (({ kind: "movie" } & SeedMovie) | ({ kind: "serie" } & SeedSerie)) & {
nextRefresh: string;
export const insertCollection = record(
"insertCollection",
async (
collection: SeedCollection | undefined,
show: (
| ({ kind: "movie" } & SeedMovie)
| ({ kind: "serie" } & SeedSerie)
) & {
nextRefresh: string;
},
) => {
if (!collection) return null;
const { translations, ...col } = collection;
return await db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const [ret] = await tx
.insert(shows)
.values({
kind: "collection",
status: "unknown",
startAir: show.kind === "movie" ? show.airDate : show.startAir,
endAir: show.kind === "movie" ? show.airDate : show.endAir,
nextRefresh: show.nextRefresh,
entriesCount: 0,
original: {} as any,
...col,
})
.onConflictDoUpdate({
target: shows.slug,
set: {
...conflictUpdateAllExcept(shows, [
"pk",
"id",
"slug",
"createdAt",
"startAir",
"endAir",
]),
startAir: sql`least(${shows.startAir}, excluded.start_air)`,
endAir: sql`greatest(${shows.endAir}, excluded.end_air)`,
},
})
.returning({ pk: shows.pk, id: shows.id, slug: shows.slug });
const trans: ShowTrans[] = Object.entries(translations).map(
([lang, tr]) => ({
pk: ret.pk,
language: lang,
...tr,
poster: enqueueOptImage(imgQueue, {
url: tr.poster,
column: showTranslations.poster,
}),
thumbnail: enqueueOptImage(imgQueue, {
url: tr.thumbnail,
column: showTranslations.thumbnail,
}),
logo: enqueueOptImage(imgQueue, {
url: tr.logo,
column: showTranslations.logo,
}),
banner: enqueueOptImage(imgQueue, {
url: tr.banner,
column: showTranslations.banner,
}),
}),
);
await flushImageQueue(tx, imgQueue, 100);
// we can't unnest values here because show translations contains arrays.
await tx
.insert(showTranslations)
.values(trans)
.onConflictDoUpdate({
target: [showTranslations.pk, showTranslations.language],
set: conflictUpdateAllExcept(showTranslations, ["pk", "language"]),
});
return ret;
});
},
) => {
if (!collection) return null;
const { translations, ...col } = collection;
return await db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const [ret] = await tx
.insert(shows)
.values({
kind: "collection",
status: "unknown",
startAir: show.kind === "movie" ? show.airDate : show.startAir,
endAir: show.kind === "movie" ? show.airDate : show.endAir,
nextRefresh: show.nextRefresh,
entriesCount: 0,
original: {} as any,
...col,
})
.onConflictDoUpdate({
target: shows.slug,
set: {
...conflictUpdateAllExcept(shows, [
"pk",
"id",
"slug",
"createdAt",
"startAir",
"endAir",
]),
startAir: sql`least(${shows.startAir}, excluded.start_air)`,
endAir: sql`greatest(${shows.endAir}, excluded.end_air)`,
},
})
.returning({ pk: shows.pk, id: shows.id, slug: shows.slug });
const trans: ShowTrans[] = Object.entries(translations).map(
([lang, tr]) => ({
pk: ret.pk,
language: lang,
...tr,
poster: enqueueOptImage(imgQueue, {
url: tr.poster,
column: showTranslations.poster,
}),
thumbnail: enqueueOptImage(imgQueue, {
url: tr.thumbnail,
column: showTranslations.thumbnail,
}),
logo: enqueueOptImage(imgQueue, {
url: tr.logo,
column: showTranslations.logo,
}),
banner: enqueueOptImage(imgQueue, {
url: tr.banner,
column: showTranslations.banner,
}),
}),
);
await flushImageQueue(tx, imgQueue, 100);
await tx
.insert(showTranslations)
.values(trans)
.onConflictDoUpdate({
target: [showTranslations.pk, showTranslations.language],
set: conflictUpdateAllExcept(showTranslations, ["pk", "language"]),
});
return ret;
});
};
);

View File

@@ -6,8 +6,9 @@ import {
entryVideoJoin,
videos,
} from "~/db/schema";
import { conflictUpdateAllExcept, values } from "~/db/utils";
import { conflictUpdateAllExcept, unnest, unnestValues } from "~/db/utils";
import type { SeedEntry as SEntry, SeedExtra as SExtra } from "~/models/entry";
import { record } from "~/otel";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
import { guessNextRefresh } from "../refresh";
import { updateAvailableCount, updateAvailableSince } from "./shows";
@@ -42,160 +43,164 @@ const generateSlug = (
}
};
export const insertEntries = async (
show: { pk: number; slug: string; kind: "movie" | "serie" | "collection" },
items: (SeedEntry | SeedExtra)[],
onlyExtras = false,
) => {
if (!items.length) return [];
export const insertEntries = record(
"insertEntries",
async (
show: { pk: number; slug: string; kind: "movie" | "serie" | "collection" },
items: (SeedEntry | SeedExtra)[],
onlyExtras = false,
) => {
if (!items.length) return [];
const retEntries = await db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const vals: EntryI[] = items.map((seed) => {
const { translations, videos, video, ...entry } = seed;
return {
...entry,
showPk: show.pk,
slug: generateSlug(show.slug, seed),
thumbnail: enqueueOptImage(imgQueue, {
url: seed.thumbnail,
column: entries.thumbnail,
}),
nextRefresh:
entry.kind !== "extra"
? guessNextRefresh(entry.airDate ?? new Date())
: guessNextRefresh(new Date()),
episodeNumber:
entry.kind === "episode"
? entry.episodeNumber
: entry.kind === "special"
? entry.number
: undefined,
};
});
const ret = await tx
.insert(entries)
.values(vals)
.onConflictDoUpdate({
target: entries.slug,
set: conflictUpdateAllExcept(entries, [
"pk",
"showPk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: entries.pk, id: entries.id, slug: entries.slug });
const trans: EntryTransI[] = items.flatMap((seed, i) => {
if (seed.kind === "extra") {
return [
{
pk: ret[i].pk,
// yeah we hardcode the language to extra because if we want to support
// translations one day it won't be awkward
language: "extra",
name: seed.name,
description: null,
poster: undefined,
},
];
}
return Object.entries(seed.translations).map(([lang, tr]) => ({
// assumes ret is ordered like items.
pk: ret[i].pk,
language: lang,
...tr,
poster:
seed.kind === "movie"
? enqueueOptImage(imgQueue, {
url: (tr as any).poster,
column: entryTranslations.poster,
})
: undefined,
}));
});
await flushImageQueue(tx, imgQueue, 0);
await tx
.insert(entryTranslations)
.values(trans)
.onConflictDoUpdate({
target: [entryTranslations.pk, entryTranslations.language],
set: conflictUpdateAllExcept(entryTranslations, ["pk", "language"]),
const retEntries = await db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const vals: EntryI[] = items.map((seed) => {
const { translations, videos, video, ...entry } = seed;
return {
...entry,
showPk: show.pk,
slug: generateSlug(show.slug, seed),
thumbnail: enqueueOptImage(imgQueue, {
url: seed.thumbnail,
column: entries.thumbnail,
}),
nextRefresh:
entry.kind !== "extra"
? guessNextRefresh(entry.airDate ?? new Date())
: guessNextRefresh(new Date()),
episodeNumber:
entry.kind === "episode"
? entry.episodeNumber
: entry.kind === "special"
? entry.number
: undefined,
};
});
const ret = await tx
.insert(entries)
.select(unnestValues(vals, entries))
.onConflictDoUpdate({
target: entries.slug,
set: conflictUpdateAllExcept(entries, [
"pk",
"showPk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: entries.pk, id: entries.id, slug: entries.slug });
return ret;
});
const trans: EntryTransI[] = items.flatMap((seed, i) => {
if (seed.kind === "extra") {
return [
{
pk: ret[i].pk,
// yeah we hardcode the language to extra because if we want to support
// translations one day it won't be awkward
language: "extra",
name: seed.name,
description: null,
poster: undefined,
},
];
}
const vids = items.flatMap((seed, i) => {
if (seed.kind === "extra") {
return {
videoId: seed.video,
return Object.entries(seed.translations).map(([lang, tr]) => ({
// assumes ret is ordered like items.
pk: ret[i].pk,
language: lang,
...tr,
poster:
seed.kind === "movie"
? enqueueOptImage(imgQueue, {
url: (tr as any).poster,
column: entryTranslations.poster,
})
: undefined,
}));
});
await flushImageQueue(tx, imgQueue, 0);
await tx
.insert(entryTranslations)
.select(unnestValues(trans, entryTranslations))
.onConflictDoUpdate({
target: [entryTranslations.pk, entryTranslations.language],
set: conflictUpdateAllExcept(entryTranslations, ["pk", "language"]),
});
return ret;
});
const vids = items.flatMap((seed, i) => {
if (seed.kind === "extra") {
return {
videoId: seed.video,
entryPk: retEntries[i].pk,
entrySlug: retEntries[i].slug,
needRendering: false,
};
}
if (!seed.videos) return [];
return seed.videos.map((x, j) => ({
videoId: x,
entryPk: retEntries[i].pk,
entrySlug: retEntries[i].slug,
needRendering: false,
};
// The first video should not have a rendering.
needRendering: j !== 0 && seed.videos!.length > 1,
}));
});
if (vids.length === 0) {
// we have not added videos but we need to update the `entriesCount`
if (show.kind === "serie" && !onlyExtras)
await updateAvailableCount(db, [show.pk], true);
return retEntries.map((x) => ({ id: x.id, slug: x.slug, videos: [] }));
}
if (!seed.videos) return [];
return seed.videos.map((x, j) => ({
videoId: x,
entryPk: retEntries[i].pk,
entrySlug: retEntries[i].slug,
// The first video should not have a rendering.
needRendering: j !== 0 && seed.videos!.length > 1,
const retVideos = await db.transaction(async (tx) => {
const ret = await tx
.insert(entryVideoJoin)
.select(
db
.select({
entryPk: sql<number>`vids."entryPk"`.as("entry"),
videoPk: videos.pk,
slug: computeVideoSlug(
sql`vids."entrySlug"`,
sql`vids."needRendering"`,
),
})
.from(
unnest(vids, "vids", {
entryPk: "integer",
entrySlug: "varchar(255)",
needRendering: "boolean",
videoId: "uuid",
}),
)
.innerJoin(videos, eq(videos.id, sql`vids."videoId"`)),
)
.onConflictDoNothing()
.returning({
slug: entryVideoJoin.slug,
entryPk: entryVideoJoin.entryPk,
});
if (!onlyExtras)
await updateAvailableCount(tx, [show.pk], show.kind === "serie");
await updateAvailableSince(tx, [...new Set(vids.map((x) => x.entryPk))]);
return ret;
});
return retEntries.map((entry) => ({
id: entry.id,
slug: entry.slug,
videos: retVideos.filter((x) => x.entryPk === entry.pk),
}));
});
if (vids.length === 0) {
// we have not added videos but we need to update the `entriesCount`
if (show.kind === "serie" && !onlyExtras)
await updateAvailableCount(db, [show.pk], true);
return retEntries.map((x) => ({ id: x.id, slug: x.slug, videos: [] }));
}
const retVideos = await db.transaction(async (tx) => {
const ret = await tx
.insert(entryVideoJoin)
.select(
db
.select({
entryPk: sql<number>`vids.entryPk`.as("entry"),
videoPk: videos.pk,
slug: computeVideoSlug(
sql`vids.entrySlug`,
sql`vids.needRendering`,
),
})
.from(
values(vids, {
entryPk: "integer",
needRendering: "boolean",
videoId: "uuid",
}).as("vids"),
)
.innerJoin(videos, eq(videos.id, sql`vids.videoId`)),
)
.onConflictDoNothing()
.returning({
slug: entryVideoJoin.slug,
entryPk: entryVideoJoin.entryPk,
});
if (!onlyExtras)
await updateAvailableCount(tx, [show.pk], show.kind === "serie");
await updateAvailableSince(tx, [...new Set(vids.map((x) => x.entryPk))]);
return ret;
});
return retEntries.map((entry) => ({
id: entry.id,
slug: entry.slug,
videos: retVideos.filter((x) => x.entryPk === entry.pk),
}));
};
},
);
export function computeVideoSlug(entrySlug: SQL | Column, needsRendering: SQL) {
return sql<string>`

View File

@@ -1,77 +1,78 @@
import { db } from "~/db";
import { seasons, seasonTranslations } from "~/db/schema";
import { conflictUpdateAllExcept } from "~/db/utils";
import { conflictUpdateAllExcept, unnestValues } from "~/db/utils";
import type { SeedSeason } from "~/models/season";
import { record } from "~/otel";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
import { guessNextRefresh } from "../refresh";
type SeasonI = typeof seasons.$inferInsert;
type SeasonTransI = typeof seasonTranslations.$inferInsert;
export const insertSeasons = async (
show: { pk: number; slug: string },
items: SeedSeason[],
) => {
if (!items.length) return [];
export const insertSeasons = record(
"insertSeasons",
async (show: { pk: number; slug: string }, items: SeedSeason[]) => {
if (!items.length) return [];
return db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const vals: SeasonI[] = items.map((x) => {
const { translations, ...season } = x;
return {
...season,
showPk: show.pk,
slug:
season.seasonNumber === 0
? `${show.slug}-specials`
: `${show.slug}-s${season.seasonNumber}`,
nextRefresh: guessNextRefresh(season.startAir ?? new Date()),
};
});
const ret = await tx
.insert(seasons)
.values(vals)
.onConflictDoUpdate({
target: seasons.slug,
set: conflictUpdateAllExcept(seasons, [
"pk",
"showPk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: seasons.pk, id: seasons.id, slug: seasons.slug });
const trans: SeasonTransI[] = items.flatMap((seed, i) =>
Object.entries(seed.translations).map(([lang, tr]) => ({
// assumes ret is ordered like items.
pk: ret[i].pk,
language: lang,
...tr,
poster: enqueueOptImage(imgQueue, {
url: tr.poster,
column: seasonTranslations.poster,
}),
thumbnail: enqueueOptImage(imgQueue, {
url: tr.thumbnail,
column: seasonTranslations.thumbnail,
}),
banner: enqueueOptImage(imgQueue, {
url: tr.banner,
column: seasonTranslations.banner,
}),
})),
);
await flushImageQueue(tx, imgQueue, -10);
await tx
.insert(seasonTranslations)
.values(trans)
.onConflictDoUpdate({
target: [seasonTranslations.pk, seasonTranslations.language],
set: conflictUpdateAllExcept(seasonTranslations, ["pk", "language"]),
return db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const vals: SeasonI[] = items.map((x) => {
const { translations, ...season } = x;
return {
...season,
showPk: show.pk,
slug:
season.seasonNumber === 0
? `${show.slug}-specials`
: `${show.slug}-s${season.seasonNumber}`,
nextRefresh: guessNextRefresh(season.startAir ?? new Date()),
};
});
const ret = await tx
.insert(seasons)
.select(unnestValues(vals, seasons))
.onConflictDoUpdate({
target: seasons.slug,
set: conflictUpdateAllExcept(seasons, [
"pk",
"showPk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: seasons.pk, id: seasons.id, slug: seasons.slug });
return ret;
});
};
const trans: SeasonTransI[] = items.flatMap((seed, i) =>
Object.entries(seed.translations).map(([lang, tr]) => ({
// assumes ret is ordered like items.
pk: ret[i].pk,
language: lang,
...tr,
poster: enqueueOptImage(imgQueue, {
url: tr.poster,
column: seasonTranslations.poster,
}),
thumbnail: enqueueOptImage(imgQueue, {
url: tr.thumbnail,
column: seasonTranslations.thumbnail,
}),
banner: enqueueOptImage(imgQueue, {
url: tr.banner,
column: seasonTranslations.banner,
}),
})),
);
await flushImageQueue(tx, imgQueue, -10);
await tx
.insert(seasonTranslations)
.select(unnestValues(trans, seasonTranslations))
.onConflictDoUpdate({
target: [seasonTranslations.pk, seasonTranslations.language],
set: conflictUpdateAllExcept(seasonTranslations, ["pk", "language"]),
});
return ret;
});
},
);

View File

@@ -21,88 +21,93 @@ import type { SeedCollection } from "~/models/collections";
import type { SeedMovie } from "~/models/movie";
import type { SeedSerie } from "~/models/serie";
import type { Original } from "~/models/utils";
import { record } from "~/otel";
import { getYear } from "~/utils";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
type Show = typeof shows.$inferInsert;
type ShowTrans = typeof showTranslations.$inferInsert;
export const insertShow = async (
show: Omit<Show, "original">,
original: Original & {
poster: string | null;
thumbnail: string | null;
banner: string | null;
logo: string | null;
},
translations:
| SeedMovie["translations"]
| SeedSerie["translations"]
| SeedCollection["translations"],
) => {
return await db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const orig = {
...original,
poster: enqueueOptImage(imgQueue, {
url: original.poster,
table: shows,
column: sql`${shows.original}['poster']`,
}),
thumbnail: enqueueOptImage(imgQueue, {
url: original.thumbnail,
table: shows,
column: sql`${shows.original}['thumbnail']`,
}),
banner: enqueueOptImage(imgQueue, {
url: original.banner,
table: shows,
column: sql`${shows.original}['banner']`,
}),
logo: enqueueOptImage(imgQueue, {
url: original.logo,
table: shows,
column: sql`${shows.original}['logo']`,
}),
};
const ret = await insertBaseShow(tx, { ...show, original: orig });
if ("status" in ret) return ret;
const trans: ShowTrans[] = Object.entries(translations).map(
([lang, tr]) => ({
pk: ret.pk,
language: lang,
...tr,
latinName: tr.latinName ?? null,
export const insertShow = record(
"insertShow",
async (
show: Omit<Show, "original">,
original: Original & {
poster: string | null;
thumbnail: string | null;
banner: string | null;
logo: string | null;
},
translations:
| SeedMovie["translations"]
| SeedSerie["translations"]
| SeedCollection["translations"],
) => {
return await db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const orig = {
...original,
poster: enqueueOptImage(imgQueue, {
url: tr.poster,
column: showTranslations.poster,
url: original.poster,
table: shows,
column: sql`${shows.original}['poster']`,
}),
thumbnail: enqueueOptImage(imgQueue, {
url: tr.thumbnail,
column: showTranslations.thumbnail,
}),
logo: enqueueOptImage(imgQueue, {
url: tr.logo,
column: showTranslations.logo,
url: original.thumbnail,
table: shows,
column: sql`${shows.original}['thumbnail']`,
}),
banner: enqueueOptImage(imgQueue, {
url: tr.banner,
column: showTranslations.banner,
url: original.banner,
table: shows,
column: sql`${shows.original}['banner']`,
}),
}),
);
await flushImageQueue(tx, imgQueue, 200);
await tx
.insert(showTranslations)
.values(trans)
.onConflictDoUpdate({
target: [showTranslations.pk, showTranslations.language],
set: conflictUpdateAllExcept(showTranslations, ["pk", "language"]),
});
return ret;
});
};
logo: enqueueOptImage(imgQueue, {
url: original.logo,
table: shows,
column: sql`${shows.original}['logo']`,
}),
};
const ret = await insertBaseShow(tx, { ...show, original: orig });
if ("status" in ret) return ret;
const trans: ShowTrans[] = Object.entries(translations).map(
([lang, tr]) => ({
pk: ret.pk,
language: lang,
...tr,
latinName: tr.latinName ?? null,
poster: enqueueOptImage(imgQueue, {
url: tr.poster,
column: showTranslations.poster,
}),
thumbnail: enqueueOptImage(imgQueue, {
url: tr.thumbnail,
column: showTranslations.thumbnail,
}),
logo: enqueueOptImage(imgQueue, {
url: tr.logo,
column: showTranslations.logo,
}),
banner: enqueueOptImage(imgQueue, {
url: tr.banner,
column: showTranslations.banner,
}),
}),
);
await flushImageQueue(tx, imgQueue, 200);
// we can't unnest values here because show translations contains arrays.
await tx
.insert(showTranslations)
.values(trans)
.onConflictDoUpdate({
target: [showTranslations.pk, showTranslations.language],
set: conflictUpdateAllExcept(showTranslations, ["pk", "language"]),
});
return ret;
});
},
);
async function insertBaseShow(tx: Transaction, show: Show) {
function insert() {

View File

@@ -1,57 +1,63 @@
import { eq, sql } from "drizzle-orm";
import { db } from "~/db";
import { roles, staff } from "~/db/schema";
import { conflictUpdateAllExcept } from "~/db/utils";
import { conflictUpdateAllExcept, unnestValues } from "~/db/utils";
import type { SeedStaff } from "~/models/staff";
import { record } from "~/otel";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
export const insertStaff = async (
seed: SeedStaff[] | undefined,
showPk: number,
) => {
if (!seed?.length) return [];
export const insertStaff = record(
"insertStaff",
async (seed: SeedStaff[] | undefined, showPk: number) => {
if (!seed?.length) return [];
return await db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const people = seed.map((x) => ({
...x.staff,
image: enqueueOptImage(imgQueue, {
url: x.staff.image,
column: staff.image,
}),
}));
const ret = await tx
.insert(staff)
.values(people)
.onConflictDoUpdate({
target: staff.slug,
set: conflictUpdateAllExcept(staff, ["pk", "id", "slug", "createdAt"]),
})
.returning({ pk: staff.pk, id: staff.id, slug: staff.slug });
const rval = seed.map((x, i) => ({
showPk,
staffPk: ret[i].pk,
kind: x.kind,
order: i,
character: {
...x.character,
return await db.transaction(async (tx) => {
const imgQueue: ImageTask[] = [];
const people = seed.map((x) => ({
...x.staff,
image: enqueueOptImage(imgQueue, {
url: x.character.image,
table: roles,
column: sql`${roles.character}['image']`,
url: x.staff.image,
column: staff.image,
}),
},
}));
}));
const ret = await tx
.insert(staff)
.select(unnestValues(people, staff))
.onConflictDoUpdate({
target: staff.slug,
set: conflictUpdateAllExcept(staff, [
"pk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: staff.pk, id: staff.id, slug: staff.slug });
await flushImageQueue(tx, imgQueue, -200);
const rval = seed.map((x, i) => ({
showPk,
staffPk: ret[i].pk,
kind: x.kind,
order: i,
character: {
...x.character,
image: enqueueOptImage(imgQueue, {
url: x.character.image,
table: roles,
column: sql`${roles.character}['image']`,
}),
},
}));
// always replace all roles. this is because:
// - we want `order` to stay in sync (& without duplicates)
// - we don't have ways to identify a role so we can't onConflict
await tx.delete(roles).where(eq(roles.showPk, showPk));
await tx.insert(roles).values(rval);
await flushImageQueue(tx, imgQueue, -200);
return ret;
});
};
// always replace all roles. this is because:
// - we want `order` to stay in sync (& without duplicates)
// - we don't have ways to identify a role so we can't onConflict
await tx.delete(roles).where(eq(roles.showPk, showPk));
await tx.insert(roles).select(unnestValues(rval, roles));
return ret;
});
},
);

View File

@@ -1,63 +1,74 @@
import { sql } from "drizzle-orm";
import { db } from "~/db";
import { showStudioJoin, studios, studioTranslations } from "~/db/schema";
import { conflictUpdateAllExcept } from "~/db/utils";
import { conflictUpdateAllExcept, sqlarr, unnestValues } from "~/db/utils";
import type { SeedStudio } from "~/models/studio";
import { record } from "~/otel";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
type StudioI = typeof studios.$inferInsert;
type StudioTransI = typeof studioTranslations.$inferInsert;
export const insertStudios = async (
seed: SeedStudio[] | undefined,
showPk: number,
) => {
if (!seed?.length) return [];
export const insertStudios = record(
"insertStudios",
async (seed: SeedStudio[] | undefined, showPk: number) => {
if (!seed?.length) return [];
return await db.transaction(async (tx) => {
const vals: StudioI[] = seed.map((x) => {
const { translations, ...item } = x;
return item;
});
const ret = await tx
.insert(studios)
.values(vals)
.onConflictDoUpdate({
target: studios.slug,
set: conflictUpdateAllExcept(studios, [
"pk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: studios.pk, id: studios.id, slug: studios.slug });
const imgQueue: ImageTask[] = [];
const trans: StudioTransI[] = seed.flatMap((x, i) =>
Object.entries(x.translations).map(([lang, tr]) => ({
pk: ret[i].pk,
language: lang,
name: tr.name,
logo: enqueueOptImage(imgQueue, {
url: tr.logo,
column: studioTranslations.logo,
}),
})),
);
await flushImageQueue(tx, imgQueue, -100);
await tx
.insert(studioTranslations)
.values(trans)
.onConflictDoUpdate({
target: [studioTranslations.pk, studioTranslations.language],
set: conflictUpdateAllExcept(studioTranslations, ["pk", "language"]),
return await db.transaction(async (tx) => {
const vals: StudioI[] = seed.map((x) => {
const { translations, ...item } = x;
return item;
});
await tx
.insert(showStudioJoin)
.values(ret.map((studio) => ({ showPk: showPk, studioPk: studio.pk })))
.onConflictDoNothing();
return ret;
});
};
const ret = await tx
.insert(studios)
.select(unnestValues(vals, studios))
.onConflictDoUpdate({
target: studios.slug,
set: conflictUpdateAllExcept(studios, [
"pk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: studios.pk, id: studios.id, slug: studios.slug });
const imgQueue: ImageTask[] = [];
const trans: StudioTransI[] = seed.flatMap((x, i) =>
Object.entries(x.translations).map(([lang, tr]) => ({
pk: ret[i].pk,
language: lang,
name: tr.name,
logo: enqueueOptImage(imgQueue, {
url: tr.logo,
column: studioTranslations.logo,
}),
})),
);
await flushImageQueue(tx, imgQueue, -100);
await tx
.insert(studioTranslations)
.select(unnestValues(trans, studioTranslations))
.onConflictDoUpdate({
target: [studioTranslations.pk, studioTranslations.language],
set: conflictUpdateAllExcept(studioTranslations, ["pk", "language"]),
});
await tx
.insert(showStudioJoin)
.select(
db
.select({
showPk: sql`${showPk}`.as("showPk"),
studioPk: sql`v."studioPk"`.as("studioPk"),
})
.from(
sql`unnest(${sqlarr(ret.map((x) => x.pk))}::integer[]) as v("studioPk")`,
),
)
.onConflictDoNothing();
return ret;
});
},
);

View File

@@ -35,7 +35,8 @@ import {
jsonbBuildObject,
jsonbObjectAgg,
sqlarr,
values,
unnest,
unnestValues,
} from "~/db/utils";
import { Entry } from "~/models/entry";
import { KError } from "~/models/error";
@@ -129,10 +130,10 @@ async function linkVideos(
slug: computeVideoSlug(entriesQ.slug, hasRenderingQ),
})
.from(
values(links, {
unnest(links, "j", {
video: "integer",
entry: "jsonb",
}).as("j"),
}),
)
.innerJoin(videos, eq(videos.pk, sql`j.video`))
.innerJoin(
@@ -835,7 +836,7 @@ export const videosWriteH = new Elysia({ prefix: "/videos", tags: ["videos"] })
try {
vids = await tx
.insert(videos)
.values(body)
.select(unnestValues(body, videos))
.onConflictDoUpdate({
target: [videos.path],
set: conflictUpdateAllExcept(videos, ["pk", "id", "createdAt"]),

View File

@@ -6,6 +6,7 @@ import { sql } from "drizzle-orm";
import { drizzle } from "drizzle-orm/node-postgres";
import { migrate as migrateDb } from "drizzle-orm/node-postgres/migrator";
import type { PoolConfig } from "pg";
import { record } from "~/otel";
import * as schema from "./schema";
const config: PoolConfig = {
@@ -22,8 +23,10 @@ const config: PoolConfig = {
async function parseSslConfig(): Promise<PoolConfig> {
// Due to an upstream bug, if `ssl` is not falsey, an SSL connection will always be attempted. This means
// that non-SSL connection options under `ssl` (which is incorrectly named) cannot be set unless SSL is enabled.
if (!process.env.PGSSLMODE || process.env.PGSSLMODE === "disable")
if (!process.env.PGSSLMODE || process.env.PGSSLMODE === "disable") {
config.ssl = false;
return config;
}
// Despite this field's name, it is used to configure everything below the application layer.
const ssl: ConnectionOptions = {};
@@ -112,7 +115,19 @@ const postgresConfig = await parseSslConfig();
// use this when using drizzle-kit since it can't parse await statements
// const postgresConfig = config;
console.log("Connecting to postgres with config", postgresConfig);
console.log("Connecting to postgres with config", {
...postgresConfig,
password: postgresConfig.password ? "<redacted>" : undefined,
ssl:
postgresConfig.ssl && typeof postgresConfig.ssl === "object"
? {
...postgresConfig.ssl,
key: "<redacted>",
cert: "<redacted>",
ca: "<redacted>",
}
: postgresConfig.ssl,
});
export const db = drizzle({
schema,
connection: postgresConfig,
@@ -122,24 +137,26 @@ instrumentDrizzleClient(db, {
maxQueryTextLength: 100_000_000,
});
export const migrate = async () => {
export const migrate = record("migrate", async () => {
const APP_SCHEMA = "kyoo";
try {
await db.execute(
sql.raw(`
create extension if not exists pg_trgm;
set pg_trgm.word_similarity_threshold = 0.4;
alter database "${postgresConfig.database}" set pg_trgm.word_similarity_threshold = 0.4;
`),
create schema if not exists ${APP_SCHEMA};
create extension if not exists pg_trgm schema ${APP_SCHEMA};
set pg_trgm.word_similarity_threshold = 0.4;
alter database "${postgresConfig.database}" set pg_trgm.word_similarity_threshold = 0.4;
`),
);
} catch (err: any) {
console.error("Error while updating pg_trgm", err.message);
}
await migrateDb(db, {
migrationsSchema: "kyoo",
migrationsSchema: APP_SCHEMA,
migrationsFolder: "./drizzle",
});
console.log(`Database ${postgresConfig.database} migrated!`);
};
});
export type Transaction =
| typeof db

View File

@@ -8,12 +8,17 @@ import {
type Subquery,
sql,
Table,
type TableConfig,
View,
ViewBaseConfig,
} from "drizzle-orm";
import type { CasingCache } from "drizzle-orm/casing";
import type { AnyMySqlSelect } from "drizzle-orm/mysql-core";
import type { AnyPgSelect, SelectedFieldsFlat } from "drizzle-orm/pg-core";
import type {
AnyPgSelect,
PgTableWithColumns,
SelectedFieldsFlat,
} from "drizzle-orm/pg-core";
import type { AnySQLiteSelect } from "drizzle-orm/sqlite-core";
import type { WithSubquery } from "drizzle-orm/subquery";
import { db } from "./index";
@@ -69,8 +74,18 @@ export function conflictUpdateAllExcept<
}
// drizzle is bugged and doesn't allow js arrays to be used in raw sql.
export function sqlarr(array: unknown[]) {
return `{${array.map((item) => `"${item}"`).join(",")}}`;
// drizzle is bugged and doesn't allow js arrays to be used in raw sql.
// Serializes a js array into a postgres array literal (e.g. `{"1", "2"}`),
// recursing into nested arrays and escaping quotes in strings/objects.
export function sqlarr(array: unknown[]): string {
	const serialize = (item: unknown): string => {
		// an unquoted `null` is a sql NULL; the string "null" is mapped to it
		// too so previously-serialized nulls round-trip.
		if (item === null || item === undefined || item === "null") return "null";
		// nested js arrays become nested postgres array literals
		if (Array.isArray(item)) return sqlarr(item);
		if (typeof item === "object")
			return `"${JSON.stringify(item).replaceAll("\\", "\\\\").replaceAll('"', '\\"')}"`;
		return `"${item.toString().replaceAll('"', '\\"')}"`;
	};
	return `{${array.map(serialize).join(", ")}}`;
}
// See https://github.com/drizzle-team/drizzle-orm/issues/4044
@@ -103,6 +118,102 @@ export function values<K extends string>(
};
}
/* goal:
 * unnestValues([{a: 1, b: 2}, {a: 3, b: 4}], tbl)
 *
 * ```sql
 * select a, b, now() as updated_at from unnest($1::integer[], $2::integer[]) as v(a, b);
 * ```
 * params (one array per column, not per row):
 * $1: [1, 3]
 * $2: [2, 4]
 */
/**
 * Builds a `select ... from unnest(...)` subquery that expands `values` (an
 * array of row objects) into one postgres array parameter per column, so a
 * multi-row insert sends one bind parameter per column instead of one per
 * cell. Columns of `typeInfo` that never appear in `values` but declare a
 * drizzle `defaultFn`/`onUpdateFn` are computed once and selected as
 * constants so defaults still apply.
 *
 * Fix: error message said "at least one items" (grammar).
 */
export const unnestValues = <
	T extends Record<string, unknown>,
	C extends TableConfig = never,
>(
	values: T[],
	typeInfo: PgTableWithColumns<C>,
) => {
	if (values[0] === undefined)
		throw new Error("Invalid values, expecting at least one item");
	const columns = getTableColumns(typeInfo);
	// only keep keys that map to actual table columns
	const keys = Object.keys(values[0]).filter((x) => x in columns);
	// @ts-expect-error: drizzle internal
	const casing = db.dialect.casing as CasingCache;
	const dbNames = Object.fromEntries(
		Object.entries(columns).map(([k, v]) => [k, casing.getColumnCasing(v)]),
	);
	// pivot the row objects into one array per column; rows may have
	// heterogeneous keys, so a column first seen at row `i` is backfilled
	// with `i` nulls (and appended to `keys` for the remaining rows).
	const vals = values.reduce(
		(acc, cur, i) => {
			for (const k of keys) {
				if (k in cur) acc[k].push(cur[k]);
				else acc[k].push(null);
			}
			for (const k of Object.keys(cur)) {
				if (k in acc) continue;
				if (!(k in columns)) continue;
				keys.push(k);
				acc[k] = new Array(i).fill(null);
				acc[k].push(cur[k]);
			}
			return acc;
		},
		Object.fromEntries(keys.map((x) => [x, [] as unknown[]])),
	);
	// columns never provided by `values` but carrying a default/on-update hook
	const computed = Object.entries(columns)
		.filter(([k, v]) => (v.defaultFn || v.onUpdateFn) && !keys.includes(k))
		.map(([k]) => k);
	return db
		.select(
			Object.fromEntries([
				...keys.map((x) => [x, sql.raw(`"${dbNames[x]}"`)]),
				...computed.map((x) => [
					x,
					(columns[x].defaultFn?.() ?? columns[x].onUpdateFn!()).as(dbNames[x]),
				]),
			]) as {
				[k in keyof typeof typeInfo.$inferInsert]-?: SQL.Aliased<
					(typeof typeInfo.$inferInsert)[k]
				>;
			},
		)
		.from(
			sql`unnest(${sql.join(
				keys.map(
					(k) =>
						sql`${sqlarr(vals[k])}${sql.raw(`::${columns[k].getSQLType()}[]`)}`,
				),
				sql.raw(", "),
			)}) as v(${sql.raw(keys.map((x) => `"${dbNames[x]}"`).join(", "))})`,
		);
};
/**
 * Expands `values` into a `unnest(...) as name(col, ...)` sql fragment,
 * sending one postgres array parameter per key of `typeInfo` (which maps each
 * key to its sql type). Rows missing a key contribute `null` for that column.
 */
export const unnest = <T extends Record<string, unknown>>(
	values: T[],
	name: string,
	typeInfo: Record<keyof T, string>,
) => {
	const keys = Object.keys(typeInfo);
	// pivot the row objects into one array per column
	const cols = Object.fromEntries(keys.map((k) => [k, [] as unknown[]]));
	for (const row of values) {
		for (const k of keys) cols[k].push(k in row ? row[k] : null);
	}
	const arrays = sql.join(
		keys.map((k) => sql`${sqlarr(cols[k])}${sql.raw(`::${typeInfo[k]}[]`)}`),
		sql.raw(", "),
	);
	const colNames = sql.raw(keys.map((x) => `"${x}"`).join(", "));
	return sql`unnest(${arrays}) as ${sql.raw(name)}(${colNames})`;
};
// sql COALESCE: yields `val` unless it evaluates to NULL, then falls back to `def`.
export const coalesce = <T>(val: SQL<T> | SQLWrapper, def: SQL<T> | Column) => {
	return sql<T>`coalesce(${val}, ${def})`;
};

View File

@@ -1,4 +1,4 @@
import { opentelemetry } from "@elysiajs/opentelemetry";
import { record as elysiaRecord, opentelemetry } from "@elysiajs/opentelemetry";
import { OTLPMetricExporter as GrpcMetricExporter } from "@opentelemetry/exporter-metrics-otlp-grpc";
import { OTLPMetricExporter as HttpMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { OTLPTraceExporter as GrpcTraceExporter } from "@opentelemetry/exporter-trace-otlp-grpc";
@@ -32,3 +32,12 @@ export const otel = new Elysia()
}),
)
.as("global");
/**
 * Wraps `fn` so every call runs inside an opentelemetry span named
 * `spanName`, while keeping the original function's type/signature intact.
 */
export function record<T extends (...args: any) => any>(
	spanName: string,
	fn: T,
): T {
	return ((...args: Parameters<T>) =>
		elysiaRecord(spanName, () => fn(...args))) as T;
}

View File

@@ -17,6 +17,7 @@ describe("images", () => {
});
it("Create a serie download images", async () => {
await db.delete(mqueue);
await createSerie(madeInAbyss);
const release = await processImages();
// remove notifications to prevent other images to be downloaded (do not curl 20000 images for nothing)
@@ -31,12 +32,17 @@ describe("images", () => {
});
it("Download 404 image", async () => {
await db.delete(mqueue);
const url404 = "https://mockhttp.org/status/404";
const [ret, body] = await createMovie({
...dune,
translations: {
en: {
...dune.translations.en,
poster: "https://www.google.com/404",
poster: url404,
thumbnail: null,
banner: null,
logo: null,
},
},
});
@@ -49,7 +55,7 @@ describe("images", () => {
const failed = await db.query.mqueue.findFirst({
where: and(
eq(mqueue.kind, "image"),
eq(sql`${mqueue.message}->>'url'`, "https://www.google.com/404"),
eq(sql`${mqueue.message}->>'url'`, url404),
),
});
expect(failed!.attempt).toBe(5);

View File

@@ -3,6 +3,7 @@ import { beforeAll } from "bun:test";
process.env.PGDATABASE = "kyoo_test";
process.env.JWT_SECRET = "this is a secret";
process.env.JWT_ISSUER = "https://kyoo.zoriya.dev";
process.env.IMAGES_PATH = "./images";
beforeAll(async () => {
// lazy load this so env set before actually applies

18
auth/devspace.yaml Normal file
View File

@@ -0,0 +1,18 @@
# devspace config: live-develop the auth service inside the cluster by
# swapping its container for a go dev image and syncing local sources.
version: v2beta1
name: auth
dev:
  auth:
    imageSelector: ghcr.io/zoriya/kyoo_auth
    devImage: docker.io/golang:1.25
    workingDir: /app
    sync:
      # mirror local sources into the container; restart it on every upload
      - path: .:/app
        startContainer: true
        onUpload:
          restartContainer: true
    command:
      - bash
      - -c
      - "go mod download; go run -race ."
    ports:
      - port: "4568"

View File

@@ -7,7 +7,7 @@ pkgs.mkShell {
sqlc
go-swag
# for psql in cli (+ pgformatter for sql files)
postgresql_15
postgresql_18
pgformatter
# to run tests
hurl

View File

@@ -12,4 +12,4 @@ dependencies:
- condition: postgres.enabled
name: postgres
repository: oci://registry-1.docker.io/cloudpirates
version: 0.11.6
version: 0.12.0

View File

@@ -438,7 +438,7 @@ traefikproxy:
extraEnv: []
image:
repository: docker.io/traefik
tag: v3.5.3
tag: v3.6.2
replicaCount: 1
updateStrategy: ~
podLabels: {}

18
devspace.yaml Normal file
View File

@@ -0,0 +1,18 @@
# umbrella devspace config: runs the `dev` pipeline of every sub-project.
version: v2beta1
name: kyoo-devspace
dependencies:
  api:
    path: ./api
    pipeline: dev
  auth:
    path: ./auth
    pipeline: dev
  front:
    path: ./front
    pipeline: dev
  scanner:
    path: ./scanner
    pipeline: dev
  transcoder:
    path: ./transcoder
    pipeline: dev

View File

@@ -190,12 +190,12 @@ services:
- "/var/run/docker.sock:/var/run/docker.sock:ro"
postgres:
image: postgres:15
image: postgres:18
restart: on-failure
env_file:
- ./.env
volumes:
- db:/var/lib/postgresql/data
- db:/var/lib/postgresql
ports:
- "5432:5432"
environment:

View File

@@ -139,12 +139,12 @@ services:
- "/var/run/docker.sock:/var/run/docker.sock:ro"
postgres:
image: postgres:15
image: postgres:18
restart: unless-stopped
env_file:
- ./.env
volumes:
- db:/var/lib/postgresql/data
- db:/var/lib/postgresql
environment:
- POSTGRES_USER=$PGUSER
- POSTGRES_PASSWORD=$PGPASSWORD

29
front/devspace.yaml Normal file
View File

@@ -0,0 +1,29 @@
# devspace config: live-develop the front inside the cluster with a bun image.
version: v2beta1
name: front
dev:
  front:
    imageSelector: ghcr.io/zoriya/kyoo_front
    devImage: docker.io/oven/bun:latest
    workingDir: /app
    sync:
      - path: .:/app
        excludePaths:
          - node_modules
        startContainer: true
        onUpload:
          # re-install deps only when the lockfile changes
          exec:
            - command: bun install --frozen-lockfile
              onChange:
                - "./bun.lock"
    # increased sysctl limits for file watching
    # front uses Metro javascript bundler which watches a lot of files
    # these are node level settings that should be raised
    # example values:
    # fs.inotify.max_user_instances = 8192
    # fs.inotify.max_user_watches = 1048576
    command:
      - bash
      - -c
      - "bun install --frozen-lockfile; bun dev --port 8901"
    ports:
      - port: "8901"

25
scanner/devspace.yaml Normal file
View File

@@ -0,0 +1,25 @@
# devspace config: live-develop the scanner inside the cluster with a uv/python image.
version: v2beta1
name: scanner
dev:
  scanner:
    imageSelector: ghcr.io/zoriya/kyoo_scanner
    devImage: docker.io/astral/uv:python3.13-trixie
    workingDir: /app
    sync:
      - path: .:/app
        excludePaths:
          - __pycache__
          - .venv
        startContainer: true
        onUpload:
          restartContainer: true
    command:
      - bash
      - -c
      - |
        echo "Running uv sync..."
        uv sync --locked || (echo 'uv sync failed' && exit 1)
        echo "Starting FastAPI..."
        /app/.venv/bin/fastapi run scanner --port 4389
    ports:
      - port: "4389"

View File

@@ -10,6 +10,10 @@ pkgs.mkShell {
(import ./transcoder/shell.nix {inherit pkgs;})
];
packages = [
pkgs.devspace
];
# env vars aren't inherited from the `inputsFrom`
SHARP_FORCE_GLOBAL_LIBVIPS = 1;
UV_PYTHON_PREFERENCE = "only-system";

41
transcoder/devspace.yaml Normal file
View File

@@ -0,0 +1,41 @@
# devspace config: live-develop the transcoder inside the cluster; installs
# ffmpeg/VAAPI deps at container start since no prebuilt builder image exists.
version: v2beta1
name: transcoder
dev:
  transcoder:
    imageSelector: ghcr.io/zoriya/kyoo_transcoder
    # would be good to publish the kyoo_transcoder builder image with all deps installed
    devImage: golang:1.25
    workingDir: /app
    sync:
      - path: .:/app
        startContainer: true
        onUpload:
          restartContainer: true
    command:
      - bash
      - -c
      - |
        echo "Determining architecture..."
        ARCH=$(dpkg --print-architecture)
        echo "Container architecture: $ARCH"
        echo "Updating apt and installing transcoder dependencies..."
        apt-get update && \
        apt-get install --no-install-recommends --no-install-suggests -y \
        ffmpeg \
        libavformat-dev libavutil-dev libswscale-dev \
        vainfo mesa-va-drivers \
        ca-certificates
        if [ "$ARCH" = "amd64" ]; then
        echo "Installing Intel VAAPI drivers (amd64 only)..."
        apt-get install -y intel-media-va-driver-non-free i965-va-driver-shaders
        else
        echo "Skipping Intel VAAPI drivers for arch $ARCH"
        fi
        echo "Downloading Go dependencies..."
        go mod download
        echo "Starting transcoder..."
        go run -race .
    ports:
      - port: "7666"

View File

@@ -5,7 +5,7 @@ go 1.24.2
require (
github.com/asticode/go-astisub v0.38.0
github.com/aws/aws-sdk-go-v2 v1.39.6
github.com/aws/aws-sdk-go-v2/service/s3 v1.90.2
github.com/aws/aws-sdk-go-v2/service/s3 v1.91.0
github.com/disintegration/imaging v1.6.2
github.com/exaring/otelpgx v0.9.3
github.com/golang-migrate/migrate/v4 v4.19.0
@@ -73,8 +73,8 @@ require (
require (
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 // indirect
github.com/aws/aws-sdk-go-v2/config v1.31.20
github.com/aws/aws-sdk-go-v2/credentials v1.18.24 // indirect
github.com/aws/aws-sdk-go-v2/config v1.31.21
github.com/aws/aws-sdk-go-v2/credentials v1.18.25 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13 // indirect
@@ -86,7 +86,7 @@ require (
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.3 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.7 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.40.2 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.41.0 // indirect
github.com/aws/smithy-go v1.23.2 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
github.com/goccy/go-json v0.10.5 // indirect

View File

@@ -17,10 +17,10 @@ github.com/aws/aws-sdk-go-v2 v1.39.6 h1:2JrPCVgWJm7bm83BDwY5z8ietmeJUbh3O2ACnn+X
github.com/aws/aws-sdk-go-v2 v1.39.6/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 h1:DHctwEM8P8iTXFxC/QK0MRjwEpWQeM9yzidCRjldUz0=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3/go.mod h1:xdCzcZEtnSTKVDOmUZs4l/j3pSV6rpo1WXl5ugNsL8Y=
github.com/aws/aws-sdk-go-v2/config v1.31.20 h1:/jWF4Wu90EhKCgjTdy1DGxcbcbNrjfBHvksEL79tfQc=
github.com/aws/aws-sdk-go-v2/config v1.31.20/go.mod h1:95Hh1Tc5VYKL9NJ7tAkDcqeKt+MCXQB1hQZaRdJIZE0=
github.com/aws/aws-sdk-go-v2/credentials v1.18.24 h1:iJ2FmPT35EaIB0+kMa6TnQ+PwG5A1prEdAw+PsMzfHg=
github.com/aws/aws-sdk-go-v2/credentials v1.18.24/go.mod h1:U91+DrfjAiXPDEGYhh/x29o4p0qHX5HDqG7y5VViv64=
github.com/aws/aws-sdk-go-v2/config v1.31.21 h1:gH/y+NphLGIVuNHXNkTQir3PmL44Efe8OpPAsbDms0o=
github.com/aws/aws-sdk-go-v2/config v1.31.21/go.mod h1:P6I8guuLej6F2++fKUlo9OIhI59LuEsyEZZMMmgqh/4=
github.com/aws/aws-sdk-go-v2/credentials v1.18.25 h1:MvtSN3ECsQbgEHcux1pZQhuMjZnShlsqcS0Pqlan4Vw=
github.com/aws/aws-sdk-go-v2/credentials v1.18.25/go.mod h1:YATyDPzlHucr1cxEE9rsZl7ZG3gQsxpjD6o5of/8qXE=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 h1:T1brd5dR3/fzNFAQch/iBKeX07/ffu/cLu+q+RuzEWk=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13/go.mod h1:Peg/GBAQ6JDt+RoBf4meB1wylmAipb7Kg2ZFakZTlwk=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 h1:a+8/MLcWlIxo1lF9xaGt3J/u3yOZx+CdSveSNwjhD40=
@@ -39,14 +39,14 @@ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 h1:kDqdFvMY
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13/go.mod h1:lmKuogqSU3HzQCwZ9ZtcqOc5XGMqtDK7OIc2+DxiUEg=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 h1:zhBJXdhWIFZ1acfDYIhu4+LCzdUS2Vbcum7D01dXlHQ=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13/go.mod h1:JaaOeCE368qn2Hzi3sEzY6FgAZVCIYcC2nwbro2QCh8=
github.com/aws/aws-sdk-go-v2/service/s3 v1.90.2 h1:DhdbtDl4FdNlj31+xiRXANxEE+eC7n8JQz+/ilwQ8Uc=
github.com/aws/aws-sdk-go-v2/service/s3 v1.90.2/go.mod h1:+wArOOrcHUevqdto9k1tKOF5++YTe9JEcPSc9Tx2ZSw=
github.com/aws/aws-sdk-go-v2/service/s3 v1.91.0 h1:b8FQI84BFRqCHjInLKS7bo+iSH8oVJ9C2noKC2H3jwY=
github.com/aws/aws-sdk-go-v2/service/s3 v1.91.0/go.mod h1:+wArOOrcHUevqdto9k1tKOF5++YTe9JEcPSc9Tx2ZSw=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.3 h1:NjShtS1t8r5LUfFVtFeI8xLAHQNTa7UI0VawXlrBMFQ=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.3/go.mod h1:fKvyjJcz63iL/ftA6RaM8sRCtN4r4zl4tjL3qw5ec7k=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.7 h1:gTsnx0xXNQ6SBbymoDvcoRHL+q4l/dAFsQuKfDWSaGc=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.7/go.mod h1:klO+ejMvYsB4QATfEOIXk8WAEwN4N0aBfJpvC+5SZBo=
github.com/aws/aws-sdk-go-v2/service/sts v1.40.2 h1:HK5ON3KmQV2HcAunnx4sKLB9aPf3gKGwVAf7xnx0QT0=
github.com/aws/aws-sdk-go-v2/service/sts v1.40.2/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.0 h1:JoO/STlEltv5nSbzbg709MLNW0/BWgyK2t/R9OWcCyQ=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.0/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk=
github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM=
github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=

View File

@@ -6,7 +6,7 @@ pkgs.mkShell {
go-migrate
go-swag
# for psql in cli (+ pgformatter for sql files)
postgresql_15
postgresql_18
pgformatter
# to debug video files
mediainfo