Mirror of https://github.com/zoriya/Kyoo.git (synced 2025-12-06 06:36:25 +00:00)

Compare commits: adfe61349b...master (84 commits)
| SHA1 |
|---|
| 58603c5180 |
| a429b0ace9 |
| 0f62854128 |
| 2deeaaf97e |
| 6f07e51a07 |
| c839fc826e |
| 10ac7e1ec6 |
| 79075e497d |
| 8109b7ada6 |
| 30f26b2f6a |
| a1b975cc5d |
| 4f2b2d2cd2 |
| d3ccd14fe0 |
| 7f5bc2f57c |
| c2c9bbe555 |
| 20e6fbbc33 |
| 5f9064ec37 |
| 433b90a3fb |
| 81c6f68509 |
| 96ac331903 |
| f1c2724a7b |
| 12fe7c157f |
| c29ad99ca0 |
| a99f29074c |
| f449a0878a |
| 097985ab6d |
| 11c300ecf7 |
| 1e975ce238 |
| b39fa4262d |
| d7699389bc |
| 1036e9f3f3 |
| b4749f3ed3 |
| a20c61206f |
| 0644a43cb1 |
| af4742ae0b |
| e401ca98c0 |
| a756c875fd |
| 2ef26e5d02 |
| e7d9002156 |
| 28d2e193aa |
| ce5bee11c0 |
| 60d59d7f7b |
| 464d720ef9 |
| 8fc279d2ed |
| a45e992339 |
| 5f8ddd435a |
| d822463fe0 |
| 3a0cbf786d |
| dfb4777a5d |
| eea32c47e9 |
| 6bcd03b18e |
| 87a3df6897 |
| 7f7a16e9b5 |
| b95dd9056b |
| 5044f941b1 |
| c56f9ea791 |
| eb56dd70d6 |
| a4f5ef33ff |
| 20ab1dae6c |
| 7ebc0fe504 |
| 019aceb8d9 |
| f59cb5d671 |
| d4deafe1dc |
| 7b2f1c7a82 |
| c5fa3ecb01 |
| 3602905e86 |
| 1f7844b8a5 |
| 3b76fb2647 |
| 9a00d5036f |
| 7c315602cd |
| 19e0e402da |
| ef38468178 |
| 2cbbb450c2 |
| 9f466ff702 |
| 05f7fabb3c |
| 5bc6a06b91 |
| f7e801e574 |
| c663189df1 |
| 37ec32b52d |
| 188ce3f67d |
| 18b2ae2c5f |
| a115c83cba |
| 27d25f4829 |
| 64dae6ddce |
```diff
@@ -10,6 +10,8 @@ LIBRARY_ROOT=./video
 # You should set this to a path where kyoo can write large amount of data, this is used as a cache by the transcoder.
 # It will automatically be cleaned up on kyoo's startup/shutdown/runtime.
 CACHE_ROOT=/tmp/kyoo_cache
+# Where to store downloaded images of the shows
+IMAGES_PATH="./images";
 # A pattern (regex) to ignore files.
 LIBRARY_IGNORE_PATTERN=".*/[dD]ownloads?/.*"

@@ -36,7 +38,7 @@ PUBLIC_URL=http://localhost:8901
 # Set `verified` to true if you don't wanna manually verify users.
 EXTRA_CLAIMS='{"permissions": ["core.read", "core.play"], "verified": false}'
 # This is the permissions of the first user (aka the first user is admin)
-FIRST_USER_CLAIMS='{"permissions": ["users.read", "users.write", "apikeys.read", "apikeys.write", "users.delete", "core.read", "core.write", "core.play", "scanner.trigger"], "verified": true}'
+FIRST_USER_CLAIMS='{"permissions": ["users.read", "users.write", "users.delete", "apikeys.read", "apikeys.write", "core.read", "core.write", "core.play", "scanner.trigger"], "verified": true}'

 # Guest (meaning unlogged in users) can be:
 # unauthorized (they need to connect before doing anything)
```
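Both claim variables are JSON objects of the same shape, so whatever consumes them has to parse and validate them at startup. A minimal sketch of that parsing, assuming only what the values above show (illustrative, not Kyoo's actual code):

```ts
// Sketch only: parse a *_CLAIMS env var of the shape shown above.
// The `Claims` type is inferred from the example values, not from Kyoo's source.
interface Claims {
  permissions: string[];
  verified: boolean;
}

function readClaims(name: string, fallback: Claims): Claims {
  const raw = process.env[name];
  if (!raw) return fallback;
  const parsed = JSON.parse(raw); // fail fast on malformed config
  if (!Array.isArray(parsed.permissions) || typeof parsed.verified !== "boolean")
    throw new Error(`${name} must contain "permissions" and "verified"`);
  return parsed;
}

const extraClaims = readClaims("EXTRA_CLAIMS", { permissions: [], verified: false });
console.log(extraClaims.permissions); // e.g. ["core.read", "core.play"]
```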
.github/workflows/api-test.yml (vendored, 6 lines changed)

```diff
@@ -15,17 +15,18 @@ jobs:
       postgres:
         image: postgres:15
         ports:
           - "5432:5432"
         env:
           POSTGRES_USER: kyoo
           POSTGRES_PASSWORD: password
           POSTGRES_DB: kyoo_test
         options: >-
           --health-cmd pg_isready
           --health-interval 10s
           --health-timeout 5s
           --health-retries 5
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: oven-sh/setup-bun@v2

       - name: Install dependencies
@@ -37,3 +38,4 @@ jobs:
         run: bun test
         env:
           PGHOST: localhost
+          IMAGES_PATH: ./images
```
.github/workflows/auth-hurl.yml (vendored, 4 lines changed)

```diff
@@ -15,7 +15,7 @@ jobs:
       postgres:
         image: postgres:15
         ports:
           - "5432:5432"
         env:
           POSTGRES_USER: kyoo
           POSTGRES_PASSWORD: password
@@ -25,7 +25,7 @@ jobs:
           --health-timeout 5s
           --health-retries 5
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6

       - uses: gacts/install-hurl@v1
```
.github/workflows/coding-style.yml (vendored, 12 lines changed)

```diff
@@ -9,7 +9,7 @@ jobs:
       run:
         working-directory: ./api
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6

       - name: Setup Biome
         uses: biomejs/setup-biome@v2
@@ -26,7 +26,7 @@ jobs:
       run:
         working-directory: ./front
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6

       - name: Setup Biome
         uses: biomejs/setup-biome@v2
@@ -37,10 +37,10 @@ jobs:
         run: biome ci .

   scanner:
-    name: "Lint scanner/autosync"
+    name: "Lint scanner"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6

       - uses: chartboost/ruff-action@v1
         with:
@@ -53,7 +53,7 @@ jobs:
       run:
         working-directory: ./transcoder
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6

       - name: Run go fmt
         run: if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then exit 1; fi
@@ -65,7 +65,7 @@ jobs:
       run:
         working-directory: ./auth
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6

       - name: Run go fmt
         run: if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then exit 1; fi
```
.github/workflows/docker.yml (vendored, 7 lines changed)

```diff
@@ -34,11 +34,6 @@ jobs:
           label: scanner
           image: ${{ github.repository_owner }}/kyoo_scanner

-        - context: ./autosync
-          dockerfile: Dockerfile
-          label: autosync
-          image: ${{ github.repository_owner }}/kyoo_autosync
-
         - context: ./transcoder
           dockerfile: Dockerfile
           label: transcoder
@@ -52,7 +47,7 @@ jobs:
       DOCKERHUB_ENABLED: ${{ secrets.DOCKER_USERNAME && secrets.DOCKER_PASSWORD && 'true' || 'false' }}
     name: Build ${{matrix.label}}
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6

       - uses: dorny/paths-filter@v3
         id: filter
```
.github/workflows/helm-release-chart.yml (vendored, 64 lines changed)

```diff
@@ -2,41 +2,53 @@ name: Release Helm Chart
 on:
   push:
     tags:
       - v*
+  workflow_dispatch:
+    inputs:
+      channel:
+        description: 'Release channel (master, edge, or leave blank for tag-based)'
+        required: false
+        default: 'master'

 jobs:
   release:
     runs-on: ubuntu-latest

     steps:
       - name: Checkout code
-        uses: actions/checkout@v5
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

       - name: Set up Helm
-        uses: azure/setup-helm@v4
+        uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4.3.1

       - name: Log in to GHCR
-        uses: docker/login-action@v3
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Update Helm Dependencies
-        run: |
-          helm dependency update ./chart
+        run: helm dependency update ./chart

-      - name: Package Helm Chart
-        run: |
-          export tag=$(echo ${GITHUB_REF#refs/tags/} | sed 's/^v//')
-          helm package ./chart --version $tag --app-version $tag
-
-      - name: Build Helm-safe repo name
-        run: |
-          REPO_NAME="$(echo "oci://ghcr.io/${GITHUB_REPOSITORY_OWNER}/helm-charts" | tr '[:upper:]' '[:lower:]')"
-          echo "REPO_NAME=${REPO_NAME}" >> "${GITHUB_ENV}"
+      - name: Determine Chart Version
+        id: version
+        run: |
+          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
+            TAG="${{ github.event.inputs.channel }}"
+          else
+            TAG=$(echo ${GITHUB_REF#refs/tags/} | sed 's/^v//')
+          fi
+          echo "TAG=$TAG" >> "${GITHUB_ENV}"
+          echo "Using chart version: $TAG"

-      - name: Push Helm Chart to GHCR
-        run: |
-          helm push kyoo-*.tgz "${REPO_NAME}"
+      - name: Package Helm Chart
+        run: helm package ./chart --version $TAG --app-version $TAG
+
+      - name: Build Helm-safe repo name
+        run: |
+          REPO_NAME="$(echo "oci://ghcr.io/${GITHUB_REPOSITORY_OWNER}/helm-charts" | tr '[:upper:]' '[:lower:]')"
+          echo "REPO_NAME=${REPO_NAME}" >> "${GITHUB_ENV}"
+
+      - name: Push Helm Chart to GHCR
+        run: helm push kyoo-*.tgz "${REPO_NAME}"
```
.github/workflows/helm-test-chart.yml (vendored, 2 lines changed)

```diff
@@ -15,7 +15,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: Set up Helm
         uses: azure/setup-helm@v4
```
.github/workflows/native-build.yml (vendored, 4 lines changed)

```diff
@@ -2,7 +2,7 @@ name: Native build
 on:
   push:
     tags:
       - v*

 jobs:
   update:
@@ -13,7 +13,7 @@ jobs:
         working-directory: ./front
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       # This is required because GHA doesn't support secrets in the `if` condition
       - name: Check if Expo build is enabled
```
.github/workflows/native-update.yml (vendored, 2 lines changed)

```diff
@@ -13,7 +13,7 @@ jobs:
         working-directory: ./front
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       # This is required because GHA doesn't support secrets in the `if` condition
       - name: Check if Expo build is enabled
```
.github/workflows/release.yml (vendored, 4 lines changed)

```diff
@@ -2,7 +2,7 @@ name: Release
 on:
   push:
     tags:
       - v*

 jobs:
   update:
@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: Set correct versions
         run: |
```
.gitignore (vendored, 3 lines changed)

```diff
@@ -1,4 +1,5 @@
 /video
+.devspace/
 .env
 .venv
 .idea
@@ -6,6 +7,4 @@
 log.html
 output.xml
 report.html
-chart/charts
-chart/Chart.lock
 tmp
```
api/bun.lock (24 lines changed)

```diff
@@ -5,7 +5,7 @@
     "": {
       "name": "api",
       "dependencies": {
-        "@elysiajs/opentelemetry": "^1.4.6",
+        "@elysiajs/opentelemetry": "^1.4.8",
         "@elysiajs/swagger": "zoriya/elysia-swagger#build",
         "@kubiks/otel-drizzle": "zoriya/drizzle-otel#build",
         "@types/bun": "^1.3.1",
@@ -20,7 +20,7 @@
         "sharp": "^0.34.4",
       },
       "devDependencies": {
-        "@biomejs/biome": "2.3.4",
+        "@biomejs/biome": "2.3.7",
         "@types/pg": "^8.15.5",
       },
     },
@@ -29,27 +29,27 @@
     "drizzle-orm@0.44.7": "patches/drizzle-orm@0.44.7.patch",
   },
   "packages": {
-    "@biomejs/biome": ["@biomejs/biome@2.3.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.3.4", "@biomejs/cli-darwin-x64": "2.3.4", "@biomejs/cli-linux-arm64": "2.3.4", "@biomejs/cli-linux-arm64-musl": "2.3.4", "@biomejs/cli-linux-x64": "2.3.4", "@biomejs/cli-linux-x64-musl": "2.3.4", "@biomejs/cli-win32-arm64": "2.3.4", "@biomejs/cli-win32-x64": "2.3.4" }, "bin": { "biome": "bin/biome" } }, "sha512-TU08LXjBHdy0mEY9APtEtZdNQQijXUDSXR7IK1i45wgoPD5R0muK7s61QcFir6FpOj/RP1+YkPx5QJlycXUU3w=="],
+    "@biomejs/biome": ["@biomejs/biome@2.3.7", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.3.7", "@biomejs/cli-darwin-x64": "2.3.7", "@biomejs/cli-linux-arm64": "2.3.7", "@biomejs/cli-linux-arm64-musl": "2.3.7", "@biomejs/cli-linux-x64": "2.3.7", "@biomejs/cli-linux-x64-musl": "2.3.7", "@biomejs/cli-win32-arm64": "2.3.7", "@biomejs/cli-win32-x64": "2.3.7" }, "bin": { "biome": "bin/biome" } }, "sha512-CTbAS/jNAiUc6rcq94BrTB8z83O9+BsgWj2sBCQg9rD6Wkh2gjfR87usjx0Ncx0zGXP1NKgT7JNglay5Zfs9jw=="],

-    "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.3.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-w40GvlNzLaqmuWYiDU6Ys9FNhJiclngKqcGld3iJIiy2bpJ0Q+8n3haiaC81uTPY/NA0d8Q/I3Z9+ajc14102Q=="],
+    "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.3.7", "", { "os": "darwin", "cpu": "arm64" }, "sha512-LirkamEwzIUULhXcf2D5b+NatXKeqhOwilM+5eRkbrnr6daKz9rsBL0kNZ16Hcy4b8RFq22SG4tcLwM+yx/wFA=="],

-    "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.3.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-3s7TLVtjJ7ni1xADXsS7x7GMUrLBZXg8SemXc3T0XLslzvqKj/dq1xGeBQ+pOWQzng9MaozfacIHdK2UlJ3jGA=="],
+    "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.3.7", "", { "os": "darwin", "cpu": "x64" }, "sha512-Q4TO633kvrMQkKIV7wmf8HXwF0dhdTD9S458LGE24TYgBjSRbuhvio4D5eOQzirEYg6eqxfs53ga/rbdd8nBKg=="],

-    "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.3.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-y7efHyyM2gYmHy/AdWEip+VgTMe9973aP7XYKPzu/j8JxnPHuSUXftzmPhkVw0lfm4ECGbdBdGD6+rLmTgNZaA=="],
+    "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.3.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-inHOTdlstUBzgjDcx0ge71U4SVTbwAljmkfi3MC5WzsYCRhancqfeL+sa4Ke6v2ND53WIwCFD5hGsYExoI3EZQ=="],

-    "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.3.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-IruVGQRwMURivWazchiq7gKAqZSFs5so6gi0hJyxk7x6HR+iwZbO2IxNOqyLURBvL06qkIHs7Wffl6Bw30vCbQ=="],
+    "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.3.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-/afy8lto4CB8scWfMdt+NoCZtatBUF62Tk3ilWH2w8ENd5spLhM77zKlFZEvsKJv9AFNHknMl03zO67CiklL2Q=="],

-    "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.3.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gKfjWR/6/dfIxPJCw8REdEowiXCkIpl9jycpNVHux8aX2yhWPLjydOshkDL6Y/82PcQJHn95VCj7J+BRcE5o1Q=="],
+    "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.3.7", "", { "os": "linux", "cpu": "x64" }, "sha512-fJMc3ZEuo/NaMYo5rvoWjdSS5/uVSW+HPRQujucpZqm2ZCq71b8MKJ9U4th9yrv2L5+5NjPF0nqqILCl8HY/fg=="],

-    "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.3.4", "", { "os": "linux", "cpu": "x64" }, "sha512-mzKFFv/w66e4/jCobFmD3kymCqG+FuWE7sVa4Yjqd9v7qt2UhXo67MSZKY9Ih18V2IwPzRKQPCw6KwdZs6AXSA=="],
+    "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.3.7", "", { "os": "linux", "cpu": "x64" }, "sha512-CQUtgH1tIN6e5wiYSJqzSwJumHYolNtaj1dwZGCnZXm2PZU1jOJof9TsyiP3bXNDb+VOR7oo7ZvY01If0W3iFQ=="],

-    "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.3.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-5TJ6JfVez+yyupJ/iGUici2wzKf0RrSAxJhghQXtAEsc67OIpdwSKAQboemILrwKfHDi5s6mu7mX+VTCTUydkw=="],
+    "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.3.7", "", { "os": "win32", "cpu": "arm64" }, "sha512-aJAE8eCNyRpcfx2JJAtsPtISnELJ0H4xVVSwnxm13bzI8RwbXMyVtxy2r5DV1xT3WiSP+7LxORcApWw0LM8HiA=="],

-    "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.3.4", "", { "os": "win32", "cpu": "x64" }, "sha512-FGCijXecmC4IedQ0esdYNlMpx0Jxgf4zceCaMu6fkjWyjgn50ZQtMiqZZQ0Q/77yqPxvtkgZAvt5uGw0gAAjig=="],
+    "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.3.7", "", { "os": "win32", "cpu": "x64" }, "sha512-pulzUshqv9Ed//MiE8MOUeeEkbkSHVDVY5Cz5wVAnH1DUqliCQG3j6s1POaITTFqFfo7AVIx2sWdKpx/GS+Nqw=="],

     "@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="],

-    "@elysiajs/opentelemetry": ["@elysiajs/opentelemetry@1.4.6", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/instrumentation": "^0.200.0", "@opentelemetry/sdk-node": "^0.200.0" }, "peerDependencies": { "elysia": ">= 1.4.0" } }, "sha512-jR7t4M6ZvMnBqzzHsNTL6y3sNq9jbGi2vKxbkizi/OO5tlvlKl/rnBGyFjZUjQ1Hte7rCz+2kfmgOQMhkjk+Og=="],
+    "@elysiajs/opentelemetry": ["@elysiajs/opentelemetry@1.4.8", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/instrumentation": "^0.200.0", "@opentelemetry/sdk-node": "^0.200.0" }, "peerDependencies": { "elysia": ">= 1.4.0" } }, "sha512-c9unbcdXfehExCv1GsiTCfos5SyIAyDwP7apcMeXmUMBaJZiAYMfiEH8RFFFIfIHJHC/xlNJzUPodkcUaaoJJQ=="],

     "@elysiajs/swagger": ["@elysiajs/swagger@github:zoriya/elysia-swagger#f88fbc7", { "dependencies": { "@scalar/themes": "^0.9.81", "@scalar/types": "^0.1.3", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "zoriya-elysia-swagger-f88fbc7"],
```
api/devspace.yaml (new file, 23 lines; indentation reconstructed from the flattened extraction)

```yaml
version: v2beta1
name: api
dev:
  api:
    imageSelector: ghcr.io/zoriya/kyoo_api
    devImage: docker.io/oven/bun:latest
    workingDir: /app
    sync:
      - path: .:/app
        excludePaths:
          - node_modules
        startContainer: true
        onUpload:
          exec:
            - command: bun install --frozen-lockfile
              onChange:
                - "./bun.lock"
    command:
      - bash
      - -c
      - "bun install && bun dev"
    ports:
      - port: "3567"
```
api/drizzle/0023_mqueue-priority.sql (new file, 3 lines)

```sql
ALTER TABLE "kyoo"."history" ALTER COLUMN "time" SET DEFAULT 0;--> statement-breakpoint
ALTER TABLE "kyoo"."history" ALTER COLUMN "time" SET NOT NULL;--> statement-breakpoint
ALTER TABLE "kyoo"."mqueue" ADD COLUMN "priority" integer DEFAULT 0 NOT NULL;
```
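The new `priority` column feeds the dequeue ordering introduced further down in this diff (`orderBy(mqueue.priority, mqueue.attempt, mqueue.createdAt)`). Drizzle's `orderBy` is ascending by default, so a lower value is claimed sooner; the seeding code below enqueues staff at -200, studios at -100, seasons at -10, entries at 0, collections at 100, and shows at 200. The same ordering, expressed over plain objects:

```ts
// Sketch only: the claim order the worker's orderBy produces.
// Lower priority value wins; ties fall back to fewer attempts, then age.
type Task = { priority: number; attempt: number; createdAt: number };

const byQueueOrder = (a: Task, b: Task) =>
  a.priority - b.priority || a.attempt - b.attempt || a.createdAt - b.createdAt;

const tasks: Task[] = [
  { priority: 100, attempt: 0, createdAt: 1 }, // collection art
  { priority: -200, attempt: 0, createdAt: 2 }, // staff image
  { priority: 0, attempt: 0, createdAt: 3 }, // entry thumbnail
];
console.log(tasks.sort(byQueueOrder).map((t) => t.priority)); // [-200, 0, 100]
```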
api/drizzle/0024_fix-season-count.sql (new file, 1 line)

```sql
ALTER TABLE "kyoo"."seasons" ALTER COLUMN "entries_count" SET DEFAULT 0;
```

api/drizzle/meta/0023_snapshot.json (new file, 1880 lines): file diff suppressed because it is too large

api/drizzle/meta/0024_snapshot.json (new file, 1881 lines): file diff suppressed because it is too large

```diff
@@ -162,6 +162,20 @@
     "when": 1752446736231,
     "tag": "0022_seasons-count",
     "breakpoints": true
   },
+  {
+    "idx": 23,
+    "version": "7",
+    "when": 1763924097229,
+    "tag": "0023_mqueue-priority",
+    "breakpoints": true
+  },
+  {
+    "idx": 24,
+    "version": "7",
+    "when": 1763932730557,
+    "tag": "0024_fix-season-count",
+    "breakpoints": true
+  }
 ]
}
```
```diff
@@ -9,7 +9,7 @@
     "format": "biome check --write ."
   },
   "dependencies": {
-    "@elysiajs/opentelemetry": "^1.4.6",
+    "@elysiajs/opentelemetry": "^1.4.8",
     "@elysiajs/swagger": "zoriya/elysia-swagger#build",
     "@kubiks/otel-drizzle": "zoriya/drizzle-otel#build",
     "@types/bun": "^1.3.1",
@@ -24,7 +24,7 @@
     "sharp": "^0.34.4"
   },
   "devDependencies": {
-    "@biomejs/biome": "2.3.4",
+    "@biomejs/biome": "2.3.7",
     "@types/pg": "^8.15.5"
   },
   "module": "src/index.js",
```
```diff
@@ -4,7 +4,7 @@ pkgs.mkShell {
     bun
     biome
     # for psql to debug from the cli
-    postgresql_15
+    postgresql_18
     # to build libvips (for sharp)
     nodejs
     node-gyp
@@ -13,4 +13,7 @@ pkgs.mkShell {
   ];

   SHARP_FORCE_GLOBAL_LIBVIPS = 1;
+  shellHook = ''
+    export LD_LIBRARY_PATH=${pkgs.stdenv.cc.cc.lib}/lib:$LD_LIBRARY_PATH
+  '';
 }
```
```diff
@@ -73,7 +73,7 @@ export const auth = new Elysia({ name: "auth" })
   .macro({
     permissions(perms: string[]) {
       return {
-        beforeHandle: ({ jwt, status }) => {
+        beforeHandle: function permissionCheck({ jwt, status }) {
           for (const perm of perms) {
             if (!jwt!.permissions.includes(perm)) {
               return status(403, {
```
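Swapping the anonymous arrow for a named function expression looks like an observability tweak: tools that label stack frames or spans from `Function.prototype.name` would otherwise report every macro's hook as `beforeHandle`, whereas an explicit name identifies which check ran. Standard JS semantics, not Kyoo-specific:

```ts
// A property-assigned arrow gets its name inferred from the key, so every
// macro hook would be labeled "beforeHandle"; a named function expression
// carries its own, more specific name.
const hooks = {
  beforeHandle: () => {},
  withName: function permissionCheck() {},
};
console.log(hooks.beforeHandle.name); // "beforeHandle"
console.log(hooks.withName.name); // "permissionCheck"
```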
```diff
@@ -52,8 +52,7 @@ export const base = new Elysia({ name: "base" })
     console.error(code, error);
     return {
       status: 500,
-      message: "message" in error ? (error?.message ?? code) : code,
-      details: error,
+      message: "Internal server error",
     } as KError;
   })
   .get("/health", () => ({ status: "healthy" }) as const, {
```
```diff
@@ -157,7 +157,7 @@ export const mapProgress = ({ aliased }: { aliased: boolean }) => {
   const ret = {
     time: coalesce(time, sql<number>`0`),
     percent: coalesce(percent, sql<number>`0`),
-    playedDate: sql`to_char(${playedDate}, 'YYYY-MM-DD"T"HH24:MI:SS"Z"')`,
+    playedDate: sql<string>`to_char(${playedDate}, 'YYYY-MM-DD"T"HH24:MI:SS"Z"')`,
     videoId: sql<string>`${videoId}`,
   };
   if (!aliased) return ret;
```
```diff
@@ -27,9 +27,9 @@ function getRedirectToImageHandler({ filter }: { filter?: SQL }) {
     status,
     redirect,
   }: {
-    params: { id: string; image: "poster" | "thumbnail" | "banner" | "logo" };
+    params: { id?: string; image: "poster" | "thumbnail" | "banner" | "logo" };
     headers: { "accept-language": string };
-    query: { quality: "high" | "medium" | "low" };
+    query: { quality?: "high" | "medium" | "low" };
     set: Context["set"];
     status: Context["status"];
     redirect: Context["redirect"];
@@ -212,12 +212,9 @@ export const imagesH = new Elysia({ tags: ["images"] })
     },
   )
   .guard({
-    headers: t.Object(
-      {
-        "accept-language": AcceptLanguage(),
-      },
-      { additionalProperties: true },
-    ),
+    headers: t.Object({
+      "accept-language": AcceptLanguage(),
+    }),
   })
   .get(
     "/studios/:id/logo",
@@ -307,6 +304,9 @@ export const imagesH = new Elysia({ tags: ["images"] })
         description: "The type of image to retrive.",
       }),
     }),
+    headers: t.Object({
+      "accept-language": AcceptLanguage(),
+    }),
   })
   .get(
     "/movies/:id/:image",
```
```diff
@@ -1,4 +1,6 @@
 import path from "node:path";
+import { getCurrentSpan, setAttributes } from "@elysiajs/opentelemetry";
+import { SpanStatusCode } from "@opentelemetry/api";
 import { encode } from "blurhash";
 import { and, eq, is, lt, type SQL, sql } from "drizzle-orm";
 import { PgColumn, type PgTable } from "drizzle-orm/pg-core";
@@ -7,13 +9,15 @@ import type { PoolClient } from "pg";
 import sharp from "sharp";
 import { db, type Transaction } from "~/db";
 import { mqueue } from "~/db/schema/mqueue";
+import { unnestValues } from "~/db/utils";
 import type { Image } from "~/models/utils";
+import { record } from "~/otel";
 import { getFile } from "~/utils";

-export const imageDir = process.env.IMAGES_PATH ?? "./images";
+export const imageDir = process.env.IMAGES_PATH ?? "/images";
 export const defaultBlurhash = "000000";

-type ImageTask = {
+export type ImageTask = {
   id: string;
   url: string;
   table: string;
@@ -23,12 +27,12 @@ type ImageTask = {
 // this will only push a task to the image downloader service and not download it instantly.
 // this is both done to prevent too many requests to be sent at once and to make sure POST
 // requests are not blocked by image downloading or blurhash calculation
-export const enqueueOptImage = async (
-  tx: Transaction,
+export const enqueueOptImage = (
+  imgQueue: ImageTask[],
   img:
-    | { url: string | null; column: PgColumn }
-    | { url: string | null; table: PgTable; column: SQL },
-): Promise<Image | null> => {
+    | { url?: string | null; column: PgColumn }
+    | { url?: string | null; table: PgTable; column: SQL },
+): Image | null => {
   if (!img.url) return null;

   const hasher = new Bun.CryptoHasher("sha256");
@@ -64,11 +68,8 @@
     table: db.dialect.sqlToQuery(sql`${img.column.table}`).sql,
     column: sql.identifier(img.column.name).value,
   };
-  await tx.insert(mqueue).values({
-    kind: "image",
-    message,
-  });
-  await tx.execute(sql`notify kyoo_image`);
+
+  imgQueue.push(message);

   return {
     id,
@@ -77,45 +78,21 @@
   };
 };

-export const processImages = async () => {
-  async function processOne() {
-    return await db.transaction(async (tx) => {
-      const [item] = await tx
-        .select()
-        .from(mqueue)
-        .for("update", { skipLocked: true })
-        .where(and(eq(mqueue.kind, "image"), lt(mqueue.attempt, 5)))
-        .orderBy(mqueue.attempt, mqueue.createdAt)
-        .limit(1);
-
-      if (!item) return false;
-
-      const img = item.message as ImageTask;
-      try {
-        const blurhash = await downloadImage(img.id, img.url);
-        const ret: Image = { id: img.id, source: img.url, blurhash };
-
-        const table = sql.raw(img.table);
-        const column = sql.raw(img.column);
-
-        await tx.execute(sql`
-          update ${table} set ${column} = ${ret}
-          where ${column}->'id' = ${sql.raw(`'"${img.id}"'::jsonb`)}
-        `);
-
-        await tx.delete(mqueue).where(eq(mqueue.id, item.id));
-      } catch (err: any) {
-        console.error("Failed to download image", img.url, err.message);
-        // don't use the transaction here, it can be aborted.
-        await db
-          .update(mqueue)
-          .set({ attempt: sql`${mqueue.attempt}+1` })
-          .where(eq(mqueue.id, item.id));
-      }
-      return true;
-    });
-  }
+export const flushImageQueue = record(
+  "enqueueImages",
+  async (tx: Transaction, imgQueue: ImageTask[], priority: number) => {
+    if (!imgQueue.length) return;
+    await tx.insert(mqueue).select(
+      unnestValues(
+        imgQueue.map((x) => ({ kind: "image", message: x, priority })),
+        mqueue,
+      ),
+    );
+    await tx.execute(sql`notify kyoo_image`);
+  },
+);

+export const processImages = record("processImages", async () => {
   let running = false;
   async function processAll() {
     if (running) return;
@@ -138,7 +115,50 @@
   // start processing old tasks
   await processAll();
   return () => client.release(true);
-};
+});
+
+const processOne = record("download", async () => {
+  return await db.transaction(async (tx) => {
+    const [item] = await tx
+      .select()
+      .from(mqueue)
+      .for("update", { skipLocked: true })
+      .where(and(eq(mqueue.kind, "image"), lt(mqueue.attempt, 5)))
+      .orderBy(mqueue.priority, mqueue.attempt, mqueue.createdAt)
+      .limit(1);
+
+    if (!item) return false;
+
+    const img = item.message as ImageTask;
+    setAttributes({ "item.url": img.url });
+    try {
+      const blurhash = await downloadImage(img.id, img.url);
+      const ret: Image = { id: img.id, source: img.url, blurhash };
+
+      const table = sql.raw(img.table);
+      const column = sql.raw(img.column);
+
+      await tx.execute(sql`
+        update ${table} set ${column} = ${ret}
+        where ${column}->'id' = ${sql.raw(`'"${img.id}"'::jsonb`)}
+      `);
+
+      await tx.delete(mqueue).where(eq(mqueue.id, item.id));
+    } catch (err: any) {
+      const span = getCurrentSpan();
+      if (span) {
+        span.recordException(err);
+        span.setStatus({ code: SpanStatusCode.ERROR });
+      }
+      console.error("Failed to download image", img.url, err.message);
+      await tx
+        .update(mqueue)
+        .set({ attempt: sql`${mqueue.attempt}+1` })
+        .where(eq(mqueue.id, item.id));
+    }
+    return true;
+  });
+});

 async function downloadImage(id: string, url: string): Promise<string> {
   const low = await getFile(path.join(imageDir, `${id}.low.jpg`))
```
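The consumer side keeps its transactional shape: each task is claimed with `for update skip locked` so concurrent workers never grab the same row, success deletes the row in the same transaction, failure bumps `attempt`, and `notify kyoo_image` wakes idle workers. A stripped-down sketch of that loop in plain `pg` (table and channel names come from this diff; everything else is illustrative, not Kyoo's actual code):

```ts
import { Client } from "pg";

// Illustrative worker loop; the `running` guard in the diff above
// (processAll) serves the same purpose as draining serially here.
async function imageWorker() {
  const client = new Client();
  await client.connect();
  await client.query("listen kyoo_image");
  client.on("notification", () => void drain(client));
  await drain(client); // pick up tasks queued while the worker was down
}

async function drain(client: Client) {
  for (;;) {
    await client.query("begin");
    const { rows } = await client.query(
      `select id, message from kyoo.mqueue
       where kind = 'image' and attempt < 5
       order by priority, attempt, created_at
       limit 1
       for update skip locked`, // assumed column spellings
    );
    if (!rows.length) {
      await client.query("commit");
      return;
    }
    try {
      // ... download the image, write it back to the owning row ...
      await client.query("delete from kyoo.mqueue where id = $1", [rows[0].id]);
      await client.query("commit");
    } catch {
      await client.query("rollback");
      await client.query(
        "update kyoo.mqueue set attempt = attempt + 1 where id = $1",
        [rows[0].id],
      );
    }
  }
}
```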
```diff
@@ -5,79 +5,89 @@ import { conflictUpdateAllExcept } from "~/db/utils";
 import type { SeedCollection } from "~/models/collections";
 import type { SeedMovie } from "~/models/movie";
 import type { SeedSerie } from "~/models/serie";
-import { enqueueOptImage } from "../images";
+import { record } from "~/otel";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";

 type ShowTrans = typeof showTranslations.$inferInsert;

-export const insertCollection = async (
-  collection: SeedCollection | undefined,
-  show: (({ kind: "movie" } & SeedMovie) | ({ kind: "serie" } & SeedSerie)) & {
-    nextRefresh: string;
-  },
-) => {
+export const insertCollection = record(
+  "insertCollection",
+  async (
+    collection: SeedCollection | undefined,
+    show: (
+      | ({ kind: "movie" } & SeedMovie)
+      | ({ kind: "serie" } & SeedSerie)
+    ) & {
+      nextRefresh: string;
+    },
+  ) => {
   if (!collection) return null;
   const { translations, ...col } = collection;

   return await db.transaction(async (tx) => {
+    const imgQueue: ImageTask[] = [];
     const [ret] = await tx
       .insert(shows)
       .values({
         kind: "collection",
         status: "unknown",
         startAir: show.kind === "movie" ? show.airDate : show.startAir,
         endAir: show.kind === "movie" ? show.airDate : show.endAir,
         nextRefresh: show.nextRefresh,
         entriesCount: 0,
         original: {} as any,
         ...col,
       })
       .onConflictDoUpdate({
         target: shows.slug,
         set: {
           ...conflictUpdateAllExcept(shows, [
             "pk",
             "id",
             "slug",
             "createdAt",
             "startAir",
             "endAir",
           ]),
           startAir: sql`least(${shows.startAir}, excluded.start_air)`,
           endAir: sql`greatest(${shows.endAir}, excluded.end_air)`,
         },
       })
       .returning({ pk: shows.pk, id: shows.id, slug: shows.slug });

-    const trans: ShowTrans[] = await Promise.all(
-      Object.entries(translations).map(async ([lang, tr]) => ({
+    const trans: ShowTrans[] = Object.entries(translations).map(
+      ([lang, tr]) => ({
         pk: ret.pk,
         language: lang,
         ...tr,
-        poster: await enqueueOptImage(tx, {
+        poster: enqueueOptImage(imgQueue, {
           url: tr.poster,
           column: showTranslations.poster,
         }),
-        thumbnail: await enqueueOptImage(tx, {
+        thumbnail: enqueueOptImage(imgQueue, {
           url: tr.thumbnail,
           column: showTranslations.thumbnail,
         }),
-        logo: await enqueueOptImage(tx, {
+        logo: enqueueOptImage(imgQueue, {
           url: tr.logo,
           column: showTranslations.logo,
         }),
-        banner: await enqueueOptImage(tx, {
+        banner: enqueueOptImage(imgQueue, {
           url: tr.banner,
           column: showTranslations.banner,
         }),
-      })),
+      }),
     );
+    await flushImageQueue(tx, imgQueue, 100);
+    // we can't unnest values here because show translations contains arrays.
     await tx
       .insert(showTranslations)
       .values(trans)
       .onConflictDoUpdate({
         target: [showTranslations.pk, showTranslations.language],
         set: conflictUpdateAllExcept(showTranslations, ["pk", "language"]),
       });
     return ret;
   });
-};
+  },
+);
```
```diff
@@ -6,9 +6,10 @@ import {
   entryVideoJoin,
   videos,
 } from "~/db/schema";
-import { conflictUpdateAllExcept, values } from "~/db/utils";
+import { conflictUpdateAllExcept, unnest, unnestValues } from "~/db/utils";
 import type { SeedEntry as SEntry, SeedExtra as SExtra } from "~/models/entry";
-import { enqueueOptImage } from "../images";
+import { record } from "~/otel";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 import { guessNextRefresh } from "../refresh";
 import { updateAvailableCount, updateAvailableSince } from "./shows";

@@ -42,22 +43,24 @@ const generateSlug = (
   }
 };

-export const insertEntries = async (
-  show: { pk: number; slug: string; kind: "movie" | "serie" | "collection" },
-  items: (SeedEntry | SeedExtra)[],
-  onlyExtras = false,
-) => {
-  if (!items.length) return [];
-
-  const retEntries = await db.transaction(async (tx) => {
-    const vals: EntryI[] = await Promise.all(
-      items.map(async (seed) => {
+export const insertEntries = record(
+  "insertEntries",
+  async (
+    show: { pk: number; slug: string; kind: "movie" | "serie" | "collection" },
+    items: (SeedEntry | SeedExtra)[],
+    onlyExtras = false,
+  ) => {
+    if (!items.length) return [];
+
+    const retEntries = await db.transaction(async (tx) => {
+      const imgQueue: ImageTask[] = [];
+      const vals: EntryI[] = items.map((seed) => {
       const { translations, videos, video, ...entry } = seed;
       return {
         ...entry,
         showPk: show.pk,
         slug: generateSlug(show.slug, seed),
-        thumbnail: await enqueueOptImage(tx, {
+        thumbnail: enqueueOptImage(imgQueue, {
           url: seed.thumbnail,
           column: entries.thumbnail,
         }),
@@ -72,136 +75,132 @@
           ? entry.number
           : undefined,
       };
-      }),
-    );
+      });
     const ret = await tx
       .insert(entries)
-      .values(vals)
+      .select(unnestValues(vals, entries))
       .onConflictDoUpdate({
         target: entries.slug,
         set: conflictUpdateAllExcept(entries, [
           "pk",
           "showPk",
           "id",
           "slug",
           "createdAt",
         ]),
       })
       .returning({ pk: entries.pk, id: entries.id, slug: entries.slug });

-    const trans: EntryTransI[] = (
-      await Promise.all(
-        items.map(async (seed, i) => {
+    const trans: EntryTransI[] = items.flatMap((seed, i) => {
       if (seed.kind === "extra") {
         return [
           {
             pk: ret[i].pk,
             // yeah we hardcode the language to extra because if we want to support
             // translations one day it won't be awkward
             language: "extra",
             name: seed.name,
             description: null,
             poster: undefined,
           },
         ];
       }

-      return await Promise.all(
-        Object.entries(seed.translations).map(async ([lang, tr]) => ({
+      return Object.entries(seed.translations).map(([lang, tr]) => ({
         // assumes ret is ordered like items.
         pk: ret[i].pk,
         language: lang,
         ...tr,
         poster:
           seed.kind === "movie"
-            ? await enqueueOptImage(tx, {
+            ? enqueueOptImage(imgQueue, {
                 url: (tr as any).poster,
                 column: entryTranslations.poster,
               })
             : undefined,
-        })),
-      );
-    }),
-      )
-    ).flat();
+      }));
+    });
+    await flushImageQueue(tx, imgQueue, 0);
     await tx
       .insert(entryTranslations)
-      .values(trans)
+      .select(unnestValues(trans, entryTranslations))
       .onConflictDoUpdate({
         target: [entryTranslations.pk, entryTranslations.language],
         set: conflictUpdateAllExcept(entryTranslations, ["pk", "language"]),
       });

     return ret;
   });

   const vids = items.flatMap((seed, i) => {
     if (seed.kind === "extra") {
       return {
         videoId: seed.video,
         entryPk: retEntries[i].pk,
         entrySlug: retEntries[i].slug,
         needRendering: false,
       };
     }
     if (!seed.videos) return [];
     return seed.videos.map((x, j) => ({
       videoId: x,
       entryPk: retEntries[i].pk,
       entrySlug: retEntries[i].slug,
       // The first video should not have a rendering.
       needRendering: j !== 0 && seed.videos!.length > 1,
     }));
   });

   if (vids.length === 0) {
     // we have not added videos but we need to update the `entriesCount`
     if (show.kind === "serie" && !onlyExtras)
       await updateAvailableCount(db, [show.pk], true);
     return retEntries.map((x) => ({ id: x.id, slug: x.slug, videos: [] }));
   }

   const retVideos = await db.transaction(async (tx) => {
     const ret = await tx
       .insert(entryVideoJoin)
       .select(
         db
           .select({
-            entryPk: sql<number>`vids.entryPk`.as("entry"),
+            entryPk: sql<number>`vids."entryPk"`.as("entry"),
             videoPk: videos.pk,
             slug: computeVideoSlug(
-              sql`vids.entrySlug`,
-              sql`vids.needRendering`,
+              sql`vids."entrySlug"`,
+              sql`vids."needRendering"`,
             ),
           })
           .from(
-            values(vids, {
-              entryPk: "integer",
-              needRendering: "boolean",
-              videoId: "uuid",
-            }).as("vids"),
+            unnest(vids, "vids", {
+              entryPk: "integer",
+              entrySlug: "varchar(255)",
+              needRendering: "boolean",
+              videoId: "uuid",
+            }),
           )
-          .innerJoin(videos, eq(videos.id, sql`vids.videoId`)),
+          .innerJoin(videos, eq(videos.id, sql`vids."videoId"`)),
       )
       .onConflictDoNothing()
       .returning({
         slug: entryVideoJoin.slug,
         entryPk: entryVideoJoin.entryPk,
       });

     if (!onlyExtras)
       await updateAvailableCount(tx, [show.pk], show.kind === "serie");

     await updateAvailableSince(tx, [...new Set(vids.map((x) => x.entryPk))]);
     return ret;
   });

   return retEntries.map((entry) => ({
     id: entry.id,
     slug: entry.slug,
     videos: retVideos.filter((x) => x.entryPk === entry.pk),
   }));
-};
+  },
+);

 export function computeVideoSlug(entrySlug: SQL | Column, needsRendering: SQL) {
   return sql<string>`
```
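Several inserts above switch from `.values(rows)` to `.select(unnestValues(rows, table))`. The helper's source isn't part of this diff, but the pattern it names is standard Postgres: bind one array parameter per column and let `unnest` zip the arrays back into rows, so a large batch costs a handful of parameters instead of rows × columns. The SQL shape, sketched with a hypothetical table and assumed column names:

```ts
import { Client } from "pg";

// Sketch of the SQL an unnest-based bulk insert generates; `unnestValues`
// itself is not shown in the diff and may differ. `my_table` and its
// columns are hypothetical.
async function bulkInsert(
  client: Client,
  rows: { slug: string; name: string }[],
) {
  // Two array parameters, regardless of how many rows are inserted.
  await client.query(
    `insert into my_table (slug, name)
     select * from unnest($1::varchar[], $2::varchar[])`,
    [rows.map((r) => r.slug), rows.map((r) => r.name)],
  );
}
```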
```diff
@@ -1,82 +1,78 @@
 import { db } from "~/db";
 import { seasons, seasonTranslations } from "~/db/schema";
-import { conflictUpdateAllExcept } from "~/db/utils";
+import { conflictUpdateAllExcept, unnestValues } from "~/db/utils";
 import type { SeedSeason } from "~/models/season";
-import { enqueueOptImage } from "../images";
+import { record } from "~/otel";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";
 import { guessNextRefresh } from "../refresh";

 type SeasonI = typeof seasons.$inferInsert;
 type SeasonTransI = typeof seasonTranslations.$inferInsert;

-export const insertSeasons = async (
-  show: { pk: number; slug: string },
-  items: SeedSeason[],
-) => {
-  if (!items.length) return [];
+export const insertSeasons = record(
+  "insertSeasons",
+  async (show: { pk: number; slug: string }, items: SeedSeason[]) => {
+    if (!items.length) return [];

   return db.transaction(async (tx) => {
+    const imgQueue: ImageTask[] = [];
     const vals: SeasonI[] = items.map((x) => {
       const { translations, ...season } = x;
       return {
         ...season,
         showPk: show.pk,
         slug:
           season.seasonNumber === 0
             ? `${show.slug}-specials`
             : `${show.slug}-s${season.seasonNumber}`,
         nextRefresh: guessNextRefresh(season.startAir ?? new Date()),
       };
     });
     const ret = await tx
       .insert(seasons)
-      .values(vals)
+      .select(unnestValues(vals, seasons))
       .onConflictDoUpdate({
         target: seasons.slug,
         set: conflictUpdateAllExcept(seasons, [
           "pk",
           "showPk",
           "id",
           "slug",
           "createdAt",
         ]),
       })
       .returning({ pk: seasons.pk, id: seasons.id, slug: seasons.slug });

-    const trans: SeasonTransI[] = (
-      await Promise.all(
-        items.map(
-          async (seed, i) =>
-            await Promise.all(
-              Object.entries(seed.translations).map(async ([lang, tr]) => ({
-                // assumes ret is ordered like items.
-                pk: ret[i].pk,
-                language: lang,
-                ...tr,
-                poster: await enqueueOptImage(tx, {
-                  url: tr.poster,
-                  column: seasonTranslations.poster,
-                }),
-                thumbnail: await enqueueOptImage(tx, {
-                  url: tr.thumbnail,
-                  column: seasonTranslations.thumbnail,
-                }),
-                banner: await enqueueOptImage(tx, {
-                  url: tr.banner,
-                  column: seasonTranslations.banner,
-                }),
-              })),
-            ),
-        ),
-      )
-    ).flat();
+    const trans: SeasonTransI[] = items.flatMap((seed, i) =>
+      Object.entries(seed.translations).map(([lang, tr]) => ({
+        // assumes ret is ordered like items.
+        pk: ret[i].pk,
+        language: lang,
+        ...tr,
+        poster: enqueueOptImage(imgQueue, {
+          url: tr.poster,
+          column: seasonTranslations.poster,
+        }),
+        thumbnail: enqueueOptImage(imgQueue, {
+          url: tr.thumbnail,
+          column: seasonTranslations.thumbnail,
+        }),
+        banner: enqueueOptImage(imgQueue, {
+          url: tr.banner,
+          column: seasonTranslations.banner,
+        }),
+      })),
+    );
+    await flushImageQueue(tx, imgQueue, -10);
     await tx
       .insert(seasonTranslations)
-      .values(trans)
+      .select(unnestValues(trans, seasonTranslations))
       .onConflictDoUpdate({
         target: [seasonTranslations.pk, seasonTranslations.language],
         set: conflictUpdateAllExcept(seasonTranslations, ["pk", "language"]),
       });

     return ret;
   });
-};
+  },
+);
```
```diff
@@ -21,86 +21,93 @@ import type { SeedCollection } from "~/models/collections";
 import type { SeedMovie } from "~/models/movie";
 import type { SeedSerie } from "~/models/serie";
 import type { Original } from "~/models/utils";
 import { record } from "~/otel";
 import { getYear } from "~/utils";
-import { enqueueOptImage } from "../images";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";

 type Show = typeof shows.$inferInsert;
 type ShowTrans = typeof showTranslations.$inferInsert;

-export const insertShow = async (
-  show: Omit<Show, "original">,
-  original: Original & {
-    poster: string | null;
-    thumbnail: string | null;
-    banner: string | null;
-    logo: string | null;
-  },
-  translations:
-    | SeedMovie["translations"]
-    | SeedSerie["translations"]
-    | SeedCollection["translations"],
-) => {
-  return await db.transaction(async (tx) => {
-    const orig = {
-      ...original,
-      poster: await enqueueOptImage(tx, {
-        url: original.poster,
-        table: shows,
-        column: sql`${shows.original}['poster']`,
-      }),
-      thumbnail: await enqueueOptImage(tx, {
-        url: original.thumbnail,
-        table: shows,
-        column: sql`${shows.original}['thumbnail']`,
-      }),
-      banner: await enqueueOptImage(tx, {
-        url: original.banner,
-        table: shows,
-        column: sql`${shows.original}['banner']`,
-      }),
-      logo: await enqueueOptImage(tx, {
-        url: original.logo,
-        table: shows,
-        column: sql`${shows.original}['logo']`,
-      }),
-    };
-    const ret = await insertBaseShow(tx, { ...show, original: orig });
-    if ("status" in ret) return ret;
+export const insertShow = record(
+  "insertShow",
+  async (
+    show: Omit<Show, "original">,
+    original: Original & {
+      poster?: string | null;
+      thumbnail?: string | null;
+      banner?: string | null;
+      logo?: string | null;
+    },
+    translations:
+      | SeedMovie["translations"]
+      | SeedSerie["translations"]
+      | SeedCollection["translations"],
+  ) => {
+    return await db.transaction(async (tx) => {
+      const imgQueue: ImageTask[] = [];
+      const orig = {
+        ...original,
+        poster: enqueueOptImage(imgQueue, {
+          url: original.poster,
+          table: shows,
+          column: sql`${shows.original}['poster']`,
+        }),
+        thumbnail: enqueueOptImage(imgQueue, {
+          url: original.thumbnail,
+          table: shows,
+          column: sql`${shows.original}['thumbnail']`,
+        }),
+        banner: enqueueOptImage(imgQueue, {
+          url: original.banner,
+          table: shows,
+          column: sql`${shows.original}['banner']`,
+        }),
+        logo: enqueueOptImage(imgQueue, {
+          url: original.logo,
+          table: shows,
+          column: sql`${shows.original}['logo']`,
+        }),
+      };
+      const ret = await insertBaseShow(tx, { ...show, original: orig });
+      if ("status" in ret) return ret;

-    const trans: ShowTrans[] = await Promise.all(
-      Object.entries(translations).map(async ([lang, tr]) => ({
-        pk: ret.pk,
-        language: lang,
-        ...tr,
-        latinName: tr.latinName ?? null,
-        poster: await enqueueOptImage(tx, {
-          url: tr.poster,
-          column: showTranslations.poster,
-        }),
-        thumbnail: await enqueueOptImage(tx, {
-          url: tr.thumbnail,
-          column: showTranslations.thumbnail,
-        }),
-        logo: await enqueueOptImage(tx, {
-          url: tr.logo,
-          column: showTranslations.logo,
-        }),
-        banner: await enqueueOptImage(tx, {
-          url: tr.banner,
-          column: showTranslations.banner,
-        }),
-      })),
-    );
+      const trans: ShowTrans[] = Object.entries(translations).map(
+        ([lang, tr]) => ({
+          pk: ret.pk,
+          language: lang,
+          ...tr,
+          latinName: tr.latinName ?? null,
+          poster: enqueueOptImage(imgQueue, {
+            url: tr.poster,
+            column: showTranslations.poster,
+          }),
+          thumbnail: enqueueOptImage(imgQueue, {
+            url: tr.thumbnail,
+            column: showTranslations.thumbnail,
+          }),
+          logo: enqueueOptImage(imgQueue, {
+            url: tr.logo,
+            column: showTranslations.logo,
+          }),
+          banner: enqueueOptImage(imgQueue, {
+            url: tr.banner,
+            column: showTranslations.banner,
+          }),
+        }),
+      );
+      await flushImageQueue(tx, imgQueue, 200);
+      // we can't unnest values here because show translations contains arrays.
       await tx
         .insert(showTranslations)
         .values(trans)
         .onConflictDoUpdate({
           target: [showTranslations.pk, showTranslations.language],
           set: conflictUpdateAllExcept(showTranslations, ["pk", "language"]),
         });
       return ret;
     });
-};
+  },
+);

 async function insertBaseShow(tx: Transaction, show: Show) {
   function insert() {
```
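`record` (imported from `~/otel`) now wraps every seeding step. Its implementation isn't included in this diff; from the call sites it takes a span name plus an async function and returns the function instrumented. A plausible reconstruction on top of the standard `@opentelemetry/api` tracer:

```ts
// Hypothetical reconstruction of ~/otel's `record`, inferred from its call
// sites in this diff; the real helper may differ.
import { SpanStatusCode, trace } from "@opentelemetry/api";

const tracer = trace.getTracer("kyoo.api");

export function record<Args extends unknown[], R>(
  name: string,
  fn: (...args: Args) => Promise<R>,
): (...args: Args) => Promise<R> {
  return (...args) =>
    tracer.startActiveSpan(name, async (span) => {
      try {
        return await fn(...args);
      } catch (err) {
        span.recordException(err as Error);
        span.setStatus({ code: SpanStatusCode.ERROR });
        throw err;
      } finally {
        span.end();
      }
    });
}
```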
```diff
@@ -1,58 +1,67 @@
 import { eq, sql } from "drizzle-orm";
 import { db } from "~/db";
 import { roles, staff } from "~/db/schema";
-import { conflictUpdateAllExcept } from "~/db/utils";
+import { conflictUpdateAllExcept, unnestValues } from "~/db/utils";
 import type { SeedStaff } from "~/models/staff";
-import { enqueueOptImage } from "../images";
+import { record } from "~/otel";
+import { uniqBy } from "~/utils";
+import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";

-export const insertStaff = async (
-  seed: SeedStaff[] | undefined,
-  showPk: number,
-) => {
-  if (!seed?.length) return [];
+export const insertStaff = record(
+  "insertStaff",
+  async (seed: SeedStaff[] | undefined, showPk: number) => {
+    if (!seed?.length) return [];

   return await db.transaction(async (tx) => {
-    const people = await Promise.all(
-      seed.map(async (x) => ({
+    const imgQueue: ImageTask[] = [];
+    const people = uniqBy(
+      seed.map((x) => ({
         ...x.staff,
-        image: await enqueueOptImage(tx, {
+        image: enqueueOptImage(imgQueue, {
           url: x.staff.image,
           column: staff.image,
         }),
       })),
+      (x) => x.slug,
     );
     const ret = await tx
       .insert(staff)
-      .values(people)
+      .select(unnestValues(people, staff))
       .onConflictDoUpdate({
         target: staff.slug,
-        set: conflictUpdateAllExcept(staff, ["pk", "id", "slug", "createdAt"]),
+        set: conflictUpdateAllExcept(staff, [
+          "pk",
+          "id",
+          "slug",
+          "createdAt",
+        ]),
       })
       .returning({ pk: staff.pk, id: staff.id, slug: staff.slug });

-    const rval = await Promise.all(
-      seed.map(async (x, i) => ({
+    const rval = seed.map((x, i) => ({
       showPk,
-      staffPk: ret[i].pk,
+      staffPk: ret.find((y) => y.slug === x.staff.slug)!.pk,
       kind: x.kind,
       order: i,
       character: {
         ...x.character,
-        image: await enqueueOptImage(tx, {
+        image: enqueueOptImage(imgQueue, {
           url: x.character.image,
           table: roles,
           column: sql`${roles.character}['image']`,
         }),
       },
-    })),
-    );
+    }));
+    await flushImageQueue(tx, imgQueue, -200);

     // always replace all roles. this is because:
     // - we want `order` to stay in sync (& without duplicates)
     // - we don't have ways to identify a role so we can't onConflict
     await tx.delete(roles).where(eq(roles.showPk, showPk));
-    await tx.insert(roles).values(rval);
+    await tx.insert(roles).select(unnestValues(rval, roles));

     return ret;
   });
-};
+  },
+);
```
@@ -1,68 +1,76 @@
import { sql } from "drizzle-orm";
import { db } from "~/db";
import { showStudioJoin, studios, studioTranslations } from "~/db/schema";
import { conflictUpdateAllExcept } from "~/db/utils";
import { conflictUpdateAllExcept, sqlarr, unnestValues } from "~/db/utils";
import type { SeedStudio } from "~/models/studio";
import { enqueueOptImage } from "../images";
import { record } from "~/otel";
import { uniqBy } from "~/utils";
import { enqueueOptImage, flushImageQueue, type ImageTask } from "../images";

type StudioI = typeof studios.$inferInsert;
type StudioTransI = typeof studioTranslations.$inferInsert;

export const insertStudios = async (
seed: SeedStudio[] | undefined,
showPk: number,
) => {
if (!seed?.length) return [];
export const insertStudios = record(
"insertStudios",
async (seed: SeedStudio[] | undefined, showPk: number) => {
if (!seed?.length) return [];

return await db.transaction(async (tx) => {
const vals: StudioI[] = seed.map((x) => {
const { translations, ...item } = x;
return item;
});

const ret = await tx
.insert(studios)
.values(vals)
.onConflictDoUpdate({
target: studios.slug,
set: conflictUpdateAllExcept(studios, [
"pk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: studios.pk, id: studios.id, slug: studios.slug });

const trans: StudioTransI[] = (
await Promise.all(
seed.map(
async (x, i) =>
await Promise.all(
Object.entries(x.translations).map(async ([lang, tr]) => ({
pk: ret[i].pk,
language: lang,
name: tr.name,
logo: await enqueueOptImage(tx, {
url: tr.logo,
column: studioTranslations.logo,
}),
})),
),
),
)
).flat();
await tx
.insert(studioTranslations)
.values(trans)
.onConflictDoUpdate({
target: [studioTranslations.pk, studioTranslations.language],
set: conflictUpdateAllExcept(studioTranslations, ["pk", "language"]),
return await db.transaction(async (tx) => {
seed = uniqBy(seed!, (x) => x.slug);
const vals: StudioI[] = seed.map((x) => {
const { translations, ...item } = x;
return item;
});

await tx
.insert(showStudioJoin)
.values(ret.map((studio) => ({ showPk: showPk, studioPk: studio.pk })))
.onConflictDoNothing();
return ret;
});
};
const ret = await tx
.insert(studios)
.select(unnestValues(vals, studios))
.onConflictDoUpdate({
target: studios.slug,
set: conflictUpdateAllExcept(studios, [
"pk",
"id",
"slug",
"createdAt",
]),
})
.returning({ pk: studios.pk, id: studios.id, slug: studios.slug });

const imgQueue: ImageTask[] = [];
const trans: StudioTransI[] = seed.flatMap((x, i) =>
Object.entries(x.translations).map(([lang, tr]) => ({
pk: ret[i].pk,
language: lang,
name: tr.name,
logo: enqueueOptImage(imgQueue, {
url: tr.logo,
column: studioTranslations.logo,
}),
})),
);
await flushImageQueue(tx, imgQueue, -100);
await tx
.insert(studioTranslations)
.select(unnestValues(trans, studioTranslations))
.onConflictDoUpdate({
target: [studioTranslations.pk, studioTranslations.language],
set: conflictUpdateAllExcept(studioTranslations, ["pk", "language"]),
});

await tx
.insert(showStudioJoin)
.select(
db
.select({
showPk: sql`${showPk}`.as("showPk"),
studioPk: sql`v."studioPk"`.as("studioPk"),
})
.from(
sql`unnest(${sqlarr(ret.map((x) => x.pk))}::integer[]) as v("studioPk")`,
),
)
.onConflictDoNothing();
return ret;
});
},
);

@@ -55,20 +55,13 @@ export const seedMovie = async (
const { translations, videos, collection, studios, staff, ...movie } = seed;
const nextRefresh = guessNextRefresh(movie.airDate ?? new Date());

const original = translations[movie.originalLanguage];
if (!original) {
return {
status: 422,
message: "No translation available in the original language.",
};
}

const col = await insertCollection(collection, {
kind: "movie",
nextRefresh,
...seed,
});

const original = translations[movie.originalLanguage];
const show = await insertShow(
{
kind: "movie",
@@ -78,11 +71,17 @@ export const seedMovie = async (
entriesCount: 1,
...movie,
},
{
...original,
latinName: original.latinName ?? null,
language: movie.originalLanguage,
},
original
? {
...original,
latinName: original.latinName ?? null,
language: movie.originalLanguage,
}
: {
name: null,
latinName: null,
language: movie.originalLanguage,
},
translations,
);
if ("status" in show) return show;

@@ -91,20 +91,13 @@ export const seedSerie = async (
} = seed;
const nextRefresh = guessNextRefresh(serie.startAir ?? new Date());

const original = translations[serie.originalLanguage];
if (!original) {
return {
status: 422,
message: "No translation available in the original language.",
};
}

const col = await insertCollection(collection, {
kind: "serie",
nextRefresh,
...seed,
});

const original = translations[serie.originalLanguage];
const show = await insertShow(
{
kind: "serie",
@@ -113,11 +106,17 @@ export const seedSerie = async (
entriesCount: entries.length,
...serie,
},
{
...original,
latinName: original.latinName ?? null,
language: serie.originalLanguage,
},
original
? {
...original,
latinName: original.latinName ?? null,
language: serie.originalLanguage,
}
: {
name: null,
latinName: null,
language: serie.originalLanguage,
},
translations,
);
if ("status" in show) return show;

@@ -35,7 +35,8 @@ import {
jsonbBuildObject,
jsonbObjectAgg,
sqlarr,
values,
unnest,
unnestValues,
} from "~/db/utils";
import { Entry } from "~/models/entry";
import { KError } from "~/models/error";
@@ -129,10 +130,10 @@ async function linkVideos(
slug: computeVideoSlug(entriesQ.slug, hasRenderingQ),
})
.from(
values(links, {
unnest(links, "j", {
video: "integer",
entry: "jsonb",
}).as("j"),
}),
)
.innerJoin(videos, eq(videos.pk, sql`j.video`))
.innerJoin(
@@ -830,12 +831,15 @@ export const videosWriteH = new Elysia({ prefix: "/videos", tags: ["videos"] })
.post(
"",
async ({ body, status }) => {
if (body.length === 0) {
return status(422, { status: 422, message: "No videos" });
}
return await db.transaction(async (tx) => {
let vids: { pk: number; id: string; path: string; guess: Guess }[] = [];
try {
vids = await tx
.insert(videos)
.values(body)
.select(unnestValues(body, videos))
.onConflictDoUpdate({
target: [videos.path],
set: conflictUpdateAllExcept(videos, ["pk", "id", "createdAt"]),
@@ -924,6 +928,7 @@ export const videosWriteH = new Elysia({ prefix: "/videos", tags: ["videos"] })
description:
"Invalid rendering specified. (conflicts with an existing video)",
},
422: KError,
},
},
)

@@ -6,24 +6,27 @@ import { sql } from "drizzle-orm";
import { drizzle } from "drizzle-orm/node-postgres";
import { migrate as migrateDb } from "drizzle-orm/node-postgres/migrator";
import type { PoolConfig } from "pg";
import { record } from "~/otel";
import * as schema from "./schema";

async function getPostgresConfig(): Promise<PoolConfig> {
const config: PoolConfig = {
connectionString: process.env.POSTGRES_URL,
host: process.env.PGHOST ?? "postgres",
port: Number(process.env.PGPORT) || 5432,
database: process.env.PGDATABASE ?? "kyoo",
user: process.env.PGUSER ?? "kyoo",
password: process.env.PGPASSWORD ?? "password",
options: process.env.PGOPTIONS,
application_name: process.env.PGAPPNAME ?? "kyoo",
};
const config: PoolConfig = {
connectionString: process.env.POSTGRES_URL,
host: process.env.PGHOST ?? "postgres",
port: Number(process.env.PGPORT) || 5432,
database: process.env.PGDATABASE ?? "kyoo",
user: process.env.PGUSER ?? "kyoo",
password: process.env.PGPASSWORD ?? "password",
options: process.env.PGOPTIONS,
application_name: process.env.PGAPPNAME ?? "kyoo",
};

async function parseSslConfig(): Promise<PoolConfig> {
// Due to an upstream bug, if `ssl` is not falsey, an SSL connection will always be attempted. This means
// that non-SSL connection options under `ssl` (which is incorrectly named) cannot be set unless SSL is enabled.
if (!process.env.PGSSLMODE || process.env.PGSSLMODE === "disable")
if (!process.env.PGSSLMODE || process.env.PGSSLMODE === "disable") {
config.ssl = false;
return config;
}

// Despite this field's name, it is used to configure everything below the application layer.
const ssl: ConnectionOptions = {};
@@ -108,8 +111,23 @@ async function getPostgresConfig(): Promise<PoolConfig> {
return config;
}

const postgresConfig = await getPostgresConfig();
const postgresConfig = await parseSslConfig();
// use this when using drizzle-kit since it can't parse await statements
// const postgresConfig = config;

console.log("Connecting to postgres with config", {
...postgresConfig,
password: postgresConfig.password ? "<redacted>" : undefined,
ssl:
postgresConfig.ssl && typeof postgresConfig.ssl === "object"
? {
...postgresConfig.ssl,
key: "<redacted>",
cert: "<redacted>",
ca: "<redacted>",
}
: postgresConfig.ssl,
});
export const db = drizzle({
schema,
connection: postgresConfig,
@@ -119,24 +137,26 @@ instrumentDrizzleClient(db, {
maxQueryTextLength: 100_000_000,
});

export const migrate = async () => {
export const migrate = record("migrate", async () => {
const APP_SCHEMA = "kyoo";
try {
await db.execute(
sql.raw(`
create extension if not exists pg_trgm;
set pg_trgm.word_similarity_threshold = 0.4;
alter database "${postgresConfig.database}" set pg_trgm.word_similarity_threshold = 0.4;
`),
create schema if not exists ${APP_SCHEMA};
create extension if not exists pg_trgm schema ${APP_SCHEMA};
set pg_trgm.word_similarity_threshold = 0.4;
alter database "${postgresConfig.database}" set pg_trgm.word_similarity_threshold = 0.4;
`),
);
} catch (err: any) {
console.error("Error while updating pg_trgm", err.message);
}
await migrateDb(db, {
migrationsSchema: "kyoo",
migrationsSchema: APP_SCHEMA,
migrationsFolder: "./drizzle",
});
console.log(`Database ${postgresConfig.database} migrated!`);
};
});

export type Transaction =
| typeof db

@@ -12,9 +12,8 @@ import {
uuid,
varchar,
} from "drizzle-orm/pg-core";
import { timestamp } from "../utils";
import { shows } from "./shows";
import { image, language, schema } from "./utils";
import { image, language, schema, timestamp } from "./utils";
import { entryVideoJoin } from "./videos";

export const entryType = schema.enum("entry_type", [

@@ -1,9 +1,8 @@
import { sql } from "drizzle-orm";
import { check, index, integer } from "drizzle-orm/pg-core";
import { timestamp } from "../utils";
import { entries } from "./entries";
import { profiles } from "./profiles";
import { schema } from "./utils";
import { schema, timestamp } from "./utils";
import { videos } from "./videos";

export const history = schema.table(

@@ -1,7 +1,6 @@
import { sql } from "drizzle-orm";
import { index, integer, jsonb, uuid, varchar } from "drizzle-orm/pg-core";
import { timestamp } from "../utils";
import { schema } from "./utils";
import { schema, timestamp } from "./utils";

export const mqueue = schema.table(
"mqueue",
@@ -9,6 +8,7 @@ export const mqueue = schema.table(
id: uuid().notNull().primaryKey().defaultRandom(),
kind: varchar({ length: 255 }).notNull(),
message: jsonb().notNull(),
priority: integer().notNull().default(0),
attempt: integer().notNull().default(0),
createdAt: timestamp({ withTimezone: true, mode: "iso" })
.notNull()

@@ -10,9 +10,8 @@ import {
uuid,
varchar,
} from "drizzle-orm/pg-core";
import { timestamp } from "../utils";
import { shows } from "./shows";
import { image, language, schema } from "./utils";
import { image, language, schema, timestamp } from "./utils";

export const season_extid = () =>
jsonb()
@@ -40,7 +39,7 @@ export const seasons = schema.table(
startAir: date(),
endAir: date(),

entriesCount: integer().notNull(),
entriesCount: integer().notNull().default(0),
availableCount: integer().notNull().default(0),

externalId: season_extid(),
@@ -92,7 +91,7 @@ export const seasonRelations = relations(seasons, ({ one, many }) => ({

export const seasonTrRelations = relations(seasonTranslations, ({ one }) => ({
season: one(seasons, {
relationName: "season_translation",
relationName: "season_translations",
fields: [seasonTranslations.pk],
references: [seasons.pk],
}),

@@ -13,12 +13,11 @@ import {
varchar,
} from "drizzle-orm/pg-core";
import type { Image, Original } from "~/models/utils";
import { timestamp } from "../utils";
import { entries } from "./entries";
import { seasons } from "./seasons";
import { roles } from "./staff";
import { showStudioJoin } from "./studios";
import { externalid, image, language, schema } from "./utils";
import { externalid, image, language, schema, timestamp } from "./utils";

export const showKind = schema.enum("show_kind", [
"serie",

@@ -8,9 +8,8 @@ import {
varchar,
} from "drizzle-orm/pg-core";
import type { Character } from "~/models/staff";
import { timestamp } from "../utils";
import { shows } from "./shows";
import { externalid, image, schema } from "./utils";
import { externalid, image, schema, timestamp } from "./utils";

export const roleKind = schema.enum("role_kind", [
"actor",

@@ -7,9 +7,8 @@ import {
uuid,
varchar,
} from "drizzle-orm/pg-core";
import { timestamp } from "../utils";
import { shows } from "./shows";
import { externalid, image, language, schema } from "./utils";
import { externalid, image, language, schema, timestamp } from "./utils";

export const studios = schema.table("studios", {
pk: integer().primaryKey().generatedAlwaysAsIdentity(),

@@ -1,4 +1,4 @@
import { jsonb, pgSchema, varchar } from "drizzle-orm/pg-core";
import { customType, jsonb, pgSchema, varchar } from "drizzle-orm/pg-core";
import type { Image } from "~/models/utils";

export const schema = pgSchema("kyoo");
@@ -20,3 +20,19 @@ export const externalid = () =>
>()
.notNull()
.default({});

export const timestamp = customType<{
data: string;
driverData: string;
config: { withTimezone: boolean; precision?: number; mode: "iso" };
}>({
dataType(config) {
const precision = config?.precision ? ` (${config.precision})` : "";
return `timestamp${precision}${config?.withTimezone ? " with time zone" : ""}`;
},
fromDriver(value: string): string {
// postgres format: 2025-06-22 16:13:37.489301+00
// what we want: 2025-06-22T16:13:37Z
return `${value.substring(0, 10)}T${value.substring(11, 19)}Z`;
},
});

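For context, this custom `timestamp` type is what the schema files in this diff now import from "./utils"; a minimal sketch of a column declaration using it (mirroring the mqueue table above, nothing here is new API):

    // stored as "timestamp with time zone", read back as "2025-06-22T16:13:37Z"
    createdAt: timestamp({ withTimezone: true, mode: "iso" }).notNull(),
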
@@ -10,9 +10,8 @@ import {
varchar,
} from "drizzle-orm/pg-core";
import type { Guess } from "~/models/video";
import { timestamp } from "../utils";
import { entries } from "./entries";
import { schema } from "./utils";
import { schema, timestamp } from "./utils";

export const videos = schema.table(
"videos",

@@ -1,10 +1,9 @@
import { sql } from "drizzle-orm";
import { check, integer, primaryKey } from "drizzle-orm/pg-core";
import { timestamp } from "../utils";
import { entries } from "./entries";
import { profiles } from "./profiles";
import { shows } from "./shows";
import { schema } from "./utils";
import { schema, timestamp } from "./utils";

export const watchlistStatus = schema.enum("watchlist_status", [
"watching",

@@ -8,15 +8,16 @@ import {
type Subquery,
sql,
Table,
type TableConfig,
View,
ViewBaseConfig,
} from "drizzle-orm";
import type { CasingCache } from "drizzle-orm/casing";
import type { AnyMySqlSelect } from "drizzle-orm/mysql-core";
import {
type AnyPgSelect,
customType,
type SelectedFieldsFlat,
import type {
AnyPgSelect,
PgTableWithColumns,
SelectedFieldsFlat,
} from "drizzle-orm/pg-core";
import type { AnySQLiteSelect } from "drizzle-orm/sqlite-core";
import type { WithSubquery } from "drizzle-orm/subquery";
@@ -73,8 +74,22 @@ export function conflictUpdateAllExcept<
}

// drizzle is bugged and doesn't allow js arrays to be used in raw sql.
export function sqlarr(array: unknown[]) {
return `{${array.map((item) => `"${item}"`).join(",")}}`;
export function sqlarr(array: unknown[]): string {
function escapeStr(str: string) {
return str.replaceAll("\\", "\\\\").replaceAll('"', '\\"');
}

return `{${array
.map((item) =>
item === "null" || item === null || item === undefined
? "null"
: Array.isArray(item)
? sqlarr(item)
: typeof item === "object"
? `"${escapeStr(JSON.stringify(item))}"`
: `"${escapeStr(item.toString())}"`,
)
.join(", ")}}`;
}

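For illustration, the postgres array literals the hardened sqlarr now produces (example values only):

    sqlarr([1, "a"]);           // '{"1", "a"}'
    sqlarr(['say "hi"']);       // '{"say \"hi\""}' (quotes are escaped)
    sqlarr([null, [1, 2]]);     // '{null, {"1", "2"}}' (nulls and nested arrays)
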
// See https://github.com/drizzle-team/drizzle-orm/issues/4044
@@ -107,6 +122,102 @@ export function values<K extends string>(
};
}

/* goal:
* unnestValues([{a: 1, b: 2}, {a: 3, b: 4}], tbl)
*
* ```sql
* select a, b, now() as updated_at from unnest($1::integer[], $2::integer[]);
* ```
* params:
* $1: [1, 3] (every `a` value)
* $2: [2, 4] (every `b` value)
*/
export const unnestValues = <
T extends Record<string, unknown>,
C extends TableConfig = never,
>(
values: T[],
typeInfo: PgTableWithColumns<C>,
) => {
if (values[0] === undefined)
throw new Error("Invalid values, expecting at least one item");

const columns = getTableColumns(typeInfo);
const keys = Object.keys(values[0]).filter((x) => x in columns);
// @ts-expect-error: drizzle internal
const casing = db.dialect.casing as CasingCache;
const dbNames = Object.fromEntries(
Object.entries(columns).map(([k, v]) => [k, casing.getColumnCasing(v)]),
);
const vals = values.reduce(
(acc, cur, i) => {
for (const k of keys) {
if (k in cur) acc[k].push(cur[k]);
else acc[k].push(null);
}
for (const k of Object.keys(cur)) {
if (k in acc) continue;
if (!(k in columns)) continue;
keys.push(k);
acc[k] = new Array(i).fill(null);
acc[k].push(cur[k]);
}
return acc;
},
Object.fromEntries(keys.map((x) => [x, [] as unknown[]])),
);
const computed = Object.entries(columns)
.filter(([k, v]) => (v.defaultFn || v.onUpdateFn) && !keys.includes(k))
.map(([k]) => k);
return db
.select(
Object.fromEntries([
...keys.map((x) => [x, sql.raw(`"${dbNames[x]}"`)]),
...computed.map((x) => [
x,
(columns[x].defaultFn?.() ?? columns[x].onUpdateFn!()).as(dbNames[x]),
]),
]) as {
[k in keyof typeof typeInfo.$inferInsert]-?: SQL.Aliased<
(typeof typeInfo.$inferInsert)[k]
>;
},
)
.from(
sql`unnest(${sql.join(
keys.map(
(k) =>
sql`${sqlarr(vals[k])}${sql.raw(`::${columns[k].getSQLType()}[]`)}`,
),
sql.raw(", "),
)}) as v(${sql.raw(keys.map((x) => `"${dbNames[x]}"`).join(", "))})`,
);
};

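In practice unnestValues is a drop-in for .values(), as the call sites earlier in this diff show; a minimal usage sketch:

    // one array bind parameter per column instead of one per cell, which keeps
    // large batches well under postgres' 65535 bind-parameter limit
    const ret = await tx
        .insert(staff)
        .select(unnestValues(people, staff))
        .onConflictDoNothing()
        .returning({ pk: staff.pk, slug: staff.slug });
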
export const unnest = <T extends Record<string, unknown>>(
values: T[],
name: string,
typeInfo: Record<keyof T, string>,
) => {
const keys = Object.keys(typeInfo);
const vals = values.reduce(
(acc, cur) => {
for (const k of keys) {
if (k in cur) acc[k].push(cur[k]);
else acc[k].push(null);
}
return acc;
},
Object.fromEntries(keys.map((x) => [x, [] as unknown[]])),
);
return sql`unnest(${sql.join(
keys.map((k) => sql`${sqlarr(vals[k])}${sql.raw(`::${typeInfo[k]}[]`)}`),
sql.raw(", "),
)}) as ${sql.raw(name)}(${sql.raw(keys.map((x) => `"${x}"`).join(", "))})`;
};

export const coalesce = <T>(val: SQL<T> | SQLWrapper, def: SQL<T> | Column) => {
return sql<T>`coalesce(${val}, ${def})`;
};
@@ -157,19 +268,3 @@ export const isUniqueConstraint = (e: unknown): boolean => {
cause.code === "23505"
);
};

export const timestamp = customType<{
data: string;
driverData: string;
config: { withTimezone: boolean; precision?: number; mode: "iso" };
}>({
dataType(config) {
const precision = config?.precision ? ` (${config.precision})` : "";
return `timestamp${precision}${config?.withTimezone ? " with time zone" : ""}`;
},
fromDriver(value: string): string {
// postgres format: 2025-06-22 16:13:37.489301+00
// what we want: 2025-06-22T16:13:37Z
return `${value.substring(0, 10)}T${value.substring(11, 19)}Z`;
},
});

@@ -8,7 +8,9 @@ import { comment } from "./utils";
await migrate();

// run image processor task in background
processImages();
for (let i = 0; i < 10; i++) {
processImages();
}

const app = new Elysia()
.use(

@@ -7,10 +7,12 @@ export const Original = t.Object({
description: "The language code this was made in.",
examples: ["ja"],
}),
name: t.String({
description: "The name in the original language",
examples: ["進撃の巨人"],
}),
name: t.Nullable(
t.String({
description: "The name in the original language",
examples: ["進撃の巨人"],
}),
),
latinName: t.Nullable(
t.String({
description: comment`

@@ -1,6 +1,5 @@
import type { ObjectOptions } from "@sinclair/typebox";
import { type TSchema, t } from "elysia";
import { buildUrl } from "~/utils";
import { generateAfter } from "./keyset-paginate";
import type { Sort } from "./sort";

@@ -1,4 +1,4 @@
import { opentelemetry } from "@elysiajs/opentelemetry";
import { record as elysiaRecord, opentelemetry } from "@elysiajs/opentelemetry";
import { OTLPMetricExporter as GrpcMetricExporter } from "@opentelemetry/exporter-metrics-otlp-grpc";
import { OTLPMetricExporter as HttpMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { OTLPTraceExporter as GrpcTraceExporter } from "@opentelemetry/exporter-trace-otlp-grpc";
@@ -32,3 +32,12 @@ export const otel = new Elysia()
}),
)
.as("global");

export function record<T extends (...args: any) => any>(
spanName: string,
fn: T,
): T {
const wrapped = (...args: Parameters<T>) =>
elysiaRecord(spanName, () => fn(...args));
return wrapped as T;
}

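A minimal usage sketch of the new record wrapper (the names below are only for the example):

    // wrap any function; each call then runs inside its own otel span
    const slowSum = record("slowSum", async (a: number, b: number) => a + b);
    await slowSum(1, 2); // traced as a "slowSum" span
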
@@ -28,3 +28,13 @@ export function getFile(path: string): BunFile | S3File {

return Bun.file(path);
}

export function uniqBy<T>(a: T[], key: (val: T) => string): T[] {
const seen: Record<string, boolean> = {};
return a.filter((item) => {
const k = key(item);
if (seen[k]) return false;
seen[k] = true;
return true;
});
}

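For illustration (example data only):

    uniqBy([{ slug: "a" }, { slug: "b" }, { slug: "a" }], (x) => x.slug);
    // => [{ slug: "a" }, { slug: "b" }] (the first occurrence of each key wins)
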
48  api/tests/helpers/collections-helper.ts  Normal file
@@ -0,0 +1,48 @@
import { buildUrl } from "tests/utils";
import { handlers } from "~/base";
import { getJwtHeaders } from "./jwt";

export const getCollection = async (
id: string,
{
langs,
...query
}: { langs?: string; preferOriginal?: boolean; with?: string[] },
) => {
const resp = await handlers.handle(
new Request(buildUrl(`collections/${id}`, query), {
method: "GET",
headers: langs
? {
"Accept-Language": langs,
...(await getJwtHeaders()),
}
: await getJwtHeaders(),
}),
);
const body = await resp.json();
return [resp, body] as const;
};

export const getCollections = async ({
langs,
...query
}: {
langs?: string;
preferOriginal?: boolean;
with?: string[];
}) => {
const resp = await handlers.handle(
new Request(buildUrl("collections", query), {
method: "GET",
headers: langs
? {
"Accept-Language": langs,
...(await getJwtHeaders()),
}
: await getJwtHeaders(),
}),
);
const body = await resp.json();
return [resp, body] as const;
};

@@ -1,4 +1,5 @@
export * from "~/base";
export * from "./collections-helper";
export * from "./movies-helper";
export * from "./series-helper";
export * from "./shows-helper";

@@ -20,6 +20,7 @@ const [resp, body] = await createVideo([
title: "mia",
episodes: [{ season: 1, episode: 13 }],
from: "test",
history: [],
},
part: null,
path: "/video/mia s1e13.mkv",
@@ -33,6 +34,7 @@ const [resp, body] = await createVideo([
episodes: [{ season: 2, episode: 1 }],
years: [2017],
from: "test",
history: [],
},
part: null,
path: "/video/mia 2017 s2e1.mkv",
@@ -41,7 +43,7 @@ const [resp, body] = await createVideo([
for: [{ slug: `${madeInAbyss.slug}-s2e1` }],
},
{
guess: { title: "bubble", from: "test" },
guess: { title: "bubble", from: "test", history: [] },
part: null,
path: "/video/bubble.mkv",
rendering: "sha5",

@@ -1,19 +1,23 @@
import { describe, expect, it } from "bun:test";
import { eq } from "drizzle-orm";
import { beforeAll, describe, expect, it } from "bun:test";
import { and, eq, sql } from "drizzle-orm";
import { createMovie, createSerie } from "tests/helpers";
import { expectStatus } from "tests/utils";
import { defaultBlurhash, processImages } from "~/controllers/seed/images";
import { db } from "~/db";
import { mqueue, shows, staff, studios, videos } from "~/db/schema";
import { madeInAbyss } from "~/models/examples";
import { createSerie } from "../helpers";
import { dune, madeInAbyss } from "~/models/examples";

describe("images", () => {
it("Create a serie download images", async () => {
beforeAll(async () => {
await db.delete(shows);
await db.delete(studios);
await db.delete(staff);
await db.delete(videos);
await db.delete(mqueue);
});

it("Create a serie download images", async () => {
await db.delete(mqueue);
await createSerie(madeInAbyss);
const release = await processImages();
// remove notifications to prevent other images to be downloaded (do not curl 20000 images for nothing)
@@ -26,4 +30,34 @@ describe("images", () => {
expect(ret!.original.poster!.blurhash).toBeString();
expect(ret!.original.poster!.blurhash).not.toBe(defaultBlurhash);
});

it("Download 404 image", async () => {
await db.delete(mqueue);
const url404 = "https://mockhttp.org/status/404";
const [ret, body] = await createMovie({
...dune,
translations: {
en: {
...dune.translations.en,
poster: url404,
thumbnail: null,
banner: null,
logo: null,
},
},
});
expectStatus(ret, body).toBe(201);

const release = await processImages();
// remove notifications to prevent other images to be downloaded (do not curl 20000 images for nothing)
release();

const failed = await db.query.mqueue.findFirst({
where: and(
eq(mqueue.kind, "image"),
eq(sql`${mqueue.message}->>'url'`, url404),
),
});
expect(failed!.attempt).toBe(5);
});
});

@@ -6,9 +6,12 @@ import {
getStaffRoles,
} from "tests/helpers";
import { expectStatus } from "tests/utils";
import { db } from "~/db";
import { staff } from "~/db/schema";
import { madeInAbyss } from "~/models/examples";

beforeAll(async () => {
await db.delete(staff);
await createSerie(madeInAbyss);
});

@@ -2,7 +2,7 @@ import { beforeAll, describe, expect, it } from "bun:test";
import { eq } from "drizzle-orm";
import { expectStatus } from "tests/utils";
import { db } from "~/db";
import { seasons, shows, videos } from "~/db/schema";
import { entries, seasons, shows, videos } from "~/db/schema";
import { madeInAbyss, madeInAbyssVideo } from "~/models/examples";
import { createSerie } from "../helpers";

@@ -104,4 +104,61 @@ describe("Serie seeding", () => {
],
});
});

it("Can create a serie with quotes", async () => {
await db.delete(entries);
const [resp, body] = await createSerie({
...madeInAbyss,
slug: "quote-test",
seasons: [
{
...madeInAbyss.seasons[0],
translations: {
en: {
...madeInAbyss.seasons[0].translations.en,
name: "Season'1",
},
},
},
{
...madeInAbyss.seasons[1],
translations: {
en: {
...madeInAbyss.seasons[0].translations.en,
name: 'Season"2',
description: `This's """""quote, idk'''''`,
},
},
},
],
});

expectStatus(resp, body).toBe(201);
expect(body.id).toBeString();
expect(body.slug).toBe("quote-test");

const ret = await db.query.shows.findFirst({
where: eq(shows.id, body.id),
with: {
seasons: {
orderBy: seasons.seasonNumber,
with: { translations: true },
},
entries: {
with: {
translations: true,
evj: { with: { video: true } },
},
},
},
});

expect(ret).not.toBeNull();
expect(ret!.seasons).toBeArrayOfSize(2);
expect(ret!.seasons[0].translations[0].name).toBe("Season'1");
expect(ret!.seasons[1].translations[0].name).toBe('Season"2');
expect(ret!.entries).toBeArrayOfSize(
madeInAbyss.entries.length + madeInAbyss.extras.length,
);
});
});

@@ -1,9 +1,12 @@
import { beforeAll } from "bun:test";
import { migrate } from "~/db";

process.env.PGDATABASE = "kyoo_test";
process.env.JWT_SECRET = "this is a secret";
process.env.JWT_ISSUER = "https://kyoo.zoriya.dev";
process.env.IMAGES_PATH = "./images";

beforeAll(async () => {
// lazy-load this so the env vars set above actually apply
const { migrate } = await import("~/db");
await migrate();
});

@@ -1,6 +1,6 @@
{
"compilerOptions": {
"target": "ES2021",
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "node",
"esModuleInterop": true,

@@ -1,11 +1,11 @@
FROM golang:1.25 AS build
FROM --platform=$BUILDPLATFORM golang:1.25 AS build
WORKDIR /app

COPY go.mod go.sum ./
RUN go mod download

COPY . .
RUN CGO_ENABLED=0 GOOS=linux go build -o /keibi
RUN CGO_ENABLED=0 GOOS=${TARGETOS:-linux} GOARCH=$TARGETARCH go build -o /keibi

FROM gcr.io/distroless/base-debian11
WORKDIR /app

@@ -4,10 +4,9 @@ import (
"context"
"crypto/rand"
"encoding/base64"
"fmt"
"maps"
"net/http"
"strings"
"slices"
"time"

"github.com/golang-jwt/jwt/v5"
@@ -45,7 +44,7 @@ func MapDbKey(key *dbc.Apikey) ApiKeyWToken {
CreatedAt: key.CreatedAt,
LastUsed: key.LastUsed,
},
Token: fmt.Sprintf("%s-%s", key.Name, key.Token),
Token: key.Token,
}
}

@@ -75,7 +74,10 @@ func (h *Handler) CreateApiKey(c echo.Context) error {
return err
}

if _, conflict := h.config.EnvApiKeys[req.Name]; conflict {
conflict := slices.ContainsFunc(h.config.EnvApiKeys, func(k ApiKeyWToken) bool {
return k.Name == req.Name
})
if conflict {
return echo.NewHTTPError(409, "An env apikey is already defined with the same name")
}

@@ -174,17 +176,15 @@ func (h *Handler) ListApiKey(c echo.Context) error {
}

func (h *Handler) createApiJwt(apikey string) (string, error) {
info := strings.SplitN(apikey, "-", 2)
if len(info) != 2 {
return "", echo.NewHTTPError(http.StatusForbidden, "Invalid api key format")
var key *ApiKeyWToken
for _, k := range h.config.EnvApiKeys {
if k.Token == apikey {
key = &k
break
}
}

key, fromEnv := h.config.EnvApiKeys[info[0]]
if !fromEnv {
dbKey, err := h.db.GetApiKey(context.Background(), dbc.GetApiKeyParams{
Name: info[0],
Token: info[1],
})
if key == nil {
dbKey, err := h.db.GetApiKey(context.Background(), apikey)
if err == pgx.ErrNoRows {
return "", echo.NewHTTPError(http.StatusForbidden, "Invalid api key")
} else if err != nil {
@@ -195,7 +195,8 @@ func (h *Handler) createApiJwt(apikey string) (string, error) {
h.db.TouchApiKey(context.Background(), dbKey.Pk)
}()

key = MapDbKey(&dbKey)
found := MapDbKey(&dbKey)
key = &found
}

claims := maps.Clone(key.Claims)
@@ -210,6 +211,7 @@ func (h *Handler) createApiJwt(apikey string) (string, error) {
Time: time.Now().UTC().Add(time.Hour),
}
jwt := jwt.NewWithClaims(jwt.SigningMethodRS256, claims)
jwt.Header["kid"] = h.config.JwtKid
t, err := jwt.SignedString(h.config.JwtPrivateKey)
if err != nil {
return "", err

@@ -12,6 +12,7 @@ import (
"fmt"
"maps"
"os"
"slices"
"strings"
"time"

@@ -31,7 +32,7 @@ type Configuration struct {
GuestClaims jwt.MapClaims
ProtectedClaims []string
ExpirationDelay time.Duration
EnvApiKeys map[string]ApiKeyWToken
EnvApiKeys []ApiKeyWToken
}

var DefaultConfig = Configuration{
@@ -39,7 +40,7 @@ var DefaultConfig = Configuration{
FirstUserClaims: make(jwt.MapClaims),
ProtectedClaims: []string{"permissions"},
ExpirationDelay: 30 * 24 * time.Hour,
EnvApiKeys: make(map[string]ApiKeyWToken),
EnvApiKeys: make([]ApiKeyWToken, 0),
}

func LoadConfiguration(db *dbc.Queries) (*Configuration, error) {
@@ -137,14 +138,14 @@ func LoadConfiguration(db *dbc.Queries) (*Configuration, error) {
}

name = strings.ToLower(name)
ret.EnvApiKeys[name] = ApiKeyWToken{
ret.EnvApiKeys = append(ret.EnvApiKeys, ApiKeyWToken{
ApiKey: ApiKey{
Id: uuid.New(),
Name: name,
Claims: claims,
},
Token: v[1],
}
})

}
apikeys, err := db.ListApiKeys(context.Background())
@@ -152,7 +153,10 @@ func LoadConfiguration(db *dbc.Queries) (*Configuration, error) {
return nil, err
}
for _, key := range apikeys {
if _, defined := ret.EnvApiKeys[key.Name]; defined {
dup := slices.ContainsFunc(ret.EnvApiKeys, func(k ApiKeyWToken) bool {
return k.Name == key.Name
})
if dup {
return nil, fmt.Errorf(
"an api key with the name %s is already defined in database. Can't specify a new one via env var",
key.Name,

@@ -76,17 +76,11 @@ select
from
keibi.apikeys
where
name = $1
and token = $2
token = $1
`

type GetApiKeyParams struct {
Name string `json:"name"`
Token string `json:"token"`
}

func (q *Queries) GetApiKey(ctx context.Context, arg GetApiKeyParams) (Apikey, error) {
row := q.db.QueryRow(ctx, getApiKey, arg.Name, arg.Token)
func (q *Queries) GetApiKey(ctx context.Context, token string) (Apikey, error) {
row := q.db.QueryRow(ctx, getApiKey, token)
var i Apikey
err := row.Scan(
&i.Pk,

18  auth/devspace.yaml  Normal file
@@ -0,0 +1,18 @@
version: v2beta1
name: auth
dev:
  auth:
    imageSelector: ghcr.io/zoriya/kyoo_auth
    devImage: docker.io/golang:1.25
    workingDir: /app
    sync:
      - path: .:/app
        startContainer: true
        onUpload:
          restartContainer: true
    command:
      - bash
      - -c
      - "go mod download; go run -race ."
    ports:
      - port: "4568"
25  auth/go.mod
@@ -6,10 +6,11 @@ toolchain go1.25.4

require (
github.com/alexedwards/argon2id v1.0.0
github.com/exaring/otelpgx v0.9.3
github.com/golang-jwt/jwt/v5 v5.3.0
github.com/google/uuid v1.6.0
github.com/jackc/pgx/v5 v5.7.6
github.com/labstack/echo-jwt/v4 v4.3.1
github.com/labstack/echo-jwt/v4 v4.4.0
github.com/labstack/echo/v4 v4.13.4
github.com/lestrrat-go/jwx/v3 v3.0.12
github.com/swaggo/echo-swagger v1.4.1
@@ -18,12 +19,8 @@ require (
go.opentelemetry.io/otel v1.38.0
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.43.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0
go.opentelemetry.io/otel/log v0.14.0
go.opentelemetry.io/otel/sdk v1.38.0
go.opentelemetry.io/otel/sdk/log v0.14.0
@@ -33,7 +30,6 @@ require (
require (
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
github.com/exaring/otelpgx v0.9.3 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/goccy/go-json v0.10.5 // indirect
@@ -43,13 +39,12 @@ require (
github.com/lestrrat-go/httprc/v3 v3.0.1 // indirect
github.com/lestrrat-go/option v1.0.1 // indirect
github.com/lestrrat-go/option/v2 v2.0.0 // indirect
github.com/pgx-contrib/pgxotel v0.0.0-20250908221444-24ae56d05ec0 // indirect
github.com/segmentio/asm v1.2.1 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
go.opentelemetry.io/proto/otlp v1.7.1 // indirect
golang.org/x/mod v0.28.0 // indirect
golang.org/x/mod v0.29.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
google.golang.org/grpc v1.75.0 // indirect
@@ -86,13 +81,13 @@ require (
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.45.0 // indirect
golang.org/x/sync v0.17.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/time v0.12.0 // indirect
golang.org/x/tools v0.37.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/sync v0.18.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
golang.org/x/time v0.14.0 // indirect
golang.org/x/tools v0.38.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

73  auth/go.sum
@@ -64,8 +64,10 @@ github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9v
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/golang-migrate/migrate/v4 v4.19.0 h1:RcjOnCGz3Or6HQYEJ/EEVLfWnmw9KnoigPSjzhCuaSE=
github.com/golang-migrate/migrate/v4 v4.19.0/go.mod h1:9dyEcu+hO+G9hPSw8AIg50yg622pXJsoHItQnDGZkI0=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU=
@@ -91,8 +93,8 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/labstack/echo-jwt/v4 v4.3.1 h1:d8+/qf8nx7RxeL46LtoIwHJsH2PNN8xXCQ/jDianycE=
github.com/labstack/echo-jwt/v4 v4.3.1/go.mod h1:yJi83kN8S/5vePVPd+7ID75P4PqPNVRs2HVeuvYJH00=
github.com/labstack/echo-jwt/v4 v4.4.0 h1:nrXaEnJupfc2R4XChcLRDyghhMZup77F8nIzHnBK19U=
github.com/labstack/echo-jwt/v4 v4.4.0/go.mod h1:kYXWgWms9iFqI3ldR+HAEj/Zfg5rZtR7ePOgktG4Hjg=
github.com/labstack/echo/v4 v4.13.4 h1:oTZZW+T3s9gAu5L8vmzihV7/lkXGZuITzTQkTEhcXEA=
github.com/labstack/echo/v4 v4.13.4/go.mod h1:g63b33BZ5vZzcIUF8AtRH40DrTlXnx4UMC8rBdndmjQ=
github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0=
@@ -133,14 +135,12 @@ github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
github.com/pgx-contrib/pgxotel v0.0.0-20250908221444-24ae56d05ec0 h1:pXjoOmtVzAOXlhubnCDLkgIZG0jRZZJrJ2stfoRggHY=
github.com/pgx-contrib/pgxotel v0.0.0-20250908221444-24ae56d05ec0/go.mod h1:ZbfsWT2cAdyyDUPRSlBfBMV9M2FPqgG+b/FR1DKZ2zs=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -169,16 +169,14 @@ go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0
go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.63.0/go.mod h1:ZEA7j2B35siNV0T00aapacNzjz4tvOlNoHp0ncCfwNQ=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8=
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
go.opentelemetry.io/contrib/propagators/b3 v1.38.0 h1:uHsCCOSKl0kLrV2dLkFK+8Ywk9iKa/fptkytc6aFFEo=
go.opentelemetry.io/contrib/propagators/b3 v1.38.0/go.mod h1:wMRSZJZcY8ya9mApLLhwIMjqmApy2o/Ml+62lhvxyHU=
go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 h1:OMqPldHt79PqWKOMYIAQs3CxAi7RLgPxwfFSwr4ZxtM=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0/go.mod h1:1biG4qiqTxKiUCtoWDPpL3fB3KxVwCiGw81j3nKMuHE=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 h1:QQqYw3lkrzwVsoEX0w//EhH/TCnpRdEenKBOOEIMjWc=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0/go.mod h1:gSVQcr17jk2ig4jqJ2DX30IdWH251JcNAecvrqTxH1s=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.43.0 h1:f+VtlQwREKbGdbq/Mx/xMDLrPktBZ1+5PzNMrYSsdXo=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.43.0/go.mod h1:V0A1wlhxQUdvqQk+vMA5+NwT7I6AFSyQv1EXLQBb8dM=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0 h1:vl9obrcoWVKp/lwl8tRE33853I8Xru9HFbw/skNeLs8=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0/go.mod h1:GAXRxmLJcVM3u22IjTg74zWBrRCKq8BnOqUVLodpcpw=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0 h1:Oe2z/BCg5q7k4iXC3cqJxKYg0ieRiOqF0cecFYdPTwk=
@@ -189,51 +187,45 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 h1:lwI4D
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0/go.mod h1:Kz/oCE7z5wuyhPxsXDuaPteSWqjSBD5YaSdbxZYGbGk=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4=
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 h1:B/g+qde6Mkzxbry5ZZag0l7QrQBCtVm7lVjaLgmpje8=
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0/go.mod h1:mOJK8eMmgW6ocDJn6Bn11CcZ05gi3P8GylBXEkZtbgA=
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0 h1:wm/Q0GAAykXv83wzcKzGGqAnnfLFyFe7RslekZuv+VI=
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0/go.mod h1:ra3Pa40+oKjvYh+ZD3EdxFZZB0xdMfuileHAm4nNN7w=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 h1:kJxSDN4SgWWTjG/hPp3O7LCGLcHXFlvS2/FFOrwL+SE=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0/go.mod h1:mgIOzS7iZeKJdeB8/NYHrJ48fdGc71Llo5bJ1J4DWUE=
go.opentelemetry.io/otel/log v0.14.0 h1:2rzJ+pOAZ8qmZ3DDHg73NEKzSZkhkGIua9gXtxNGgrM=
go.opentelemetry.io/otel/log v0.14.0/go.mod h1:5jRG92fEAgx0SU/vFPxmJvhIuDU9E1SUnEQrMlJpOno=
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA=
go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI=
go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E=
go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg=
go.opentelemetry.io/otel/sdk/log v0.14.0 h1:JU/U3O7N6fsAXj0+CXz21Czg532dW2V4gG1HE/e8Zrg=
go.opentelemetry.io/otel/sdk/log v0.14.0/go.mod h1:imQvII+0ZylXfKU7/wtOND8Hn4OpT3YUoIgqJVksUkM=
go.opentelemetry.io/otel/sdk/log/logtest v0.14.0 h1:Ijbtz+JKXl8T2MngiwqBlPaHqc4YCaP/i13Qrow6gAM=
go.opentelemetry.io/otel/sdk/log/logtest v0.14.0/go.mod h1:dCU8aEL6q+L9cYTqcVOk8rM9Tp8WdnHOPLiBgp0SGOA=
go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM=
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4=
go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.28.0 h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U=
golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI=
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.45.0 h1:RLBg5JKixCy82FtLJpeNlVM0nrSqpCRYzVU1n8kj0tM=
golang.org/x/net v0.45.0/go.mod h1:ECOoLqd5U3Lhyeyo/QDCEVQ4sNgYsqvCZ722XogGieY=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -243,8 +235,8 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@@ -256,20 +248,19 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0=
golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE=
golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w=
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 h1:9+tzLLstTlPTRyJTh+ah5wIMsBW5c4tQwGTN3thOW9Y=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE=

@@ -88,7 +88,9 @@ func setupOtel(e *echo.Echo) (func(), error) {
	otel.SetTracerProvider(tp)

	e.Use(otelecho.Middleware("kyoo.auth", otelecho.WithSkipper(func(c echo.Context) bool {
-		return c.Path() == "/auth/health" || c.Path() == "/auth/ready"
+		return (c.Path() == "/auth/health" ||
+			c.Path() == "/auth/ready" ||
+			strings.HasPrefix(c.Path(), "/.well-known/"))
	})))

	return func() {
@@ -7,7 +7,7 @@ pkgs.mkShell {
    sqlc
    go-swag
    # for psql in cli (+ pgformatter for sql files)
-   postgresql_15
+   postgresql_18
    pgformatter
    # to run tests
    hurl
@@ -4,8 +4,7 @@ select
from
    keibi.apikeys
where
-   name = $1
-   and token = $2;
+   token = $1;

-- name: TouchApiKey :exec
update
@@ -11,7 +11,7 @@ HTTP 401

POST {{host}}/keys
# this is created from the gh workflow file's env var
-X-API-KEY: hurl-1234apikey
+X-API-KEY: 1234apikey
{
    "name": "dryflower",
    "claims": {
@@ -32,7 +32,7 @@ jwt: jsonpath "$.token"

# Duplicates email
POST {{host}}/keys
-X-API-KEY: hurl-1234apikey
+X-API-KEY: 1234apikey
{
    "name": "dryflower",
    "claims": {
@@ -57,5 +57,5 @@ Authorization: Bearer {{jwt}}
HTTP 403

DELETE {{host}}/keys/{{id}}
-X-API-KEY: hurl-1234apikey
+X-API-KEY: 1234apikey
HTTP 200

@@ -1,6 +1,6 @@
POST {{host}}/keys
# this is created from the gh workflow file's env var
-X-API-KEY: hurl-1234apikey
+X-API-KEY: 1234apikey
{
    "name": "dryflower",
    "claims": {
@@ -32,5 +32,5 @@ jsonpath "$.items[0].claims.permissions" contains "apikeys.read"
# Clean api key

DELETE {{host}}/keys/{{id}}
-X-API-KEY: hurl-1234apikey
+X-API-KEY: 1234apikey
HTTP 200
2 chart/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
+charts

6 chart/Chart.lock Normal file
@@ -0,0 +1,6 @@
+dependencies:
+- name: postgres
+  repository: oci://registry-1.docker.io/cloudpirates
+  version: 0.12.4
+digest: sha256:e486b44703c7a97eee25f7715ab040d197d79c41ea1c422ae009b1f68985f544
+generated: "2025-12-01T20:17:25.152279487+01:00"
@@ -12,4 +12,4 @@ dependencies:
  - condition: postgres.enabled
    name: postgres
    repository: oci://registry-1.docker.io/cloudpirates
-   version: 0.11.6
+   version: 0.12.4
@@ -14,7 +14,7 @@ metadata:
spec:
  replicas: {{ .Values.scanner.replicaCount }}
  {{- with .Values.scanner.updateStrategy }}
-  strategy:
+  strategy:
  {{- toYaml . | nindent 4 }}
  {{- end }}
  selector:
@@ -62,13 +62,11 @@ spec:
            value: "http://{{ include "kyoo.auth.fullname" . }}:4568/.well-known/jwks.json"
          - name: JWT_ISSUER
            value: {{ .Values.kyoo.address | quote }}
-         - name: HELPERVAR_APIKEY
+         - name: KYOO_APIKEY
            valueFrom:
              secretKeyRef:
                key: {{ .Values.kyoo.auth.apikeys.scanner.apikeyKey }}
                name: {{ .Values.kyoo.auth.apikeys.scanner.existingSecret }}
-         - name: KYOO_APIKEY
-           value: "scanner-$(HELPERVAR_APIKEY)"
          - name: THEMOVIEDB_API_ACCESS_TOKEN
            valueFrom:
              secretKeyRef:
@@ -136,4 +134,4 @@ spec:
      {{- end }}
      {{- with .Values.scanner.extraVolumes }}
      {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- end }}
@@ -115,7 +115,6 @@ kyoo:
      extra: []
      # - name: example
      #   existingSecret: bigsecret
-     ## value of the apieky should be $name-$apikey
      #   apikeyKey: example_apikey
      #   claims: '{"permissions": ["core.read"]}'
@@ -201,7 +200,7 @@ api:
  volumeMounts: []
  volumes: []
  replicaCount: 1
- # default to recreate for better user experience with ReadWriteOnce volumes
+ # default to recreate for better user experience with ReadWriteOnce volumes
  updateStrategy:
    type: Recreate
  podLabels: {}
@@ -430,6 +429,8 @@ traefikproxy:
    extraArgs:
      - '--entryPoints.web.address=:80/tcp'
      - '--entryPoints.websecure.address=:443/tcp'
+     - '--entryPoints.web.forwardedHeaders.insecure=true'
+     - '--entryPoints.websecure.forwardedHeaders.insecure=true'
      - '--api.dashboard=true'
      - '--api.insecure=true'
      - '--log.level=INFO'
@@ -468,11 +469,11 @@ postgres:
    existingSecret: "{{ .Values.global.postgres.infra.existingSecret }}"
    secretKeys:
      # set the postgres user password to the same as our user
-     passwordKey: "{{ .Values.global.postgres.infra.passwordKey }}"
+     adminPasswordKey: "{{ .Values.global.postgres.infra.passwordKey }}"
  initdb:
    scripts:
      kyoo_api.sql: |
-       CREATE DATABASE {{ .Values.global.postgres.kyoo_api.database }} WITH OWNER {{ .Values.global.postgres.infra.user }};
+       CREATE DATABASE {{ .Values.global.postgres.kyoo_api.database }} WITH OWNER {{ .Values.global.postgres.infra.user }};
        \connect {{ .Values.global.postgres.kyoo_api.database }};
        REVOKE ALL ON SCHEMA public FROM PUBLIC;
        CREATE SCHEMA IF NOT EXISTS kyoo AUTHORIZATION {{ .Values.global.postgres.infra.user }};
@@ -481,7 +482,7 @@ postgres:
        SET pg_trgm.word_similarity_threshold = 0.4;
        ALTER DATABASE {{ .Values.global.postgres.kyoo_api.database }} SET pg_trgm.word_similarity_threshold = 0.4;
      kyoo_auth.sql: |
-       CREATE DATABASE {{ .Values.global.postgres.kyoo_auth.database }} WITH OWNER {{ .Values.global.postgres.infra.user }};
+       CREATE DATABASE {{ .Values.global.postgres.kyoo_auth.database }} WITH OWNER {{ .Values.global.postgres.infra.user }};
        \connect {{ .Values.global.postgres.kyoo_auth.database }};
        REVOKE ALL ON SCHEMA public FROM PUBLIC;
        CREATE SCHEMA IF NOT EXISTS keibi AUTHORIZATION {{ .Values.global.postgres.infra.user }};
@@ -496,13 +497,13 @@ postgres:
        REVOKE ALL ON SCHEMA public FROM PUBLIC;
        CREATE SCHEMA IF NOT EXISTS gocoder AUTHORIZATION {{ .Values.global.postgres.infra.user }};
      user.sql: |
-       ALTER ROLE {{ .Values.global.postgres.infra.user }}
+       ALTER ROLE {{ .Values.global.postgres.infra.user }}
        IN DATABASE {{ .Values.global.postgres.kyoo_api.database }} SET search_path TO "$user", kyoo;
        ALTER ROLE {{ .Values.global.postgres.infra.user }}
        IN DATABASE {{ .Values.global.postgres.kyoo_auth.database }} SET search_path TO "$user", keibi;
-       ALTER ROLE {{ .Values.global.postgres.infra.user }}
+       ALTER ROLE {{ .Values.global.postgres.infra.user }}
        IN DATABASE {{ .Values.global.postgres.kyoo_scanner.database }} SET search_path TO "$user", scanner;
-       ALTER ROLE {{ .Values.global.postgres.infra.user }}
+       ALTER ROLE {{ .Values.global.postgres.infra.user }}
        IN DATABASE {{ .Values.global.postgres.kyoo_transcoder.database }} SET search_path TO "$user", gocoder;
  persistence:
    enabled: true
18 devspace.yaml Normal file
@@ -0,0 +1,18 @@
+version: v2beta1
+name: kyoo-devspace
+dependencies:
+  api:
+    path: ./api
+    pipeline: dev
+  auth:
+    path: ./auth
+    pipeline: dev
+  front:
+    path: ./front
+    pipeline: dev
+  scanner:
+    path: ./scanner
+    pipeline: dev
+  transcoder:
+    path: ./transcoder
+    pipeline: dev
@@ -88,7 +88,7 @@ services:
    env_file:
      - ./.env
    volumes:
-     - images:/app/images
+     - images:/images
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.swagger.rule=PathPrefix(`/swagger`)"
@@ -121,7 +121,7 @@ services:
      # Use this env var once we use mTLS for auth
      # - KYOO_URL=${KYOO_URL:-http://api:3567/api}
      - KYOO_URL=${KYOO_URL:-http://traefik:8901/api}
-     - KYOO_APIKEY=scanner-$KEIBI_APIKEY_SCANNER
+     - KYOO_APIKEY=$KEIBI_APIKEY_SCANNER
      - JWKS_URL=http://auth:4568/.well-known/jwks.json
      - JWT_ISSUER=${PUBLIC_URL}
    volumes:
@@ -177,7 +177,7 @@ services:
    profiles: ['qsv']

  traefik:
-   image: traefik:v3.5
+   image: traefik:v3.6
    restart: on-failure
    command:
      - "--providers.docker=true"
@@ -190,12 +190,12 @@ services:
      - "/var/run/docker.sock:/var/run/docker.sock:ro"

  postgres:
-   image: postgres:15
+   image: postgres:18
    restart: on-failure
    env_file:
      - ./.env
    volumes:
-     - db:/var/lib/postgresql/data
+     - db:/var/lib/postgresql
    ports:
      - "5432:5432"
    environment:
@@ -58,7 +58,7 @@ services:
    env_file:
      - ./.env
    volumes:
-     - images:/app/images
+     - images:/images
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.swagger.rule=PathPrefix(`/swagger`)"
@@ -78,7 +78,7 @@ services:
      # Use this env var once we use mTLS for auth
      # - KYOO_URL=${KYOO_URL:-http://api:3567/api}
      - KYOO_URL=${KYOO_URL:-http://traefik:8901/api}
-     - KYOO_APIKEY=scanner-$KEIBI_APIKEY_SCANNER
+     - KYOO_APIKEY=$KEIBI_APIKEY_SCANNER
      - JWKS_URL=http://auth:4568/.well-known/jwks.json
      - JWT_ISSUER=${PUBLIC_URL}
    volumes:
@@ -126,7 +126,7 @@ services:
    profiles: ["qsv"]

  traefik:
-   image: traefik:v3.5
+   image: traefik:v3.6
    restart: unless-stopped
    command:
      - "--providers.docker=true"
@@ -139,12 +139,12 @@ services:
      - "/var/run/docker.sock:/var/run/docker.sock:ro"

  postgres:
-   image: postgres:15
+   image: postgres:18
    restart: unless-stopped
    env_file:
      - ./.env
    volumes:
-     - db:/var/lib/postgresql/data
+     - db:/var/lib/postgresql
    environment:
      - POSTGRES_USER=$PGUSER
      - POSTGRES_PASSWORD=$PGPASSWORD
@@ -1,4 +1,4 @@
-FROM oven/bun AS builder
+FROM --platform=$BUILDPLATFORM oven/bun AS builder
WORKDIR /app

# https://github.com/oven-sh/bun/issues/24538
29 front/devspace.yaml Normal file
@@ -0,0 +1,29 @@
+version: v2beta1
+name: front
+dev:
+  front:
+    imageSelector: ghcr.io/zoriya/kyoo_front
+    devImage: docker.io/oven/bun:latest
+    workingDir: /app
+    sync:
+      - path: .:/app
+        excludePaths:
+          - node_modules
+        startContainer: true
+        onUpload:
+          exec:
+            - command: bun install --frozen-lockfile
+              onChange:
+                - "./bun.lock"
+    # increased sysctl limits for file watching
+    # front uses Metro javascript bundler which watches a lot of files
+    # these are node level settings that should be raised
+    # example values:
+    # fs.inotify.max_user_instances = 8192
+    # fs.inotify.max_user_watches = 1048576
+    command:
+      - bash
+      - -c
+      - "bun install --frozen-lockfile; bun dev --port 8901"
+    ports:
+      - port: "8901"
@@ -10,7 +10,7 @@ export const Collection = z
    slug: z.string(),
    name: z.string(),
    original: z.object({
-     name: z.string(),
+     name: z.string().nullable(),
      latinName: z.string().nullable(),
      language: z.string(),
    }),
@@ -11,7 +11,7 @@ export const Movie = z
    slug: z.string(),
    name: z.string(),
    original: z.object({
-     name: z.string(),
+     name: z.string().nullable(),
      latinName: z.string().nullable(),
      language: z.string(),
    }),
@@ -12,7 +12,7 @@ export const Serie = z
    slug: z.string(),
    name: z.string(),
    original: z.object({
-     name: z.string(),
+     name: z.string().nullable(),
      latinName: z.string().nullable(),
      language: z.string(),
    }),
@@ -28,10 +28,12 @@ export const ImageBackground = ({
  const { css, theme } = useYoshiki();
  const { apiUrl, authToken } = useToken();

+ const uri = src ? `${apiUrl}${src[quality ?? "high"]}` : null;
  return (
    <EImageBackground
+     recyclingKey={uri}
      source={{
-       uri: src ? `${apiUrl}${src[quality ?? "high"]}` : null,
+       uri,
        // use cookies on web to allow `img` to make the call instead of js
        headers:
          authToken && Platform.OS !== "web"
@@ -37,10 +37,12 @@ export const Image = ({
  const { css, theme } = useYoshiki();
  const { apiUrl, authToken } = useToken();

+ const uri = src ? `${apiUrl}${src[quality ?? "high"]}` : null;
  return (
    <EImage
+     recyclingKey={uri}
      source={{
-       uri: src ? `${apiUrl}${src[quality ?? "high"]}` : null,
+       uri,
        // use cookies on web to allow `img` to make the call instead of js
        headers:
          authToken && Platform.OS !== "web"
@@ -84,6 +84,7 @@ export const login = async (
export const logout = async () => {
  const accounts = readAccounts();
  const account = accounts.find((x) => x.selected);
+ removeAccounts((x) => x.selected);
  if (account) {
    await queryFn({
      method: "DELETE",
@@ -92,7 +93,6 @@ export const logout = async () => {
      parser: null,
    });
  }
- removeAccounts((x) => x.selected);
};

export const deleteAccount = async () => {
@@ -11,7 +11,7 @@ LIBRARY_IGNORE_PATTERN=".*/[dD]ownloads?/.*"
THEMOVIEDB_API_ACCESS_TOKEN=""

KYOO_URL="http://api:3567/api"
-KYOO_APIKEY=scanner-$KEIBI_APIKEY_SCANNER
+KYOO_APIKEY=$KEIBI_APIKEY_SCANNER

JWKS_URL="http://auth:4568/.well-known/jwks.json"
JWT_ISSUER=$PUBLIC_URL
25 scanner/devspace.yaml Normal file
@@ -0,0 +1,25 @@
+version: v2beta1
+name: scanner
+dev:
+  scanner:
+    imageSelector: ghcr.io/zoriya/kyoo_scanner
+    devImage: docker.io/astral/uv:python3.13-trixie
+    workingDir: /app
+    sync:
+      - path: .:/app
+        excludePaths:
+          - __pycache__
+          - .venv
+        startContainer: true
+        onUpload:
+          restartContainer: true
+    command:
+      - bash
+      - -c
+      - |
+        echo "Running uv sync..."
+        uv sync --locked || (echo 'uv sync failed' && exit 1)
+        echo "Starting FastAPI..."
+        /app/.venv/bin/fastapi run scanner --port 4389
+    ports:
+      - port: "4389"
@@ -18,7 +18,8 @@ create table scanner.requests(
    external_id jsonb not null default '{}'::jsonb,
    videos jsonb not null default '[]'::jsonb,
    status scanner.request_status not null default 'pending',
+   error jsonb,
    started_at timestamptz,
    created_at timestamptz not null default now()::timestamptz,
-   constraint unique_kty unique(kind, title, year)
+   constraint unique_kty unique nulls not distinct (kind, title, year)
);
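The `nulls not distinct` change matters because Postgres treats NULLs as distinct in unique constraints by default: the old `unique(kind, title, year)` allowed any number of duplicate `(kind, title)` rows as long as `year` was NULL. `NULLS NOT DISTINCT` (Postgres 15+, so fine with the postgres:18 images in this diff) closes that hole. A minimal sketch of the difference, assuming asyncpg and a reachable Postgres; the DSN is a placeholder:

import asyncio

import asyncpg


async def main():
    # placeholder DSN, adjust to your environment
    db = await asyncpg.connect("postgresql://kyoo:password@localhost/kyoo_test")
    await db.execute("""
        create temp table requests(
            kind text, title text, year int,
            constraint unique_kty unique nulls not distinct (kind, title, year)
        )
    """)
    await db.execute("insert into requests values ('movie', 'Dune', null)")
    try:
        await db.execute("insert into requests values ('movie', 'Dune', null)")
    except asyncpg.UniqueViolationError:
        # with a plain `unique(...)`, this second insert would have succeeded
        print("duplicate rejected even though year is NULL")
    await db.close()


asyncio.run(main())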
@@ -5,13 +5,15 @@ from fastapi import FastAPI

from scanner.client import KyooClient
from scanner.fsscan import FsScanner
-from scanner.otel import instrument
+from scanner.log import configure_logging
+from scanner.otel import setup_otelproviders, instrument
from scanner.providers.composite import CompositeProvider
from scanner.providers.themoviedatabase import TheMovieDatabase
from scanner.requests import RequestCreator, RequestProcessor

from .database import get_db, init_pool, migrate
from .routers.routes import router
+from .routers.health import router as health_router


@asynccontextmanager
@@ -24,6 +26,12 @@ async def lifespan(_):
    ):
        # there's no way someone else used the same id, right?
        is_master = await db.fetchval("select pg_try_advisory_lock(198347)")
+       is_http = not is_master and await db.fetchval(
+           "select pg_try_advisory_lock(645633)"
+       )
+       if is_http:
+           yield
+           return
        if is_master:
            await migrate()
            processor = RequestProcessor(pool, client, tmdb)
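The lifespan change above elects roles with Postgres advisory locks: the first replica to grab lock 198347 becomes the master (runs migrations and the request processor), the next one grabs 645633 and serves HTTP only, and any others fall through. A stripped-down sketch of that election, using the same lock ids as the diff (the DSN argument is a placeholder):

import asyncpg


async def elect(dsn: str) -> str:
    db = await asyncpg.connect(dsn)
    # session-level advisory locks are held until the connection closes,
    # so the role sticks for the lifetime of the process (do not close db)
    if await db.fetchval("select pg_try_advisory_lock(198347)"):
        return "master"  # runs migrations + background request processing
    if await db.fetchval("select pg_try_advisory_lock(645633)"):
        return "http"  # serves the API only
    return "idle"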
@@ -68,4 +76,7 @@ app = FastAPI(
    lifespan=lifespan,
)
app.include_router(router)
+app.include_router(health_router)
+configure_logging()
+setup_otelproviders()
instrument(app)
@@ -3,7 +3,7 @@ from logging import getLogger
from types import TracebackType
from typing import Literal

-from aiohttp import ClientSession
+from aiohttp import ClientResponse, ClientResponseError, ClientSession
from pydantic import TypeAdapter

from .models.movie import Movie
@@ -38,9 +38,19 @@ class KyooClient(metaclass=Singleton):
    ):
        await self._client.close()

+   async def raise_for_status(self, r: ClientResponse):
+       if r.status >= 400:
+           raise ClientResponseError(
+               r.request_info,
+               r.history,
+               status=r.status,
+               message=await r.text(),
+               headers=r.headers,
+           )
+
    async def get_videos_info(self) -> VideoInfo:
        async with self._client.get("videos") as r:
-           r.raise_for_status()
+           await self.raise_for_status(r)
            return VideoInfo(**await r.json())

    async def create_videos(self, videos: list[Video]) -> list[VideoCreated]:
@@ -48,7 +58,7 @@ class KyooClient(metaclass=Singleton):
            "videos",
            data=TypeAdapter(list[Video]).dump_json(videos, by_alias=True),
        ) as r:
-           r.raise_for_status()
+           await self.raise_for_status(r)
            return TypeAdapter(list[VideoCreated]).validate_json(await r.text())

    async def delete_videos(self, videos: list[str] | set[str]):
@@ -56,14 +66,14 @@ class KyooClient(metaclass=Singleton):
            "videos",
            data=TypeAdapter(list[str] | set[str]).dump_json(videos, by_alias=True),
        ) as r:
-           r.raise_for_status()
+           await self.raise_for_status(r)

    async def create_movie(self, movie: Movie) -> Resource:
        async with self._client.post(
            "movies",
            data=movie.model_dump_json(by_alias=True),
        ) as r:
-           r.raise_for_status()
+           await self.raise_for_status(r)
            return Resource.model_validate(await r.json())

    async def create_serie(self, serie: Serie) -> Resource:
@@ -71,7 +81,7 @@ class KyooClient(metaclass=Singleton):
            "series",
            data=serie.model_dump_json(by_alias=True),
        ) as r:
-           r.raise_for_status()
+           await self.raise_for_status(r)
            return Resource.model_validate(await r.json())

    async def link_videos(
@@ -100,4 +110,4 @@ class KyooClient(metaclass=Singleton):
                by_alias=True,
            ),
        ) as r:
-           r.raise_for_status()
+           await self.raise_for_status(r)
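The custom `raise_for_status` exists because aiohttp's built-in one raises with only the status line (its `message` is `resp.reason`) and releases the connection, so the API's error body is gone by the time the exception gets logged. Reading `await r.text()` into `message` first preserves it. A self-contained sketch of the same shape (the URL is a placeholder, so the call at the bottom is left commented out):

import asyncio

from aiohttp import ClientResponseError, ClientSession


async def fetch(url: str) -> str:
    async with ClientSession() as client:
        async with client.get(url) as r:
            if r.status >= 400:
                # same shape as the helper above: keep the body in the message
                raise ClientResponseError(
                    r.request_info,
                    r.history,
                    status=r.status,
                    message=await r.text(),
                    headers=r.headers,
                )
            return await r.text()


# asyncio.run(fetch("http://localhost:3567/api/videos"))  # placeholder URL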
@@ -5,8 +5,10 @@ from logging import getLogger
from typing import Any, cast

from asyncpg import Connection, Pool, create_pool
+from opentelemetry import trace

logger = getLogger(__name__)
+tracer = trace.get_tracer("kyoo.scanner")

pool: Pool
@@ -55,6 +57,7 @@ async def get_db_fapi():
        yield db


+@tracer.start_as_current_span("migrate")
async def migrate(migrations_dir="./migrations"):
    async with get_db() as db:
        _ = await db.execute(
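`start_as_current_span` doubles as a decorator, which is what the `migrate` change relies on; nested calls become child spans automatically. A small sketch that is safe to run even with no provider configured, since the OpenTelemetry API falls back to a no-op tracer (the span names here are illustrative):

from opentelemetry import trace

tracer = trace.get_tracer("kyoo.scanner")


@tracer.start_as_current_span("migrate")
def migrate():
    # each migration file could get its own child span under "migrate"
    with tracer.start_as_current_span("apply 000001_initial.sql"):
        pass


migrate()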
@@ -1,3 +1,5 @@
+import asyncio
+import itertools
import os
import re
from contextlib import asynccontextmanager
@@ -111,30 +113,33 @@ class FsScanner:
            logger.error("Unexpected error while monitoring files.", exc_info=e)

    async def _register(self, videos: list[str] | set[str]):
-       # TODO: we should probably chunk those
-       vids: list[Video] = []
-       for path in list(videos):
+       async def process(path: str):
            try:
                vid = await identify(path)
-               vid = self._match(vid)
-               vids.append(vid)
+               return self._match(vid)
            except Exception as e:
                logger.error("Couldn't identify %s.", path, exc_info=e)
-       created = await self._client.create_videos(vids)
+               return None

-       await self._requests.enqueue(
-           [
-               Request(
-                   kind=x.guess.kind,
-                   title=x.guess.title,
-                   year=next(iter(x.guess.years), None),
-                   external_id=x.guess.external_id,
-                   videos=[Request.Video(id=x.id, episodes=x.guess.episodes)],
-               )
-               for x in created
-               if not any(x.entries) and x.guess.kind != "extra"
-           ]
-       )
+       for batch in itertools.batched(videos, 20):
+           vids = await asyncio.gather(*(process(path) for path in batch))
+           created = await self._client.create_videos(
+               [v for v in vids if v is not None]
+           )
+
+           await self._requests.enqueue(
+               [
+                   Request(
+                       kind=x.guess.kind,
+                       title=x.guess.title,
+                       year=next(iter(x.guess.years), None),
+                       external_id=x.guess.external_id,
+                       videos=[Request.Video(id=x.id, episodes=x.guess.episodes)],
+                   )
+                   for x in created
+                   if not any(x.entries) and x.guess.kind != "extra"
+               ]
+           )

    def _match(self, video: Video) -> Video:
        video.for_ = []
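The `_register` rewrite swaps a sequential loop for per-batch concurrency: `itertools.batched` (Python 3.12+, so fine on the python3.13 dev image above) chunks the paths, `asyncio.gather` probes one chunk concurrently, and failed identifications come back as `None` and get filtered before the API call. A self-contained sketch of that shape; the `identify` stand-in is hypothetical:

import asyncio
import itertools


async def identify(path: str) -> str | None:
    await asyncio.sleep(0)  # stand-in for the real identification work
    return None if "bad" in path else path.upper()


async def register(videos: list[str]) -> None:
    for batch in itertools.batched(videos, 20):
        vids = await asyncio.gather(*(identify(p) for p in batch))
        # one downstream call per batch, failures dropped
        print([v for v in vids if v is not None])


asyncio.run(register(["a.mkv", "bad.mkv", "b.mkv"]))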
32 scanner/scanner/log.py Normal file
@@ -0,0 +1,32 @@
+import logging
+import os
+import sys
+
+from opentelemetry.sdk._logs import LoggingHandler
+
+
+def configure_logging():
+    root_logger = logging.getLogger()
+    root_logger.setLevel(logging.DEBUG)
+
+    logging.getLogger("watchfiles").setLevel(logging.WARNING)
+    logging.getLogger("rebulk").setLevel(logging.WARNING)
+
+    # Add stdout handler
+    stdout_handler = logging.StreamHandler(sys.stdout)
+    # set logging level via STDOUT_LOG_LEVEL env var or default to INFO
+    stdout_handler.setLevel(
+        getattr(logging, os.getenv("STDOUT_LOG_LEVEL", "INFO").upper())
+    )
+    stdout_handler.setFormatter(
+        logging.Formatter(
+            fmt="[{levelname}][{name}] {message}",
+            style="{",
+        )
+    )
+    root_logger.addHandler(stdout_handler)
+
+    # Add OpenTelemetry handler
+    # set logging level via OTEL_LOG_LEVEL env var
+    # https://opentelemetry.io/docs/specs/otel/configuration/sdk-environment-variables/#general-sdk-configuration
+    root_logger.addHandler(LoggingHandler())
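With this split, stdout verbosity and OTLP verbosity are controlled independently: `STDOUT_LOG_LEVEL` gates only the stream handler added above, while the OpenTelemetry handler follows the standard `OTEL_LOG_LEVEL` SDK variable. A quick check of the env-to-level mapping the handler uses (the value is illustrative):

import logging
import os

# e.g. exported in the container as STDOUT_LOG_LEVEL=debug
os.environ["STDOUT_LOG_LEVEL"] = "debug"
level = getattr(logging, os.getenv("STDOUT_LOG_LEVEL", "INFO").upper())
assert level == logging.DEBUG  # the stdout handler would now pass DEBUG records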
@@ -1,5 +1,6 @@
from __future__ import annotations
-from typing import Literal
+from datetime import datetime
+from typing import Any, Literal

from pydantic import Field
@@ -18,3 +19,17 @@ class Request(Model, extra="allow"):
    class Video(Model):
        id: str
        episodes: list[Guess.Episode]
+
+
+class RequestRet(Model):
+    id: str
+    kind: Literal["episode", "movie"]
+    title: str
+    year: int | None
+    status: Literal[
+        "pending",
+        "running",
+        "failed",
+    ]
+    error: dict[str, Any] | None
+    started_at: datetime | None
@@ -1,75 +1,77 @@
import logging
import os

from fastapi import FastAPI
from opentelemetry import metrics, trace
from opentelemetry._logs import set_logger_provider
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import (
    OTLPLogExporter as GrpcLogExporter,
)
from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (
    OTLPMetricExporter as GrpcMetricExporter,
)
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
    OTLPSpanExporter as GrpcSpanExporter,
)
from opentelemetry.exporter.otlp.proto.http._log_exporter import (
    OTLPLogExporter as HttpLogExporter,
)
from opentelemetry.exporter.otlp.proto.http.metric_exporter import (
    OTLPMetricExporter as HttpMetricExporter,
)
from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
    OTLPSpanExporter as HttpSpanExporter,
)
from opentelemetry import trace, metrics, _logs
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
from opentelemetry.sdk._logs import LoggerProvider
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.sdk.resources import Resource
from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor
from opentelemetry.instrumentation.asyncpg import AsyncPGInstrumentor
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
from opentelemetry.sdk.resources import SERVICE_NAME, Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

logger = logging.getLogger(__name__)


def setup_otelproviders() -> tuple[object, object, object]:
    import os

    if not (os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "").strip()):
        logger.info(
            "OTEL_EXPORTER_OTLP_ENDPOINT not specified, skipping otel provider setup."
        )
        return None, None, None

    # choose exporters (grpc vs http) ...
    if os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "").lower().strip() == "grpc":
        from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter
        from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (
            OTLPMetricExporter,
        )
        from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
            OTLPSpanExporter,
        )

        logger.info("Using gRPC libs for OpenTelemetry exporter.")

    else:
        from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
        from opentelemetry.exporter.otlp.proto.http.metric_exporter import (
            OTLPMetricExporter,
        )
        from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
            OTLPSpanExporter,
        )

        logger.info("Using HTTP libs for OpenTelemetry exporter.")

    resource = Resource.create(
        {"service.name": os.getenv("OTEL_SERVICE_NAME", "kyoo.scanner")}
    )

    # Traces
    tracer_provider = TracerProvider(resource=resource)
    tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
    trace.set_tracer_provider(tracer_provider)

    # Metrics
    meter_provider = MeterProvider(
        resource=resource,
        metric_readers=[PeriodicExportingMetricReader(OTLPMetricExporter())],
    )
    metrics.set_meter_provider(meter_provider)

    # Logs — install logger provider + processor/exporter
    logger_provider = LoggerProvider(resource=resource)
    logger_provider.add_log_record_processor(BatchLogRecordProcessor(OTLPLogExporter()))
    _logs.set_logger_provider(logger_provider)

    return tracer_provider, meter_provider, logger_provider


def instrument(app: FastAPI):
    proto = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "http/protobuf")
    resource = Resource.create(attributes={SERVICE_NAME: "kyoo.scanner"})

    provider = LoggerProvider(resource=resource)
    provider.add_log_record_processor(
        BatchLogRecordProcessor(
            HttpLogExporter() if proto == "http/protobuf" else GrpcLogExporter()
        )
    )
    set_logger_provider(provider)
    handler = LoggingHandler(level=logging.DEBUG, logger_provider=provider)
    logging.basicConfig(handlers=[handler], level=logging.DEBUG)
    logging.getLogger("watchfiles").setLevel(logging.WARNING)
    logging.getLogger("rebulk").setLevel(logging.WARNING)

    provider = TracerProvider(resource=resource)
    provider.add_span_processor(
        BatchSpanProcessor(
            HttpSpanExporter() if proto == "http/protobuf" else GrpcSpanExporter()
        )
    )
    trace.set_tracer_provider(provider)

    provider = MeterProvider(
        metric_readers=[
            PeriodicExportingMetricReader(
                HttpMetricExporter()
                if proto == "http/protobuf"
                else GrpcMetricExporter()
            )
        ],
        resource=resource,
    )
    metrics.set_meter_provider(provider)

    FastAPIInstrumentor.instrument_app(
        app,
        http_capture_headers_server_request=[".*"],
@@ -47,7 +47,7 @@ class Provider(ABC):
        search = await self.search_movies(title, year, language=[])
        if not any(search):
            raise ProviderError(
-               f"Couldn't find a movie with title {title}. (year: {year}"
+               f"Couldn't find a movie with title {title}. (year: {year})"
            )
        ret = await self.get_movie(
            {k: v.data_id for k, v in search[0].external_id.items()}
@@ -68,7 +68,7 @@ class Provider(ABC):
        search = await self.search_series(title, year, language=[])
        if not any(search):
            raise ProviderError(
-               f"Couldn't find a serie with title {title}. (year: {year}"
+               f"Couldn't find a serie with title {title}. (year: {year})"
            )
        ret = await self.get_serie(
            {k: v.data_id for k, v in search[0].external_id.items()}
@@ -420,6 +420,8 @@ class TheMovieDatabase(Provider):
                    (x["episode_number"] for x in season["episodes"]), None
                ),
                "entries_count": len(season["episodes"]),
+               # there can be gaps in episodes (like 1,2,5,6,7)
+               "episodes": [x["episode_number"] for x in season["episodes"]],
            },
        )
@@ -429,9 +431,9 @@ class TheMovieDatabase(Provider):
        # TODO: batch those
        ret = await asyncio.gather(
            *[
-               self._get_entry(serie_id, s.season_number, s.extra["first_entry"] + e)
+               self._get_entry(serie_id, s.season_number, e)
                for s in seasons
-               for e in range(0, s.extra["entries_count"])
+               for e in s.extra["episodes"]
            ]
        )
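The episode-list change fixes a wrong assumption: seasons can skip numbers, so iterating a `range` built from the episode count fetches the wrong (and sometimes nonexistent) episodes. A few lines make the failure mode obvious (the sample season is hypothetical):

episodes = [1, 2, 5]  # a season with a gap in its episode numbers
entries_count = len(episodes)
print(list(range(0, entries_count)))  # [0, 1, 2] -- wrong entries, misses 5
print(episodes)                       # [1, 2, 5] -- what is actually there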
@@ -1,5 +1,6 @@
from asyncio import CancelledError, Event, TaskGroup
from logging import getLogger
+from traceback import TracebackException
from typing import cast

from asyncpg import Connection, Pool
@@ -40,6 +41,8 @@ class RequestCreator:
            """
            delete from scanner.requests
            where status = 'failed'
+               or (status = 'running'
+                   and now() - started_at > interval '1 hour')
            """
        )
@@ -55,6 +58,7 @@ class RequestProcessor:
        self._database: Connection = None  # type: ignore
        self._client = client
        self._providers = providers
+       self._processing = False

    @tracer.start_as_current_span("listen_requests")
    async def listen(self, tg: TaskGroup):
@@ -75,7 +79,7 @@ class RequestProcessor:
        self._database.add_termination_listener(terminated)
        await self._database.add_listener("scanner_requests", process)

-       logger.info("Listening for requestes")
+       logger.info("Listening for requests")
        _ = await closed.wait()
        logger.info("stopping...")
    except CancelledError:
@@ -85,15 +89,22 @@ class RequestProcessor:
            raise

    async def process_all(self):
-       found = True
-       while found:
-           try:
-               found = await self.process_request()
-           except Exception as e:
-               logger.error(
-                   "Failed to process one of the metadata request", exc_info=e
-               )
+       if self._processing:
+           return
+       self._processing = True
+       try:
+           found = True
+           while found:
+               try:
+                   found = await self.process_request()
+               except Exception as e:
+                   logger.error(
+                       "Failed to process one of the metadata request", exc_info=e
+                   )
+       finally:
+           self._processing = False

    @tracer.start_as_current_span("process video")
    async def process_request(self):
        cur = await self._database.fetchrow(
            """
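The `_processing` flag is a plain boolean, not a lock, and that is enough here: `process_all` runs on a single asyncio event loop and there is no `await` between the check and the assignment, so two notifications cannot both pass the guard. A minimal model of the pattern (class and method names are illustrative):

import asyncio


class Worker:
    def __init__(self) -> None:
        self._processing = False

    async def process_all(self) -> None:
        if self._processing:  # no await between check and set -> race-free
            return
        self._processing = True
        try:
            while await self.process_request():
                pass
        finally:
            self._processing = False

    async def process_request(self) -> bool:
        await asyncio.sleep(0)  # stand-in for one unit of real work
        return False


asyncio.run(Worker().process_all())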
@@ -121,43 +132,55 @@ class RequestProcessor:
            return False
        request = Request.model_validate(cur)

-       with tracer.start_as_current_span(f"process {request.title}") as span:
-           logger.info(f"Starting to process {request.title}")
-           try:
-               show = await self._run_request(request)
-               finished = await self._database.fetchrow(
-                   """
-                   delete from scanner.requests
-                   where pk = $1
-                   returning
-                       videos
-                   """,
-                   request.pk,
+       span = trace.get_current_span()
+       span.update_name(f"process {request.title}")
+       logger.info(f"Starting to process {request.title}")
+       try:
+           show = await self._run_request(request)
+           finished = await self._database.fetchrow(
+               """
+               delete from scanner.requests
+               where pk = $1
+               returning
+                   videos
+               """,
+               request.pk,
+           )
+           if finished and finished["videos"] != request.videos:
+               videos = TypeAdapter(list[Request.Video]).validate_python(
+                   finished["videos"]
+               )
-               )
-               if finished and finished["videos"] != request.videos:
-                   videos = TypeAdapter(list[Request.Video]).validate_python(
-                       finished["videos"]
-                   )
-                   await self._client.link_videos(
-                       "movie" if request.kind == "movie" else "serie",
-                       show.slug,
-                       videos,
-                   )
-           except Exception as e:
-               span.set_status(trace.Status(trace.StatusCode.ERROR))
-               span.record_exception(e)
-               logger.error("Couldn't process request", exc_info=e)
-               cur = await self._database.execute(
-                   """
-                   update
-                       scanner.requests
-                   set
-                       status = 'failed'
-                   where
-                       pk = $1
-                   """,
-                   request.pk,
+               await self._client.link_videos(
+                   "movie" if request.kind == "movie" else "serie",
+                   show.slug,
+                   videos,
+               )
+       except Exception as e:
+           span.set_status(trace.Status(trace.StatusCode.ERROR))
+           span.record_exception(e)
+           logger.error("Couldn't process request", exc_info=e)
+           cur = await self._database.execute(
+               """
+               update
+                   scanner.requests
+               set
+                   status = 'failed',
+                   error = $2
+               where
+                   pk = $1
+               """,
+               request.pk,
+               {
+                   "title": type(e).__name__,
+                   "message": str(e),
+                   "traceback": [
+                       line
+                       for part in TracebackException.from_exception(e).format()
+                       for line in part.split("\n")
+                       if line.strip()
+                   ],
+               },
+           )
        return True

    async def _run_request(self, request: Request) -> Resource:
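The error payload stored in the new `error` jsonb column flattens the traceback into a list of non-empty lines: `TracebackException.from_exception(e).format()` yields multi-line chunks, hence the double loop over parts and their split lines. A quick check of that flattening:

from traceback import TracebackException

try:
    1 / 0
except Exception as e:
    lines = [
        line
        for part in TracebackException.from_exception(e).format()
        for line in part.split("\n")
        if line.strip()
    ]
    print(lines[-1])  # ZeroDivisionError: division by zero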
Some files were not shown because too many files have changed in this diff.